Merge github.com:grpc/grpc into metadata_filter

reviewable/pr8842/r14
Craig Tiller 8 years ago
commit 5acc826ace
Changed files, with the number of changed lines per file in parentheses:
  1. .gitignore (1)
  2. BUILD (3)
  3. CMakeLists.txt (7)
  4. Makefile (9)
  5. bazel/BUILD (39)
  6. bazel/cc_grpc_library.bzl (1)
  7. bazel/generate_cc.bzl (4)
  8. build.yaml (1)
  9. examples/cpp/helloworld/BUILD (42)
  10. examples/cpp/helloworld/greeter_client.cc (4)
  11. examples/cpp/helloworld/greeter_server.cc (4)
  12. examples/protos/BUILD (52)
  13. gRPC-Core.podspec (2)
  14. grpc.gemspec (2)
  15. include/grpc/impl/codegen/gpr_slice.h (84)
  16. include/grpc/impl/codegen/slice.h (19)
  17. package.xml (48)
  18. setup.cfg (4)
  19. src/core/ext/client_channel/connector.h (3)
  20. src/core/ext/client_channel/subchannel.c (57)
  21. src/core/ext/client_channel/subchannel.h (13)
  22. src/core/ext/client_channel/subchannel_index.c (12)
  23. src/core/ext/lb_policy/pick_first/pick_first.c (12)
  24. src/core/ext/lb_policy/round_robin/round_robin.c (12)
  25. src/core/ext/transport/chttp2/client/chttp2_connector.c (7)
  26. src/php/README.md (7)
  27. src/proto/grpc/testing/BUILD (30)
  28. src/proto/grpc/testing/duplicate/BUILD (30)
  29. src/python/grpcio/_spawn_patch.py (6)
  30. src/python/grpcio/commands.py (54)
  31. src/python/grpcio/grpc/__init__.py (152)
  32. src/python/grpcio/grpc/_auth.py (7)
  33. src/python/grpcio/grpc/_channel.py (281)
  34. src/python/grpcio/grpc/_common.py (21)
  35. src/python/grpcio/grpc/_credential_composition.py (4)
  36. src/python/grpcio/grpc/_plugin_wrapping.py (19)
  37. src/python/grpcio/grpc/_server.py (192)
  38. src/python/grpcio/grpc/_utilities.py (19)
  39. src/python/grpcio/grpc/beta/_client_adaptations.py (424)
  40. src/python/grpcio/grpc/beta/_connectivity_channel.py (27)
  41. src/python/grpcio/grpc/beta/_server_adaptations.py (116)
  42. src/python/grpcio/grpc/beta/implementations.py (65)
  43. src/python/grpcio/grpc/beta/interfaces.py (2)
  44. src/python/grpcio/grpc/beta/utilities.py (2)
  45. src/python/grpcio/grpc/framework/__init__.py (2)
  46. src/python/grpcio/grpc/framework/common/__init__.py (2)
  47. src/python/grpcio/grpc/framework/common/cardinality.py (1)
  48. src/python/grpcio/grpc/framework/common/style.py (1)
  49. src/python/grpcio/grpc/framework/foundation/__init__.py (2)
  50. src/python/grpcio/grpc/framework/foundation/abandonment.py (1)
  51. src/python/grpcio/grpc/framework/foundation/callable_util.py (11)
  52. src/python/grpcio/grpc/framework/foundation/future.py (1)
  53. src/python/grpcio/grpc/framework/foundation/logging_pool.py (11)
  54. src/python/grpcio/grpc/framework/foundation/stream.py (2)
  55. src/python/grpcio/grpc/framework/foundation/stream_util.py (1)
  56. src/python/grpcio/grpc/framework/interfaces/__init__.py (2)
  57. src/python/grpcio/grpc/framework/interfaces/base/__init__.py (2)
  58. src/python/grpcio/grpc/framework/interfaces/base/base.py (20)
  59. src/python/grpcio/grpc/framework/interfaces/base/utilities.py (30)
  60. src/python/grpcio/grpc/framework/interfaces/face/__init__.py (2)
  61. src/python/grpcio/grpc/framework/interfaces/face/face.py (182)
  62. src/python/grpcio/grpc/framework/interfaces/face/utilities.py (69)
  63. src/python/grpcio/support.py (21)
  64. src/python/grpcio_health_checking/grpc_health/__init__.py (2)
  65. src/python/grpcio_health_checking/grpc_health/v1/__init__.py (2)
  66. src/python/grpcio_health_checking/grpc_health/v1/health.py (1)
  67. src/python/grpcio_health_checking/health_commands.py (1)
  68. src/python/grpcio_health_checking/setup.py (14)
  69. src/python/grpcio_reflection/grpc_reflection/__init__.py (1)
  70. src/python/grpcio_reflection/grpc_reflection/v1alpha/__init__.py (1)
  71. src/python/grpcio_reflection/grpc_reflection/v1alpha/reflection.py (38)
  72. src/python/grpcio_reflection/reflection_commands.py (7)
  73. src/python/grpcio_reflection/setup.py (14)
  74. src/python/grpcio_tests/commands.py (16)
  75. src/python/grpcio_tests/setup.py (20)
  76. src/python/grpcio_tests/tests/_loader.py (3)
  77. src/python/grpcio_tests/tests/_result.py (136)
  78. src/python/grpcio_tests/tests/_runner.py (31)
  79. src/python/grpcio_tests/tests/health_check/_health_servicer_test.py (7)
  80. src/python/grpcio_tests/tests/http2/_negative_http2_client.py (67)
  81. src/python/grpcio_tests/tests/interop/__init__.py (2)
  82. src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py (8)
  83. src/python/grpcio_tests/tests/interop/_intraop_test_case.py (10)
  84. src/python/grpcio_tests/tests/interop/_secure_intraop_test.py (21)
  85. src/python/grpcio_tests/tests/interop/client.py (43)
  86. src/python/grpcio_tests/tests/interop/methods.py (159)
  87. src/python/grpcio_tests/tests/interop/resources.py (9)
  88. src/python/grpcio_tests/tests/interop/server.py (15)
  89. src/python/grpcio_tests/tests/protoc_plugin/__init__.py (2)
  90. src/python/grpcio_tests/tests/protoc_plugin/_python_plugin_test.py (82)
  91. src/python/grpcio_tests/tests/protoc_plugin/_split_definitions_test.py (54)
  92. src/python/grpcio_tests/tests/protoc_plugin/beta_python_plugin_test.py (58)
  93. src/python/grpcio_tests/tests/protoc_plugin/protos/__init__.py (2)
  94. src/python/grpcio_tests/tests/protoc_plugin/protos/invocation_testing/__init__.py (2)
  95. src/python/grpcio_tests/tests/protoc_plugin/protos/invocation_testing/split_messages/__init__.py (2)
  96. src/python/grpcio_tests/tests/protoc_plugin/protos/invocation_testing/split_services/__init__.py (2)
  97. src/python/grpcio_tests/tests/protoc_plugin/protos/payload/__init__.py (2)
  98. src/python/grpcio_tests/tests/protoc_plugin/protos/requests/__init__.py (2)
  99. src/python/grpcio_tests/tests/protoc_plugin/protos/requests/r/__init__.py (2)
  100. src/python/grpcio_tests/tests/protoc_plugin/protos/responses/__init__.py (2)
Some files were not shown because too many files have changed in this diff.

.gitignore

@@ -8,6 +8,7 @@ objs
 # Python items
 cython_debug/
 python_build/
+python_format_venv/
 .coverage*
 .eggs
 htmlcov/

BUILD

@@ -401,6 +401,7 @@ grpc_cc_library(
 "include/grpc/impl/codegen/atm_gcc_atomic.h",
 "include/grpc/impl/codegen/atm_gcc_sync.h",
 "include/grpc/impl/codegen/atm_windows.h",
+"include/grpc/impl/codegen/gpr_slice.h",
 "include/grpc/impl/codegen/gpr_types.h",
 "include/grpc/impl/codegen/port_platform.h",
 "include/grpc/impl/codegen/slice.h",
@@ -1083,6 +1084,7 @@ grpc_cc_library(
 "src/cpp/common/completion_queue_cc.cc",
 "src/cpp/common/core_codegen.cc",
 "src/cpp/common/rpc_method.cc",
+"src/cpp/common/version_cc.cc",
 "src/cpp/server/async_generic_service.cc",
 "src/cpp/server/create_default_thread_pool.cc",
 "src/cpp/server/dynamic_thread_pool.cc",
@@ -1091,6 +1093,7 @@ grpc_cc_library(
 "src/cpp/server/server_context.cc",
 "src/cpp/server/server_credentials.cc",
 "src/cpp/server/server_posix.cc",
+"src/cpp/thread_manager/thread_manager.cc",
 "src/cpp/util/byte_buffer_cc.cc",
 "src/cpp/util/slice_cc.cc",
 "src/cpp/util/status.cc",

CMakeLists.txt

@@ -264,6 +264,7 @@ foreach(_hdr
 include/grpc/impl/codegen/atm_gcc_atomic.h
 include/grpc/impl/codegen/atm_gcc_sync.h
 include/grpc/impl/codegen/atm_windows.h
+include/grpc/impl/codegen/gpr_slice.h
 include/grpc/impl/codegen/gpr_types.h
 include/grpc/impl/codegen/port_platform.h
 include/grpc/impl/codegen/slice.h
@@ -548,6 +549,7 @@ foreach(_hdr
 include/grpc/impl/codegen/atm_gcc_atomic.h
 include/grpc/impl/codegen/atm_gcc_sync.h
 include/grpc/impl/codegen/atm_windows.h
+include/grpc/impl/codegen/gpr_slice.h
 include/grpc/impl/codegen/gpr_types.h
 include/grpc/impl/codegen/port_platform.h
 include/grpc/impl/codegen/slice.h
@@ -804,6 +806,7 @@ foreach(_hdr
 include/grpc/impl/codegen/atm_gcc_atomic.h
 include/grpc/impl/codegen/atm_gcc_sync.h
 include/grpc/impl/codegen/atm_windows.h
+include/grpc/impl/codegen/gpr_slice.h
 include/grpc/impl/codegen/gpr_types.h
 include/grpc/impl/codegen/port_platform.h
 include/grpc/impl/codegen/slice.h
@@ -1060,6 +1063,7 @@ foreach(_hdr
 include/grpc/impl/codegen/atm_gcc_atomic.h
 include/grpc/impl/codegen/atm_gcc_sync.h
 include/grpc/impl/codegen/atm_windows.h
+include/grpc/impl/codegen/gpr_slice.h
 include/grpc/impl/codegen/gpr_types.h
 include/grpc/impl/codegen/port_platform.h
 include/grpc/impl/codegen/slice.h
@@ -1225,6 +1229,7 @@ foreach(_hdr
 include/grpc/impl/codegen/atm_gcc_atomic.h
 include/grpc/impl/codegen/atm_gcc_sync.h
 include/grpc/impl/codegen/atm_windows.h
+include/grpc/impl/codegen/gpr_slice.h
 include/grpc/impl/codegen/gpr_types.h
 include/grpc/impl/codegen/port_platform.h
 include/grpc/impl/codegen/slice.h
@@ -1564,6 +1569,7 @@ foreach(_hdr
 include/grpc/impl/codegen/atm_gcc_atomic.h
 include/grpc/impl/codegen/atm_gcc_sync.h
 include/grpc/impl/codegen/atm_windows.h
+include/grpc/impl/codegen/gpr_slice.h
 include/grpc/impl/codegen/gpr_types.h
 include/grpc/impl/codegen/port_platform.h
 include/grpc/impl/codegen/slice.h
@@ -1733,6 +1739,7 @@ foreach(_hdr
 include/grpc/impl/codegen/atm_gcc_atomic.h
 include/grpc/impl/codegen/atm_gcc_sync.h
 include/grpc/impl/codegen/atm_windows.h
+include/grpc/impl/codegen/gpr_slice.h
 include/grpc/impl/codegen/gpr_types.h
 include/grpc/impl/codegen/port_platform.h
 include/grpc/impl/codegen/slice.h

Makefile

@@ -2539,6 +2539,7 @@ PUBLIC_HEADERS_C += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -2851,6 +2852,7 @@ PUBLIC_HEADERS_C += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -3126,6 +3128,7 @@ PUBLIC_HEADERS_C += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -3348,6 +3351,7 @@ PUBLIC_HEADERS_C += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -3643,6 +3647,7 @@ PUBLIC_HEADERS_C += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -3889,6 +3894,7 @@ PUBLIC_HEADERS_CXX += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -4257,6 +4263,7 @@ PUBLIC_HEADERS_CXX += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -4614,6 +4621,7 @@ PUBLIC_HEADERS_CXX += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \
@@ -4794,6 +4802,7 @@ PUBLIC_HEADERS_CXX += \
 include/grpc/impl/codegen/atm_gcc_atomic.h \
 include/grpc/impl/codegen/atm_gcc_sync.h \
 include/grpc/impl/codegen/atm_windows.h \
+include/grpc/impl/codegen/gpr_slice.h \
 include/grpc/impl/codegen/gpr_types.h \
 include/grpc/impl/codegen/port_platform.h \
 include/grpc/impl/codegen/slice.h \

bazel/BUILD

@@ -1,9 +1,46 @@
+# Copyright 2017, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+licenses(["notice"]) # 3-clause BSD
+
 package(default_visibility = ["//:__subpackages__"])
 load(":cc_grpc_library.bzl", "cc_grpc_library")
+proto_library(
+name = "well_known_protos_list",
+srcs = ["@submodule_protobuf//:well_known_protos"],
+)
 cc_grpc_library(
 name = "well_known_protos",
-srcs = "@submodule_protobuf//:well_known_protos",
+srcs = "well_known_protos_list",
+deps = [],
 proto_only = True,
 )

bazel/cc_grpc_library.bzl

@@ -44,7 +44,6 @@ def cc_grpc_library(name, srcs, deps, proto_only, **kwargs):
 **kwargs
 )
-if not proto_only:
 native.cc_library(
 name = name,
 srcs = [":" + codegen_grpc_target, ":" + codegen_target],

bazel/generate_cc.bzl

@@ -24,13 +24,15 @@ def generate_cc_impl(ctx):
 if ctx.executable.plugin:
 arguments += ["--plugin=protoc-gen-PLUGIN=" + ctx.executable.plugin.path]
 arguments += ["--PLUGIN_out=" + ",".join(ctx.attr.flags) + ":" + dir_out]
+additional_input = [ctx.executable.plugin]
 else:
 arguments += ["--cpp_out=" + ",".join(ctx.attr.flags) + ":" + dir_out]
+additional_input = []
 arguments += ["-I{0}={0}".format(include.path) for include in includes]
 arguments += [proto.path for proto in protos]
 ctx.action(
-inputs = protos + includes,
+inputs = protos + includes + additional_input,
 outputs = out_files,
 executable = ctx.executable._protoc,
 arguments = arguments,

build.yaml

@@ -144,6 +144,7 @@ filegroups:
 - include/grpc/impl/codegen/atm_gcc_atomic.h
 - include/grpc/impl/codegen/atm_gcc_sync.h
 - include/grpc/impl/codegen/atm_windows.h
+- include/grpc/impl/codegen/gpr_slice.h
 - include/grpc/impl/codegen/gpr_types.h
 - include/grpc/impl/codegen/port_platform.h
 - include/grpc/impl/codegen/slice.h

examples/cpp/helloworld/BUILD (new file)

@@ -0,0 +1,42 @@
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
cc_binary(
name = "greeter_client",
srcs = ["greeter_client.cc"],
deps = ["//examples/protos:helloworld"],
defines = ["BAZEL_BUILD"],
)
cc_binary(
name = "greeter_server",
srcs = ["greeter_server.cc"],
deps = ["//examples/protos:helloworld"],
defines = ["BAZEL_BUILD"],
)

examples/cpp/helloworld/greeter_client.cc

@@ -37,7 +37,11 @@
 #include <grpc++/grpc++.h>
+#ifdef BAZEL_BUILD
+#include "examples/protos/helloworld.grpc.pb.h"
+#else
 #include "helloworld.grpc.pb.h"
+#endif
 using grpc::Channel;
 using grpc::ClientContext;

examples/cpp/helloworld/greeter_server.cc

@@ -37,7 +37,11 @@
 #include <grpc++/grpc++.h>
+#ifdef BAZEL_BUILD
+#include "examples/protos/helloworld.grpc.pb.h"
+#else
 #include "helloworld.grpc.pb.h"
+#endif
 using grpc::Server;
 using grpc::ServerBuilder;

examples/protos/BUILD (new file)

@@ -0,0 +1,52 @@
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package(default_visibility = ["//visibility:public"])
load("//bazel:grpc_build_system.bzl", "grpc_proto_library")
grpc_proto_library(
name = "auth_sample",
srcs = ["auth_sample.proto"],
)
grpc_proto_library(
name = "hellostreamingworld",
srcs = ["hellostreamingworld.proto"],
)
grpc_proto_library(
name = "helloworld",
srcs = ["helloworld.proto"],
)
grpc_proto_library(
name = "route_guide",
srcs = ["route_guide.proto"],
)

gRPC-Core.podspec

@@ -148,6 +148,7 @@ Pod::Spec.new do |s|
 'include/grpc/impl/codegen/atm_gcc_atomic.h',
 'include/grpc/impl/codegen/atm_gcc_sync.h',
 'include/grpc/impl/codegen/atm_windows.h',
+'include/grpc/impl/codegen/gpr_slice.h',
 'include/grpc/impl/codegen/gpr_types.h',
 'include/grpc/impl/codegen/port_platform.h',
 'include/grpc/impl/codegen/slice.h',
@@ -175,6 +176,7 @@ Pod::Spec.new do |s|
 'include/grpc/impl/codegen/atm_gcc_atomic.h',
 'include/grpc/impl/codegen/atm_gcc_sync.h',
 'include/grpc/impl/codegen/atm_windows.h',
+'include/grpc/impl/codegen/gpr_slice.h',
 'include/grpc/impl/codegen/gpr_types.h',
 'include/grpc/impl/codegen/port_platform.h',
 'include/grpc/impl/codegen/slice.h',

grpc.gemspec

@@ -73,6 +73,7 @@ Gem::Specification.new do |s|
 s.files += %w( include/grpc/impl/codegen/atm_gcc_atomic.h )
 s.files += %w( include/grpc/impl/codegen/atm_gcc_sync.h )
 s.files += %w( include/grpc/impl/codegen/atm_windows.h )
+s.files += %w( include/grpc/impl/codegen/gpr_slice.h )
 s.files += %w( include/grpc/impl/codegen/gpr_types.h )
 s.files += %w( include/grpc/impl/codegen/port_platform.h )
 s.files += %w( include/grpc/impl/codegen/slice.h )
@@ -156,6 +157,7 @@ Gem::Specification.new do |s|
 s.files += %w( include/grpc/impl/codegen/atm_gcc_atomic.h )
 s.files += %w( include/grpc/impl/codegen/atm_gcc_sync.h )
 s.files += %w( include/grpc/impl/codegen/atm_windows.h )
+s.files += %w( include/grpc/impl/codegen/gpr_slice.h )
 s.files += %w( include/grpc/impl/codegen/gpr_types.h )
 s.files += %w( include/grpc/impl/codegen/port_platform.h )
 s.files += %w( include/grpc/impl/codegen/slice.h )

include/grpc/impl/codegen/gpr_slice.h (new file)

@@ -0,0 +1,84 @@
/*
*
* Copyright 2016, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef GRPC_IMPL_CODEGEN_GPR_SLICE_H
#define GRPC_IMPL_CODEGEN_GPR_SLICE_H
/* WARNING: Please do not use this header. This was added as a temporary measure
* to not break some of the external projects that depend on gpr_slice_*
* functions. We are actively working on moving all the gpr_slice_* references
* to grpc_slice_* and this file will be removed
* */
/* TODO (sreek) - Allowed by default but will be very soon turned off */
#define GRPC_ALLOW_GPR_SLICE_FUNCTIONS 1
#ifdef GRPC_ALLOW_GPR_SLICE_FUNCTIONS
#define gpr_slice_refcount grpc_slice_refcount
#define gpr_slice grpc_slice
#define gpr_slice_buffer grpc_slice_buffer
#define gpr_slice_ref grpc_slice_ref
#define gpr_slice_unref grpc_slice_unref
#define gpr_slice_new grpc_slice_new
#define gpr_slice_new_with_user_data grpc_slice_new_with_user_data
#define gpr_slice_new_with_len grpc_slice_new_with_len
#define gpr_slice_malloc grpc_slice_malloc
#define gpr_slice_from_copied_string grpc_slice_from_copied_string
#define gpr_slice_from_copied_buffer grpc_slice_from_copied_buffer
#define gpr_slice_from_static_string grpc_slice_from_static_string
#define gpr_slice_sub grpc_slice_sub
#define gpr_slice_sub_no_ref grpc_slice_sub_no_ref
#define gpr_slice_split_tail grpc_slice_split_tail
#define gpr_slice_split_head grpc_slice_split_head
#define gpr_slice_cmp grpc_slice_cmp
#define gpr_slice_str_cmp grpc_slice_str_cmp
#define gpr_slice_buffer grpc_slice_buffer
#define gpr_slice_buffer_init grpc_slice_buffer_init
#define gpr_slice_buffer_destroy grpc_slice_buffer_destroy
#define gpr_slice_buffer_add grpc_slice_buffer_add
#define gpr_slice_buffer_add_indexed grpc_slice_buffer_add_indexed
#define gpr_slice_buffer_addn grpc_slice_buffer_addn
#define gpr_slice_buffer_tiny_add grpc_slice_buffer_tiny_add
#define gpr_slice_buffer_pop grpc_slice_buffer_pop
#define gpr_slice_buffer_reset_and_unref grpc_slice_buffer_reset_and_unref
#define gpr_slice_buffer_swap grpc_slice_buffer_swap
#define gpr_slice_buffer_move_into grpc_slice_buffer_move_into
#define gpr_slice_buffer_trim_end grpc_slice_buffer_trim_end
#define gpr_slice_buffer_move_first grpc_slice_buffer_move_first
#define gpr_slice_buffer_take_first grpc_slice_buffer_take_first
#endif /* GRPC_ALLOW_GPR_SLICE_FUNCTIONS */
#endif /* GRPC_IMPL_CODEGEN_GPR_SLICE_H */
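The practical effect of this shim: while GRPC_ALLOW_GPR_SLICE_FUNCTIONS is defined (the default set above), code written against the old gpr_slice_* names keeps compiling against the renamed grpc_slice_* API. A minimal sketch of a hypothetical legacy caller (not part of this commit; it assumes the public <grpc/slice.h> header, which pulls in these codegen headers):

```c
/* Hypothetical legacy caller, not part of this commit. With
 * GRPC_ALLOW_GPR_SLICE_FUNCTIONS enabled, the preprocessor rewrites the
 * old gpr_slice spellings into the new grpc_slice API. */
#include <grpc/slice.h>

static size_t legacy_payload_length(void) {
  gpr_slice s = gpr_slice_from_copied_string("hello"); /* -> grpc_slice_from_copied_string */
  size_t len = GPR_SLICE_LENGTH(s);                    /* -> same expansion as GRPC_SLICE_LENGTH */
  gpr_slice_unref(s);                                  /* -> grpc_slice_unref */
  return len;
}
```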

include/grpc/impl/codegen/slice.h

@@ -38,6 +38,7 @@
 #include <stdint.h>
 #include <grpc/impl/codegen/exec_ctx_fwd.h>
+#include <grpc/impl/codegen/gpr_slice.h>
 typedef struct grpc_slice grpc_slice;
@@ -130,4 +131,22 @@ typedef struct {
 GRPC_SLICE_START_PTR(slice) + GRPC_SLICE_LENGTH(slice)
 #define GRPC_SLICE_IS_EMPTY(slice) (GRPC_SLICE_LENGTH(slice) == 0)
+#ifdef GRPC_ALLOW_GPR_SLICE_FUNCTIONS
+/* Duplicate GPR_* definitions */
+#define GPR_SLICE_START_PTR(slice) \
+((slice).refcount ? (slice).data.refcounted.bytes \
+: (slice).data.inlined.bytes)
+#define GPR_SLICE_LENGTH(slice) \
+((slice).refcount ? (slice).data.refcounted.length \
+: (slice).data.inlined.length)
+#define GPR_SLICE_SET_LENGTH(slice, newlen) \
+((slice).refcount ? ((slice).data.refcounted.length = (size_t)(newlen)) \
+: ((slice).data.inlined.length = (uint8_t)(newlen)))
+#define GPR_SLICE_END_PTR(slice) \
+GRPC_SLICE_START_PTR(slice) + GRPC_SLICE_LENGTH(slice)
+#define GPR_SLICE_IS_EMPTY(slice) (GRPC_SLICE_LENGTH(slice) == 0)
+#endif /* GRPC_ALLOW_GPR_SLICE_FUNCTIONS */
 #endif /* GRPC_IMPL_CODEGEN_SLICE_H */
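A tiny illustrative check (not from this commit) of what the duplicated macros guarantee: with GRPC_ALLOW_GPR_SLICE_FUNCTIONS on, each GPR_SLICE_* accessor expands to the same refcount-aware expression as its GRPC_SLICE_* counterpart, so both spellings agree on any slice:

```c
/* Illustrative only, not part of this commit. */
#include <assert.h>
#include <grpc/slice.h>

static void check_gpr_grpc_slice_macros_agree(grpc_slice s) {
  assert(GPR_SLICE_LENGTH(s) == GRPC_SLICE_LENGTH(s));
  assert(GPR_SLICE_START_PTR(s) == GRPC_SLICE_START_PTR(s));
  assert(GPR_SLICE_IS_EMPTY(s) == GRPC_SLICE_IS_EMPTY(s));
}
```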

package.xml

@@ -10,19 +10,20 @@
 <email>grpc-packages@google.com</email>
 <active>yes</active>
 </lead>
-<date>2016-08-22</date>
+<date>2017-01-13</date>
 <time>16:06:07</time>
 <version>
 <release>1.1.0dev</release>
 <api>1.1.0dev</api>
 </version>
 <stability>
-<release>stable</release>
-<api>stable</api>
+<release>beta</release>
+<api>beta</api>
 </stability>
 <license>BSD</license>
 <notes>
-- Reject metadata keys which are not legal #7881
+- PHP Proto3 adoption #8179
+- Various bug fixes
 </notes>
 <contents>
 <dir baseinstalldir="/" name="/">
@@ -81,6 +82,7 @@
 <file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_atomic.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_sync.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/atm_windows.h" role="src" />
+<file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_slice.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_types.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/port_platform.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/slice.h" role="src" />
@@ -164,6 +166,7 @@
 <file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_atomic.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_sync.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/atm_windows.h" role="src" />
+<file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_slice.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_types.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/port_platform.h" role="src" />
 <file baseinstalldir="/" name="include/grpc/impl/codegen/slice.h" role="src" />
@@ -1220,18 +1223,49 @@ Update to wrap gRPC C Core version 0.10.0
 </release>
 <release>
 <version>
-<release>1.1.0dev</release>
-<api>1.1.0dev</api>
+<release>1.0.1RC1</release>
+<api>1.0.1RC1</api>
+</version>
+<stability>
+<release>beta</release>
+<api>beta</api>
+</stability>
+<date>2016-10-06</date>
+<license>BSD</license>
+<notes>
+- Reject metadata keys which are not legal #7881
+</notes>
+</release>
+<release>
+<version>
+<release>1.0.1</release>
+<api>1.0.1</api>
 </version>
 <stability>
 <release>stable</release>
 <api>stable</api>
 </stability>
-<date>2016-08-22</date>
+<date>2016-10-27</date>
 <license>BSD</license>
 <notes>
 - Reject metadata keys which are not legal #7881
 </notes>
 </release>
+<release>
+<version>
+<release>1.1.0dev</release>
+<api>1.1.0dev</api>
+</version>
+<stability>
+<release>beta</release>
+<api>beta</api>
+</stability>
+<date>2017-01-13</date>
+<license>BSD</license>
+<notes>
+- PHP Proto3 adoption #8179
+- Various bug fixes
+</notes>
+</release>
 </changelog>
 </package>

setup.cfg

@@ -11,3 +11,7 @@ inplace=1
 [build_package_protos]
 exclude=.*protoc_plugin/protoc_plugin_test\.proto$
+
+# Style settings
+[yapf]
+based_on_style = google

src/core/ext/client_channel/connector.h

@@ -48,9 +48,6 @@ struct grpc_connector {
 typedef struct {
 /** set of pollsets interested in this connection */
 grpc_pollset_set *interested_parties;
-/** address to connect to */
-const grpc_resolved_address *addr;
-size_t addr_len;
 /** initial connect string to send */
 grpc_slice initial_connect_string;
 /** deadline for connection */

src/core/ext/client_channel/subchannel.c

@@ -38,12 +38,16 @@
 #include <grpc/support/alloc.h>
 #include <grpc/support/avl.h>
+#include <grpc/support/string_util.h>
 #include "src/core/ext/client_channel/client_channel.h"
 #include "src/core/ext/client_channel/initial_connect_string.h"
+#include "src/core/ext/client_channel/parse_address.h"
 #include "src/core/ext/client_channel/subchannel_index.h"
+#include "src/core/ext/client_channel/uri_parser.h"
 #include "src/core/lib/channel/channel_args.h"
 #include "src/core/lib/channel/connected_channel.h"
+#include "src/core/lib/iomgr/sockaddr_utils.h"
 #include "src/core/lib/iomgr/timer.h"
 #include "src/core/lib/profiling/timers.h"
 #include "src/core/lib/slice/slice_internal.h"
@@ -95,8 +99,6 @@ struct grpc_subchannel {
 size_t num_filters;
 /** channel arguments */
 grpc_channel_args *args;
-/** address to connect to */
-grpc_resolved_address *addr;
 grpc_subchannel_key *key;
@@ -211,7 +213,6 @@ static void subchannel_destroy(grpc_exec_ctx *exec_ctx, void *arg,
 grpc_subchannel *c = arg;
 gpr_free((void *)c->filters);
 grpc_channel_args_destroy(exec_ctx, c->args);
-gpr_free(c->addr);
 grpc_slice_unref_internal(exec_ctx, c->initial_connect_string);
 grpc_connectivity_state_destroy(exec_ctx, &c->state_tracker);
 grpc_connector_unref(exec_ctx, c->connector);
@@ -327,12 +328,17 @@ grpc_subchannel *grpc_subchannel_create(grpc_exec_ctx *exec_ctx,
 } else {
 c->filters = NULL;
 }
-c->addr = gpr_malloc(sizeof(grpc_resolved_address));
-if (args->addr->len)
-memcpy(c->addr, args->addr, sizeof(grpc_resolved_address));
 c->pollset_set = grpc_pollset_set_create();
-grpc_set_initial_connect_string(&c->addr, &c->initial_connect_string);
-c->args = grpc_channel_args_copy(args->args);
+grpc_resolved_address *addr = gpr_malloc(sizeof(*addr));
+grpc_get_subchannel_address_arg(args->args, addr);
+grpc_set_initial_connect_string(&addr, &c->initial_connect_string);
+static const char *keys_to_remove[] = {GRPC_ARG_SUBCHANNEL_ADDRESS};
+grpc_arg new_arg = grpc_create_subchannel_address_arg(addr);
+gpr_free(addr);
+c->args = grpc_channel_args_copy_and_add_and_remove(
+args->args, keys_to_remove, GPR_ARRAY_SIZE(keys_to_remove), &new_arg, 1);
+gpr_free(new_arg.value.string);
 c->root_external_state_watcher.next = c->root_external_state_watcher.prev =
 &c->root_external_state_watcher;
 grpc_closure_init(&c->connected, subchannel_connected, c,
@@ -385,7 +391,6 @@ static void continue_connect_locked(grpc_exec_ctx *exec_ctx,
 grpc_connect_in_args args;
 args.interested_parties = c->pollset_set;
-args.addr = c->addr;
 args.deadline = c->next_attempt;
 args.channel_args = c->args;
 args.initial_connect_string = c->initial_connect_string;
@@ -769,3 +774,37 @@ grpc_call_stack *grpc_subchannel_call_get_call_stack(
 grpc_subchannel_call *subchannel_call) {
 return SUBCHANNEL_CALL_TO_CALL_STACK(subchannel_call);
 }
+
+static void grpc_uri_to_sockaddr(char *uri_str, grpc_resolved_address *addr) {
+grpc_uri *uri = grpc_uri_parse(uri_str, 0 /* suppress_errors */);
+GPR_ASSERT(uri != NULL);
+if (strcmp(uri->scheme, "ipv4") == 0) {
+GPR_ASSERT(parse_ipv4(uri, addr));
+} else if (strcmp(uri->scheme, "ipv6") == 0) {
+GPR_ASSERT(parse_ipv6(uri, addr));
+} else {
+GPR_ASSERT(parse_unix(uri, addr));
+}
+grpc_uri_destroy(uri);
+}
+
+void grpc_get_subchannel_address_arg(const grpc_channel_args *args,
+grpc_resolved_address *addr) {
+const grpc_arg *addr_arg =
+grpc_channel_args_find(args, GRPC_ARG_SUBCHANNEL_ADDRESS);
+GPR_ASSERT(addr_arg != NULL);  // Should have been set by LB policy.
+GPR_ASSERT(addr_arg->type == GRPC_ARG_STRING);
+memset(addr, 0, sizeof(*addr));
+if (*addr_arg->value.string != '\0') {
+grpc_uri_to_sockaddr(addr_arg->value.string, addr);
+}
+}
+
+grpc_arg grpc_create_subchannel_address_arg(const grpc_resolved_address *addr) {
+grpc_arg new_arg;
+new_arg.key = GRPC_ARG_SUBCHANNEL_ADDRESS;
+new_arg.type = GRPC_ARG_STRING;
+new_arg.value.string =
+addr->len > 0 ? grpc_sockaddr_to_uri(addr) : gpr_strdup("");
+return new_arg;
+}

src/core/ext/client_channel/subchannel.h

@@ -40,6 +40,9 @@
 #include "src/core/lib/transport/connectivity_state.h"
 #include "src/core/lib/transport/metadata.h"
+// Channel arg containing a grpc_resolved_address to connect to.
+#define GRPC_ARG_SUBCHANNEL_ADDRESS "grpc.subchannel_address"
 /** A (sub-)channel that knows how to connect to exactly one target
 address. Provides a target for load balancing. */
 typedef struct grpc_subchannel grpc_subchannel;
@@ -164,8 +167,6 @@ struct grpc_subchannel_args {
 size_t filter_count;
 /** Channel arguments to be supplied to the newly created channel */
 const grpc_channel_args *args;
-/** Address to connect to */
-grpc_resolved_address *addr;
 };
 /** create a subchannel given a connector */
@@ -173,4 +174,12 @@ grpc_subchannel *grpc_subchannel_create(grpc_exec_ctx *exec_ctx,
 grpc_connector *connector,
 const grpc_subchannel_args *args);
+/// Sets \a addr from \a args.
+void grpc_get_subchannel_address_arg(const grpc_channel_args *args,
+grpc_resolved_address *addr);
+/// Returns a new channel arg encoding the subchannel address as a string.
+/// Caller is responsible for freeing the string.
+grpc_arg grpc_create_subchannel_address_arg(const grpc_resolved_address *addr);
 #endif /* GRPC_CORE_EXT_CLIENT_CHANNEL_SUBCHANNEL_H */
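The two helpers declared above replace the old addr field of grpc_subchannel_args: the target address now travels through the channel args as the string arg GRPC_ARG_SUBCHANNEL_ADDRESS (a URI such as "ipv4:127.0.0.1:50051"). A hedged sketch of the intended call pattern, mirroring the pick_first.c, round_robin.c and chttp2_connector.c changes below (core-internal code; the wrapper function and variable names here are illustrative, not part of this commit):

```c
/* Illustrative core-internal fragment, not part of this commit. */
#include <string.h>

#include <grpc/support/alloc.h>

#include "src/core/ext/client_channel/client_channel_factory.h"
#include "src/core/ext/client_channel/subchannel.h"
#include "src/core/lib/channel/channel_args.h"

static grpc_subchannel *subchannel_for_address(
    grpc_exec_ctx *exec_ctx, grpc_client_channel_factory *factory,
    const grpc_channel_args *base_args, const grpc_resolved_address *address) {
  /* Producer side (LB policy): encode the address as a string channel arg. */
  grpc_arg addr_arg = grpc_create_subchannel_address_arg(address);
  grpc_channel_args *new_args =
      grpc_channel_args_copy_and_add(base_args, &addr_arg, 1);
  gpr_free(addr_arg.value.string); /* caller frees the URI string */

  grpc_subchannel_args sc_args;
  memset(&sc_args, 0, sizeof(sc_args));
  sc_args.args = new_args;
  grpc_subchannel *subchannel = grpc_client_channel_factory_create_subchannel(
      exec_ctx, factory, &sc_args);
  grpc_channel_args_destroy(exec_ctx, new_args);

  /* Consumer side (connector) later recovers the sockaddr from the args:
       grpc_resolved_address addr;
       grpc_get_subchannel_address_arg(channel_args, &addr);              */
  return subchannel;
}
```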

src/core/ext/client_channel/subchannel_index.c

@@ -86,11 +86,6 @@ static grpc_subchannel_key *create_key(
 } else {
 k->args.filters = NULL;
 }
-k->args.addr = gpr_malloc(sizeof(grpc_resolved_address));
-k->args.addr->len = args->addr->len;
-if (k->args.addr->len > 0) {
-memcpy(k->args.addr, args->addr, sizeof(grpc_resolved_address));
-}
 k->args.args = copy_channel_args(args->args);
 return k;
 }
@@ -108,14 +103,8 @@ static int subchannel_key_compare(grpc_subchannel_key *a,
 grpc_subchannel_key *b) {
 int c = GPR_ICMP(a->connector, b->connector);
 if (c != 0) return c;
-c = GPR_ICMP(a->args.addr->len, b->args.addr->len);
-if (c != 0) return c;
 c = GPR_ICMP(a->args.filter_count, b->args.filter_count);
 if (c != 0) return c;
-if (a->args.addr->len) {
-c = memcmp(a->args.addr->addr, b->args.addr->addr, a->args.addr->len);
-if (c != 0) return c;
-}
 if (a->args.filter_count > 0) {
 c = memcmp(a->args.filters, b->args.filters,
 a->args.filter_count * sizeof(*a->args.filters));
@@ -129,7 +118,6 @@ void grpc_subchannel_key_destroy(grpc_exec_ctx *exec_ctx,
 grpc_connector_unref(exec_ctx, k->connector);
 gpr_free((grpc_channel_args *)k->args.filters);
 grpc_channel_args_destroy(exec_ctx, (grpc_channel_args *)k->args.args);
-gpr_free(k->args.addr);
 gpr_free(k);
 }

src/core/ext/lb_policy/pick_first/pick_first.c

@@ -36,7 +36,9 @@
 #include <grpc/support/alloc.h>
 #include "src/core/ext/client_channel/lb_policy_registry.h"
+#include "src/core/ext/client_channel/subchannel.h"
 #include "src/core/lib/channel/channel_args.h"
+#include "src/core/lib/iomgr/sockaddr_utils.h"
 #include "src/core/lib/transport/connectivity_state.h"
 typedef struct pending_pick {
@@ -466,11 +468,15 @@ static grpc_lb_policy *create_pick_first(grpc_exec_ctx *exec_ctx,
 }
 memset(&sc_args, 0, sizeof(grpc_subchannel_args));
-sc_args.addr = &addresses->addresses[i].address;
-sc_args.args = args->args;
+grpc_arg addr_arg =
+grpc_create_subchannel_address_arg(&addresses->addresses[i].address);
+grpc_channel_args *new_args =
+grpc_channel_args_copy_and_add(args->args, &addr_arg, 1);
+gpr_free(addr_arg.value.string);
+sc_args.args = new_args;
 grpc_subchannel *subchannel = grpc_client_channel_factory_create_subchannel(
 exec_ctx, args->client_channel_factory, &sc_args);
+grpc_channel_args_destroy(exec_ctx, new_args);
 if (subchannel != NULL) {
 p->subchannels[subchannel_idx++] = subchannel;

src/core/ext/lb_policy/round_robin/round_robin.c

@@ -64,8 +64,10 @@
 #include <grpc/support/alloc.h>
 #include "src/core/ext/client_channel/lb_policy_registry.h"
+#include "src/core/ext/client_channel/subchannel.h"
 #include "src/core/lib/channel/channel_args.h"
 #include "src/core/lib/debug/trace.h"
+#include "src/core/lib/iomgr/sockaddr_utils.h"
 #include "src/core/lib/transport/connectivity_state.h"
 #include "src/core/lib/transport/static_metadata.h"
@@ -729,11 +731,15 @@ static grpc_lb_policy *round_robin_create(grpc_exec_ctx *exec_ctx,
 if (addresses->addresses[i].is_balancer) continue;
 memset(&sc_args, 0, sizeof(grpc_subchannel_args));
-sc_args.addr = &addresses->addresses[i].address;
-sc_args.args = args->args;
+grpc_arg addr_arg =
+grpc_create_subchannel_address_arg(&addresses->addresses[i].address);
+grpc_channel_args *new_args =
+grpc_channel_args_copy_and_add(args->args, &addr_arg, 1);
+gpr_free(addr_arg.value.string);
+sc_args.args = new_args;
 grpc_subchannel *subchannel = grpc_client_channel_factory_create_subchannel(
 exec_ctx, args->client_channel_factory, &sc_args);
+grpc_channel_args_destroy(exec_ctx, new_args);
 if (subchannel != NULL) {
 subchannel_data *sd = gpr_malloc(sizeof(*sd));

src/core/ext/transport/chttp2/client/chttp2_connector.c

@@ -43,6 +43,7 @@
 #include "src/core/ext/client_channel/connector.h"
 #include "src/core/ext/client_channel/http_connect_handshaker.h"
+#include "src/core/ext/client_channel/subchannel.h"
 #include "src/core/ext/transport/chttp2/transport/chttp2_transport.h"
 #include "src/core/lib/channel/channel_args.h"
 #include "src/core/lib/channel/handshaker.h"
@@ -220,6 +221,8 @@ static void chttp2_connector_connect(grpc_exec_ctx *exec_ctx,
 grpc_connect_out_args *result,
 grpc_closure *notify) {
 chttp2_connector *c = (chttp2_connector *)con;
+grpc_resolved_address addr;
+grpc_get_subchannel_address_arg(args->channel_args, &addr);
 gpr_mu_lock(&c->mu);
 GPR_ASSERT(c->notify == NULL);
 c->notify = notify;
@@ -231,8 +234,8 @@ static void chttp2_connector_connect(grpc_exec_ctx *exec_ctx,
 GPR_ASSERT(!c->connecting);
 c->connecting = true;
 grpc_tcp_client_connect(exec_ctx, &c->connected, &c->endpoint,
-args->interested_parties, args->channel_args,
-args->addr, args->deadline);
+args->interested_parties, args->channel_args, &addr,
+args->deadline);
 gpr_mu_unlock(&c->mu);
 }

src/php/README.md

@@ -163,6 +163,13 @@ of this repo. The plugin can be found in the `bins/opt` directory. We are
 planning to provide a better way to download and install the plugin
 in the future.
+You can also just build the gRPC PHP protoc plugin by running:
+```sh
+$ cd grpc
+$ make grpc_php_plugin
+```
 ### Client Stub

src/proto/grpc/testing/BUILD

@@ -1,3 +1,33 @@
+# Copyright 2017, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+licenses(["notice"]) # 3-clause BSD
+
 package(default_visibility = ["//visibility:public"])

src/proto/grpc/testing/duplicate/BUILD

@@ -1,3 +1,33 @@
+# Copyright 2017, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+licenses(["notice"]) # 3-clause BSD
+
 package(default_visibility = ["//visibility:public"])

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Patches the spawn() command for windows compilers. """Patches the spawn() command for windows compilers.
Windows has an 8191 character command line limit, but some compilers Windows has an 8191 character command line limit, but some compilers
@@ -45,6 +44,7 @@ MAX_COMMAND_LENGTH = 8191
_classic_spawn = ccompiler.CCompiler.spawn


def _commandfile_spawn(self, command):
    command_length = sum([len(arg) for arg in command])
    if os.name == 'nt' and command_length > MAX_COMMAND_LENGTH:

@@ -56,7 +56,9 @@ def _commandfile_spawn(self, command):
        command_filename = os.path.abspath(
            os.path.join(temporary_directory, 'command'))
        with open(command_filename, 'w') as command_file:
            escaped_args = [
                '"' + arg.replace('\\', '\\\\') + '"' for arg in command[1:]
            ]
            command_file.write(' '.join(escaped_args))
        modified_command = command[:1] + ['@{}'.format(command_filename)]
        try:
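The hunk above writes the escaped arguments to a file and hands the compiler a single '@file' argument instead. A minimal, self-contained sketch of the same response-file idea, independent of distutils (the function name and its `spawn` parameter are illustrative, not part of this diff):

# Hedged sketch of the response-file workaround: if the command line would
# exceed the Windows limit, write the arguments to a file and pass '@file'.
import os
import shutil
import tempfile

MAX_COMMAND_LENGTH = 8191


def spawn_with_command_file(spawn, command):
    """Call `spawn` directly, or via an '@file' argument when the line is too long."""
    command_length = sum(len(arg) for arg in command)
    if os.name == 'nt' and command_length > MAX_COMMAND_LENGTH:
        temporary_directory = tempfile.mkdtemp()
        try:
            command_filename = os.path.join(temporary_directory, 'command')
            with open(command_filename, 'w') as command_file:
                escaped = ['"' + arg.replace('\\', '\\\\') + '"'
                           for arg in command[1:]]
                command_file.write(' '.join(escaped))
            spawn(command[:1] + ['@{}'.format(command_filename)])
        finally:
            shutil.rmtree(temporary_directory)
    else:
        spawn(command)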

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides distutils command classes for the GRPC Python setup process."""

import distutils
@@ -112,17 +111,15 @@ def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
        url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
        bdist_data = request.urlopen(url).read()
    except IOError as error:
        raise CommandError('{}\n\nCould not find the bdist {}: {}'.format(
            traceback.format_exc(), decorated_path, error.message))

    # Our chosen local bdist path.
    bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
    try:
        with open(bdist_path, 'w') as bdist_file:
            bdist_file.write(bdist_data)
    except IOError as error:
        raise CommandError('{}\n\nCould not write grpcio bdist: {}'
                           .format(traceback.format_exc(), error.message))
    return bdist_path
@@ -149,15 +146,17 @@ class SphinxDocumentation(setuptools.Command):
        sys.path.append(src_dir)
        sphinx.apidoc.main([
            '', '--force', '--full', '-H', metadata.name, '-A', metadata.author,
            '-V', metadata.version, '-R', metadata.version, '-o',
            os.path.join('doc', 'src'), src_dir
        ])
        conf_filepath = os.path.join('doc', 'src', 'conf.py')
        with open(conf_filepath, 'a') as conf_file:
            conf_file.write(CONF_PY_ADDENDUM)
        glossary_filepath = os.path.join('doc', 'src', 'grpc.rst')
        with open(glossary_filepath, 'a') as glossary_filepath:
            glossary_filepath.write(API_GLOSSARY)
        sphinx.main(
            ['', os.path.join('doc', 'src'), os.path.join('doc', 'build')])
class BuildProjectMetadata(setuptools.Command):

@@ -173,7 +172,8 @@ class BuildProjectMetadata(setuptools.Command):
        pass

    def run(self):
        with open(os.path.join(PYTHON_STEM, 'grpc/_grpcio_metadata.py'),
                  'w') as module_file:
            module_file.write('__version__ = """{}"""'.format(
                self.distribution.get_version()))
@@ -194,6 +194,7 @@ def _poison_extensions(extensions, message):
    for extension in extensions:
        extension.sources = [poison_filename]


def check_and_update_cythonization(extensions):
    """Replace .pyx files with their generated counterparts and return whether or
    not cythonization still needs to occur."""

@@ -203,9 +204,12 @@ def check_and_update_cythonization(extensions):
        for source in extension.sources:
            base, file_ext = os.path.splitext(source)
            if file_ext == '.pyx':
                generated_pyx_source = next((base + gen_ext
                                             for gen_ext in (
                                                 '.c',
                                                 '.cpp',)
                                             if os.path.isfile(base + gen_ext)),
                                            None)
                if generated_pyx_source:
                    generated_pyx_sources.append(generated_pyx_source)
                else:
@@ -217,6 +221,7 @@ def check_and_update_cythonization(extensions):
    sys.stderr.write('Found cython-generated files...\n')
    return True


def try_cythonize(extensions, linetracing=False, mandatory=True):
    """Attempt to cythonize the extensions.

@@ -236,7 +241,8 @@ def try_cythonize(extensions, linetracing=False, mandatory=True):
            "Poisoning extension sources to disallow extension commands...")
        _poison_extensions(
            extensions,
            "Extensions have been poisoned due to missing Cython-generated code."
        )
        return extensions
    cython_compiler_directives = {}
    if linetracing:

@@ -245,10 +251,11 @@ def try_cythonize(extensions, linetracing=False, mandatory=True):
    return Cython.Build.cythonize(
        extensions,
        include_path=[
            include_dir
            for extension in extensions
            for include_dir in extension.include_dirs
        ] + [CYTHON_STEM],
        compiler_directives=cython_compiler_directives)
class BuildExt(build_ext.build_ext):

@@ -264,10 +271,12 @@ class BuildExt(build_ext.build_ext):
        compiler = self.compiler.compiler_type
        if compiler in BuildExt.C_OPTIONS:
            for extension in self.extensions:
                extension.extra_compile_args += list(BuildExt.C_OPTIONS[
                    compiler])
        if compiler in BuildExt.LINK_OPTIONS:
            for extension in self.extensions:
                extension.extra_link_args += list(BuildExt.LINK_OPTIONS[
                    compiler])
        if not check_and_update_cythonization(self.extensions):
            self.extensions = try_cythonize(self.extensions)
        try:

@@ -275,8 +284,8 @@ class BuildExt(build_ext.build_ext):
        except Exception as error:
            formatted_exception = traceback.format_exc()
            support.diagnose_build_ext_error(self, error, formatted_exception)
            raise CommandError("Failed `build_ext` step:\n{}".format(
                formatted_exception))


class Gather(setuptools.Command):

@@ -298,6 +307,7 @@ class Gather(setuptools.Command):
    def run(self):
        if self.install and self.distribution.install_requires:
            self.distribution.fetch_build_eggs(
                self.distribution.install_requires)
        if self.test and self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)
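The command classes touched above are ordinary setuptools commands; a hedged sketch of how classes like these are typically wired into a setup.py via cmdclass (the sys.path insertion mirrors how grpcio's setup.py makes src/python/grpcio importable, while the package name, version, and command keys are placeholders):

# Hedged sketch: registering the custom commands with setuptools.
import os
import sys

import setuptools

sys.path.insert(0, os.path.abspath(os.path.join('src', 'python', 'grpcio')))
import commands  # assumed to resolve to src/python/grpcio/commands.py

setuptools.setup(
    name='example-package',
    version='0.0.0',
    cmdclass={
        'doc': commands.SphinxDocumentation,
        'build_project_metadata': commands.BuildProjectMetadata,
        'build_ext': commands.BuildExt,
        'gather': commands.Gather,
    },
)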

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""gRPC's Python API."""

import abc

@@ -37,7 +36,6 @@ import six
from grpc._cython import cygrpc as _cygrpc

############################## Future Interface ###############################
@@ -216,8 +214,8 @@ class ChannelConnectivity(enum.Enum):
    IDLE = (_cygrpc.ConnectivityState.idle, 'idle')
    CONNECTING = (_cygrpc.ConnectivityState.connecting, 'connecting')
    READY = (_cygrpc.ConnectivityState.ready, 'ready')
    TRANSIENT_FAILURE = (_cygrpc.ConnectivityState.transient_failure,
                         'transient failure')
    SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, 'shutdown')
@@ -227,18 +225,17 @@ class StatusCode(enum.Enum):
    OK = (_cygrpc.StatusCode.ok, 'ok')
    CANCELLED = (_cygrpc.StatusCode.cancelled, 'cancelled')
    UNKNOWN = (_cygrpc.StatusCode.unknown, 'unknown')
    INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, 'invalid argument')
    DEADLINE_EXCEEDED = (_cygrpc.StatusCode.deadline_exceeded,
                         'deadline exceeded')
    NOT_FOUND = (_cygrpc.StatusCode.not_found, 'not found')
    ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, 'already exists')
    PERMISSION_DENIED = (_cygrpc.StatusCode.permission_denied,
                         'permission denied')
    RESOURCE_EXHAUSTED = (_cygrpc.StatusCode.resource_exhausted,
                          'resource exhausted')
    FAILED_PRECONDITION = (_cygrpc.StatusCode.failed_precondition,
                           'failed precondition')
    ABORTED = (_cygrpc.StatusCode.aborted, 'aborted')
    OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, 'out of range')
    UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, 'unimplemented')
@@ -523,8 +520,11 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
    """Affords invoking a stream-unary RPC in any call style."""

    @abc.abstractmethod
    def __call__(self,
                 request_iterator,
                 timeout=None,
                 metadata=None,
                 credentials=None):
        """Synchronously invokes the underlying RPC.

        Args:

@@ -546,8 +546,11 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def with_call(self,
                  request_iterator,
                  timeout=None,
                  metadata=None,
                  credentials=None):
        """Synchronously invokes the underlying RPC.

        Args:
@@ -568,8 +571,11 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def future(self,
               request_iterator,
               timeout=None,
               metadata=None,
               credentials=None):
        """Asynchronously invokes the underlying RPC.

        Args:

@@ -592,8 +598,11 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
    """Affords invoking a stream-stream RPC in any call style."""

    @abc.abstractmethod
    def __call__(self,
                 request_iterator,
                 timeout=None,
                 metadata=None,
                 credentials=None):
        """Invokes the underlying RPC.

        Args:
@@ -644,8 +653,10 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def unary_unary(self,
                    method,
                    request_serializer=None,
                    response_deserializer=None):
        """Creates a UnaryUnaryMultiCallable for a unary-unary method.

        Args:

@@ -661,8 +672,10 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def unary_stream(self,
                     method,
                     request_serializer=None,
                     response_deserializer=None):
        """Creates a UnaryStreamMultiCallable for a unary-stream method.

        Args:

@@ -678,8 +691,10 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def stream_unary(self,
                     method,
                     request_serializer=None,
                     response_deserializer=None):
        """Creates a StreamUnaryMultiCallable for a stream-unary method.

        Args:

@@ -695,8 +710,10 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def stream_stream(self,
                      method,
                      request_serializer=None,
                      response_deserializer=None):
        """Creates a StreamStreamMultiCallable for a stream-stream method.

        Args:
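These factory methods are what generated stubs call under the hood. A minimal usage sketch invoking a unary-unary method directly on a channel; the target address, method path, and helloworld_pb2 module are illustrative assumptions borrowed from the hello-world example, not part of this diff:

# Hedged sketch: calling a unary-unary method without a generated stub.
import grpc

import helloworld_pb2  # assumed: generated from examples/protos/helloworld.proto

channel = grpc.insecure_channel('localhost:50051')
say_hello = channel.unary_unary(
    '/helloworld.Greeter/SayHello',
    request_serializer=helloworld_pb2.HelloRequest.SerializeToString,
    response_deserializer=helloworld_pb2.HelloReply.FromString)
# Blocks until the reply arrives or the 5-second deadline expires.
reply = say_hello(helloworld_pb2.HelloRequest(name='world'), timeout=5)
print(reply.message)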
@@ -973,8 +990,9 @@ class Server(six.with_metaclass(abc.ABCMeta)):
################################# Functions ################################


def unary_unary_rpc_method_handler(behavior,
                                   request_deserializer=None,
                                   response_serializer=None):
    """Creates an RpcMethodHandler for a unary-unary RPC method.

    Args:

@@ -988,13 +1006,14 @@ def unary_unary_rpc_method_handler(
        parameters.
    """
    from grpc import _utilities
    return _utilities.RpcMethodHandler(False, False, request_deserializer,
                                       response_serializer, behavior, None,
                                       None, None)
def unary_stream_rpc_method_handler(behavior,
                                    request_deserializer=None,
                                    response_serializer=None):
    """Creates an RpcMethodHandler for a unary-stream RPC method.

    Args:

@@ -1008,13 +1027,14 @@ def unary_stream_rpc_method_handler(
        given parameters.
    """
    from grpc import _utilities
    return _utilities.RpcMethodHandler(False, True, request_deserializer,
                                       response_serializer, None, behavior,
                                       None, None)


def stream_unary_rpc_method_handler(behavior,
                                    request_deserializer=None,
                                    response_serializer=None):
    """Creates an RpcMethodHandler for a stream-unary RPC method.

    Args:

@@ -1028,13 +1048,14 @@ def stream_unary_rpc_method_handler(
        given parameters.
    """
    from grpc import _utilities
    return _utilities.RpcMethodHandler(True, False, request_deserializer,
                                       response_serializer, None, None,
                                       behavior, None)


def stream_stream_rpc_method_handler(behavior,
                                     request_deserializer=None,
                                     response_serializer=None):
    """Creates an RpcMethodHandler for a stream-stream RPC method.

    Args:

@@ -1049,8 +1070,8 @@ def stream_stream_rpc_method_handler(
        given parameters.
    """
    from grpc import _utilities
    return _utilities.RpcMethodHandler(True, True, request_deserializer,
                                       response_serializer, None, None, None,
                                       behavior)
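A hedged sketch of how these handler factories combine with method_handlers_generic_handler to serve a method without generated servicer code; the service name 'example.Echo', the echo() behavior, and the port are illustrative:

# Hedged sketch: serving one unary-unary method via the handler factories.
from concurrent import futures

import grpc


def echo(request, servicer_context):
    # With no deserializer/serializer configured, request is raw bytes and is
    # returned unchanged.
    return request


echo_handler = grpc.unary_unary_rpc_method_handler(echo)
generic_handler = grpc.method_handlers_generic_handler(
    'example.Echo', {'Echo': echo_handler})

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
server.add_generic_rpc_handlers((generic_handler,))
server.add_insecure_port('[::]:50051')
server.start()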
@@ -1069,8 +1090,9 @@ def method_handlers_generic_handler(service, method_handlers):
    return _utilities.DictionaryGenericHandler(service, method_handlers)


def ssl_channel_credentials(root_certificates=None,
                            private_key=None,
                            certificate_chain=None):
    """Creates a ChannelCredentials for use with an SSL-enabled Channel.

    Args:
@@ -1112,8 +1134,8 @@ def metadata_call_credentials(metadata_plugin, name=None):
    else:
        effective_name = name
    return CallCredentials(
        _plugin_wrapping.call_credentials_metadata_plugin(metadata_plugin,
                                                          effective_name))
def access_token_call_credentials(access_token):

@@ -1164,12 +1186,12 @@ def composite_channel_credentials(channel_credentials, *call_credentials):
        single_call_credentials._credentials
        for single_call_credentials in call_credentials)
    return ChannelCredentials(
        _credential_composition.channel(channel_credentials._credentials,
                                        cygrpc_call_credentials))


def ssl_server_credentials(private_key_certificate_chain_pairs,
                           root_certificates=None,
                           require_client_auth=False):
    """Creates a ServerCredentials for use with an SSL-enabled Server.

@@ -1192,14 +1214,14 @@ def ssl_server_credentials(
            'At least one private key-certificate chain pair is required!')
    elif require_client_auth and root_certificates is None:
        raise ValueError(
            'Illegal to require client auth without providing root certificates!'
        )
    else:
        return ServerCredentials(
            _cygrpc.server_credentials_ssl(root_certificates, [
                _cygrpc.SslPemKeyCertPair(key, pem)
                for key, pem in private_key_certificate_chain_pairs
            ], require_client_auth))
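A hedged usage sketch of the credential-construction path above, with placeholder file names for the PEM inputs:

# Hedged sketch: building SSL server credentials from one key/certificate pair.
import grpc

with open('server.key', 'rb') as key_file:
    private_key = key_file.read()
with open('server.crt', 'rb') as cert_file:
    certificate_chain = cert_file.read()

server_credentials = grpc.ssl_server_credentials(
    [(private_key, certificate_chain)])
# With require_client_auth=True, root certificates must also be supplied,
# matching the ValueError raised above.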
def channel_ready_future(channel):

@@ -1270,13 +1292,12 @@ def server(thread_pool, handlers=None, options=None):
        A Server with which RPCs can be serviced.
    """
    from grpc import _server
    return _server.Server(thread_pool, () if handlers is None else handlers, ()
                          if options is None else options)
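On the client side, channel_ready_future (mentioned just above) is the usual way to block until a connection to such a server is live. A minimal sketch; the target address and timeout are illustrative:

# Hedged sketch: waiting for a channel to become READY before issuing RPCs.
import grpc

channel = grpc.insecure_channel('localhost:50051')
ready_future = grpc.channel_ready_future(channel)
try:
    # result() blocks until the channel is ready or the timeout elapses.
    ready_future.result(timeout=10)
except grpc.FutureTimeoutError:
    print('channel was not ready within 10 seconds')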
################################### __all__ #################################

__all__ = (
    'FutureTimeoutError',
    'FutureCancelledError',

@@ -1317,13 +1338,10 @@ __all__ = (
    'channel_ready_future',
    'insecure_channel',
    'secure_channel',
    'server',)

############################### Extension Shims ################################

# Here to maintain backwards compatibility; avoid using these in new code!
try:
    import grpc_tools

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""GRPCAuthMetadataPlugins for standard authentication."""

import inspect

@@ -58,11 +57,13 @@ class GoogleCallCredentials(grpc.AuthMetadataPlugin):
    def __call__(self, context, callback):
        # MetadataPlugins cannot block (see grpc.beta.interfaces.py)
        if self._is_jwt:
            future = self._pool.submit(
                self._credentials.get_access_token,
                additional_claims={'aud': context.service_url})
        else:
            future = self._pool.submit(self._credentials.get_access_token)
        future.add_done_callback(
            lambda x: self._get_token_callback(callback, x))

    def _get_token_callback(self, callback, future):
        try:
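GoogleCallCredentials above is one AuthMetadataPlugin; a hedged sketch of a custom plugin wrapped via grpc.metadata_call_credentials, where the class name, header key, and token value are all illustrative:

# Hedged sketch: a custom AuthMetadataPlugin that attaches a static token.
import grpc


class StaticTokenAuth(grpc.AuthMetadataPlugin):

    def __call__(self, context, callback):
        # Plugins must not block; report metadata (or an error) via callback.
        callback((('authorization', 'Bearer example-token'),), None)


call_credentials = grpc.metadata_call_credentials(
    StaticTokenAuth(), name='static_token_auth')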

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Invocation-side implementation of gRPC Python."""

import sys
@@ -52,26 +51,22 @@ _UNARY_UNARY_INITIAL_DUE = (
    cygrpc.OperationType.send_close_from_client,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_message,
    cygrpc.OperationType.receive_status_on_client,)
_UNARY_STREAM_INITIAL_DUE = (
    cygrpc.OperationType.send_initial_metadata,
    cygrpc.OperationType.send_message,
    cygrpc.OperationType.send_close_from_client,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_status_on_client,)
_STREAM_UNARY_INITIAL_DUE = (
    cygrpc.OperationType.send_initial_metadata,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_message,
    cygrpc.OperationType.receive_status_on_client,)
_STREAM_STREAM_INITIAL_DUE = (
    cygrpc.OperationType.send_initial_metadata,
    cygrpc.OperationType.receive_initial_metadata,
    cygrpc.OperationType.receive_status_on_client,)

_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
    'Exception calling channel subscription callback!')
@@ -100,23 +95,28 @@ def _wait_once_until(condition, until):
    else:
        condition.wait(timeout=remaining)


_INTERNAL_CALL_ERROR_MESSAGE_FORMAT = (
    'Internal gRPC call error %d. ' +
    'Please report to https://github.com/grpc/grpc/issues')


def _check_call_error(call_error, metadata):
    if call_error == cygrpc.CallError.invalid_metadata:
        raise ValueError('metadata was invalid: %s' % metadata)
    elif call_error != cygrpc.CallError.ok:
        raise ValueError(_INTERNAL_CALL_ERROR_MESSAGE_FORMAT % call_error)


def _call_error_set_RPCstate(state, call_error, metadata):
    if call_error == cygrpc.CallError.invalid_metadata:
        _abort(state, grpc.StatusCode.INTERNAL,
               'metadata was invalid: %s' % metadata)
    else:
        _abort(state, grpc.StatusCode.INTERNAL,
               _INTERNAL_CALL_ERROR_MESSAGE_FORMAT % call_error)


class _RPCState(object):

    def __init__(self, due, initial_metadata, trailing_metadata, code, details):
@@ -156,8 +156,8 @@ def _handle_event(event, state, response_deserializer):
        elif operation_type == cygrpc.OperationType.receive_message:
            serialized_response = batch_operation.received_message.bytes()
            if serialized_response is not None:
                response = _common.deserialize(serialized_response,
                                               response_deserializer)
                if response is None:
                    details = 'Exception deserializing response!'
                    _abort(state, grpc.StatusCode.INTERNAL, details)

@@ -182,6 +182,7 @@ def _handle_event(event, state, response_deserializer):


def _event_handler(state, call, response_deserializer):

    def handle_event(event):
        with state.condition:
            callbacks = _handle_event(event, state, response_deserializer)

@@ -190,11 +191,12 @@ def _event_handler(state, call, response_deserializer):
        for callback in callbacks:
            callback()
        return call if done else None

    return handle_event


def _consume_request_iterator(request_iterator, state, call,
                              request_serializer):
    event_handler = _event_handler(state, call, None)

    def consume_request_iterator():
@ -206,7 +208,8 @@ def _consume_request_iterator(
except Exception as e: except Exception as e:
logging.exception("Exception iterating requests!") logging.exception("Exception iterating requests!")
call.cancel() call.cancel()
_abort(state, grpc.StatusCode.UNKNOWN, "Exception iterating requests!") _abort(state, grpc.StatusCode.UNKNOWN,
"Exception iterating requests!")
return return
serialized_request = _common.serialize(request, request_serializer) serialized_request = _common.serialize(request, request_serializer)
with state.condition: with state.condition:
@ -217,12 +220,10 @@ def _consume_request_iterator(
_abort(state, grpc.StatusCode.INTERNAL, details) _abort(state, grpc.StatusCode.INTERNAL, details)
return return
else: else:
operations = ( operations = (cygrpc.operation_send_message(
cygrpc.operation_send_message( serialized_request, _EMPTY_FLAGS),)
serialized_request, _EMPTY_FLAGS), call.start_client_batch(
) cygrpc.Operations(operations), event_handler)
call.start_client_batch(cygrpc.Operations(operations),
event_handler)
state.due.add(cygrpc.OperationType.send_message) state.due.add(cygrpc.OperationType.send_message)
while True: while True:
state.condition.wait() state.condition.wait()
@ -236,9 +237,9 @@ def _consume_request_iterator(
with state.condition: with state.condition:
if state.code is None: if state.code is None:
operations = ( operations = (
cygrpc.operation_send_close_from_client(_EMPTY_FLAGS), cygrpc.operation_send_close_from_client(_EMPTY_FLAGS),)
) call.start_client_batch(
call.start_client_batch(cygrpc.Operations(operations), event_handler) cygrpc.Operations(operations), event_handler)
state.due.add(cygrpc.OperationType.send_close_from_client) state.due.add(cygrpc.OperationType.send_close_from_client)
def stop_consumption_thread(timeout): def stop_consumption_thread(timeout):
@ -337,8 +338,8 @@ class _Rendezvous(grpc.RpcError, grpc.Future, grpc.Call):
def _next(self): def _next(self):
with self._state.condition: with self._state.condition:
if self._state.code is None: if self._state.code is None:
event_handler = _event_handler( event_handler = _event_handler(self._state, self._call,
self._state, self._call, self._response_deserializer) self._response_deserializer)
self._call.start_client_batch( self._call.start_client_batch(
cygrpc.Operations( cygrpc.Operations(
(cygrpc.operation_receive_message(_EMPTY_FLAGS),)), (cygrpc.operation_receive_message(_EMPTY_FLAGS),)),
@@ -438,8 +439,8 @@ def _start_unary_request(request, timeout, request_serializer):
    deadline, deadline_timespec = _deadline(timeout)
    serialized_request = _common.serialize(request, request_serializer)
    if serialized_request is None:
        state = _RPCState((), _EMPTY_METADATA, _EMPTY_METADATA,
                          grpc.StatusCode.INTERNAL,
                          'Exception serializing request!')
        rendezvous = _Rendezvous(state, None, None, deadline)
        return deadline, deadline_timespec, None, rendezvous

@@ -460,8 +461,7 @@ def _end_unary_response_blocking(state, with_call, deadline):
class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):

    def __init__(self, channel, managed_call, method, request_serializer,
                 response_deserializer):
        self._channel = channel
        self._managed_call = managed_call

@@ -483,8 +483,7 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
            cygrpc.operation_send_close_from_client(_EMPTY_FLAGS),
            cygrpc.operation_receive_initial_metadata(_EMPTY_FLAGS),
            cygrpc.operation_receive_message(_EMPTY_FLAGS),
            cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),)
        return state, operations, deadline, deadline_timespec, None
def _blocking(self, request, timeout, metadata, credentials): def _blocking(self, request, timeout, metadata, credentials):
@ -494,21 +493,26 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
raise rendezvous raise rendezvous
else: else:
completion_queue = cygrpc.CompletionQueue() completion_queue = cygrpc.CompletionQueue()
call = self._channel.create_call( call = self._channel.create_call(None, 0, completion_queue,
None, 0, completion_queue, self._method, None, deadline_timespec) self._method, None,
deadline_timespec)
if credentials is not None: if credentials is not None:
call.set_credentials(credentials._credentials) call.set_credentials(credentials._credentials)
call_error = call.start_client_batch(cygrpc.Operations(operations), None) call_error = call.start_client_batch(
cygrpc.Operations(operations), None)
_check_call_error(call_error, metadata) _check_call_error(call_error, metadata)
_handle_event(completion_queue.poll(), state, self._response_deserializer) _handle_event(completion_queue.poll(), state,
self._response_deserializer)
return state, deadline return state, deadline
def __call__(self, request, timeout=None, metadata=None, credentials=None): def __call__(self, request, timeout=None, metadata=None, credentials=None):
state, deadline, = self._blocking(request, timeout, metadata, credentials) state, deadline, = self._blocking(request, timeout, metadata,
credentials)
return _end_unary_response_blocking(state, False, deadline) return _end_unary_response_blocking(state, False, deadline)
def with_call(self, request, timeout=None, metadata=None, credentials=None): def with_call(self, request, timeout=None, metadata=None, credentials=None):
state, deadline, = self._blocking(request, timeout, metadata, credentials) state, deadline, = self._blocking(request, timeout, metadata,
credentials)
return _end_unary_response_blocking(state, True, deadline) return _end_unary_response_blocking(state, True, deadline)
def future(self, request, timeout=None, metadata=None, credentials=None): def future(self, request, timeout=None, metadata=None, credentials=None):
@ -517,25 +521,26 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
if rendezvous: if rendezvous:
return rendezvous return rendezvous
else: else:
call, drive_call = self._managed_call( call, drive_call = self._managed_call(None, 0, self._method, None,
None, 0, self._method, None, deadline_timespec) deadline_timespec)
if credentials is not None: if credentials is not None:
call.set_credentials(credentials._credentials) call.set_credentials(credentials._credentials)
event_handler = _event_handler(state, call, self._response_deserializer) event_handler = _event_handler(state, call,
self._response_deserializer)
with state.condition: with state.condition:
call_error = call.start_client_batch(cygrpc.Operations(operations), call_error = call.start_client_batch(
event_handler) cygrpc.Operations(operations), event_handler)
if call_error != cygrpc.CallError.ok: if call_error != cygrpc.CallError.ok:
_call_error_set_RPCstate(state, call_error, metadata) _call_error_set_RPCstate(state, call_error, metadata)
return _Rendezvous(state, None, None, deadline) return _Rendezvous(state, None, None, deadline)
drive_call() drive_call()
return _Rendezvous(state, call, self._response_deserializer, deadline) return _Rendezvous(state, call, self._response_deserializer,
deadline)
class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable): class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
def __init__( def __init__(self, channel, managed_call, method, request_serializer,
self, channel, managed_call, method, request_serializer,
response_deserializer): response_deserializer):
self._channel = channel self._channel = channel
self._managed_call = managed_call self._managed_call = managed_call
@ -550,36 +555,37 @@ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
raise rendezvous raise rendezvous
else: else:
state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None) state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
call, drive_call = self._managed_call( call, drive_call = self._managed_call(None, 0, self._method, None,
None, 0, self._method, None, deadline_timespec) deadline_timespec)
if credentials is not None: if credentials is not None:
call.set_credentials(credentials._credentials) call.set_credentials(credentials._credentials)
event_handler = _event_handler(state, call, self._response_deserializer) event_handler = _event_handler(state, call,
self._response_deserializer)
with state.condition: with state.condition:
call.start_client_batch( call.start_client_batch(
cygrpc.Operations( cygrpc.Operations((
(cygrpc.operation_receive_initial_metadata(_EMPTY_FLAGS),)), cygrpc.operation_receive_initial_metadata(_EMPTY_FLAGS),
event_handler) )), event_handler)
operations = ( operations = (
cygrpc.operation_send_initial_metadata( cygrpc.operation_send_initial_metadata(
_common.cygrpc_metadata(metadata), _EMPTY_FLAGS), _common.cygrpc_metadata(metadata), _EMPTY_FLAGS),
cygrpc.operation_send_message(serialized_request, _EMPTY_FLAGS), cygrpc.operation_send_message(serialized_request,
_EMPTY_FLAGS),
cygrpc.operation_send_close_from_client(_EMPTY_FLAGS), cygrpc.operation_send_close_from_client(_EMPTY_FLAGS),
cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS), cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),)
) call_error = call.start_client_batch(
call_error = call.start_client_batch(cygrpc.Operations(operations), cygrpc.Operations(operations), event_handler)
event_handler)
if call_error != cygrpc.CallError.ok: if call_error != cygrpc.CallError.ok:
_call_error_set_RPCstate(state, call_error, metadata) _call_error_set_RPCstate(state, call_error, metadata)
return _Rendezvous(state, None, None, deadline) return _Rendezvous(state, None, None, deadline)
drive_call() drive_call()
return _Rendezvous(state, call, self._response_deserializer, deadline) return _Rendezvous(state, call, self._response_deserializer,
deadline)
class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable): class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
def __init__( def __init__(self, channel, managed_call, method, request_serializer,
self, channel, managed_call, method, request_serializer,
response_deserializer): response_deserializer):
self._channel = channel self._channel = channel
self._managed_call = managed_call self._managed_call = managed_call
@ -591,8 +597,8 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
deadline, deadline_timespec = _deadline(timeout) deadline, deadline_timespec = _deadline(timeout)
state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None) state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
completion_queue = cygrpc.CompletionQueue() completion_queue = cygrpc.CompletionQueue()
call = self._channel.create_call( call = self._channel.create_call(None, 0, completion_queue,
None, 0, completion_queue, self._method, None, deadline_timespec) self._method, None, deadline_timespec)
if credentials is not None: if credentials is not None:
call.set_credentials(credentials._credentials) call.set_credentials(credentials._credentials)
with state.condition: with state.condition:
@ -604,12 +610,12 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
cygrpc.operation_send_initial_metadata( cygrpc.operation_send_initial_metadata(
_common.cygrpc_metadata(metadata), _EMPTY_FLAGS), _common.cygrpc_metadata(metadata), _EMPTY_FLAGS),
cygrpc.operation_receive_message(_EMPTY_FLAGS), cygrpc.operation_receive_message(_EMPTY_FLAGS),
cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS), cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),)
) call_error = call.start_client_batch(
call_error = call.start_client_batch(cygrpc.Operations(operations), None) cygrpc.Operations(operations), None)
_check_call_error(call_error, metadata) _check_call_error(call_error, metadata)
_consume_request_iterator( _consume_request_iterator(request_iterator, state, call,
request_iterator, state, call, self._request_serializer) self._request_serializer)
while True: while True:
event = completion_queue.poll() event = completion_queue.poll()
with state.condition: with state.condition:
@ -619,24 +625,33 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
break break
return state, deadline return state, deadline
def __call__( def __call__(self,
self, request_iterator, timeout=None, metadata=None, credentials=None): request_iterator,
state, deadline, = self._blocking( timeout=None,
request_iterator, timeout, metadata, credentials) metadata=None,
credentials=None):
state, deadline, = self._blocking(request_iterator, timeout, metadata,
credentials)
return _end_unary_response_blocking(state, False, deadline) return _end_unary_response_blocking(state, False, deadline)
def with_call( def with_call(self,
self, request_iterator, timeout=None, metadata=None, credentials=None): request_iterator,
state, deadline, = self._blocking( timeout=None,
request_iterator, timeout, metadata, credentials) metadata=None,
credentials=None):
state, deadline, = self._blocking(request_iterator, timeout, metadata,
credentials)
return _end_unary_response_blocking(state, True, deadline) return _end_unary_response_blocking(state, True, deadline)
def future( def future(self,
self, request_iterator, timeout=None, metadata=None, credentials=None): request_iterator,
timeout=None,
metadata=None,
credentials=None):
deadline, deadline_timespec = _deadline(timeout) deadline, deadline_timespec = _deadline(timeout)
state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None) state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
call, drive_call = self._managed_call( call, drive_call = self._managed_call(None, 0, self._method, None,
None, 0, self._method, None, deadline_timespec) deadline_timespec)
if credentials is not None: if credentials is not None:
call.set_credentials(credentials._credentials) call.set_credentials(credentials._credentials)
event_handler = _event_handler(state, call, self._response_deserializer) event_handler = _event_handler(state, call, self._response_deserializer)
@ -649,23 +664,21 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
cygrpc.operation_send_initial_metadata( cygrpc.operation_send_initial_metadata(
_common.cygrpc_metadata(metadata), _EMPTY_FLAGS), _common.cygrpc_metadata(metadata), _EMPTY_FLAGS),
cygrpc.operation_receive_message(_EMPTY_FLAGS), cygrpc.operation_receive_message(_EMPTY_FLAGS),
cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS), cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),)
) call_error = call.start_client_batch(
call_error = call.start_client_batch(cygrpc.Operations(operations), cygrpc.Operations(operations), event_handler)
event_handler)
if call_error != cygrpc.CallError.ok: if call_error != cygrpc.CallError.ok:
_call_error_set_RPCstate(state, call_error, metadata) _call_error_set_RPCstate(state, call_error, metadata)
return _Rendezvous(state, None, None, deadline) return _Rendezvous(state, None, None, deadline)
drive_call() drive_call()
_consume_request_iterator( _consume_request_iterator(request_iterator, state, call,
request_iterator, state, call, self._request_serializer) self._request_serializer)
return _Rendezvous(state, call, self._response_deserializer, deadline) return _Rendezvous(state, call, self._response_deserializer, deadline)
class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable): class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
def __init__( def __init__(self, channel, managed_call, method, request_serializer,
self, channel, managed_call, method, request_serializer,
response_deserializer): response_deserializer):
self._channel = channel self._channel = channel
self._managed_call = managed_call self._managed_call = managed_call
@ -673,12 +686,15 @@ class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
self._request_serializer = request_serializer self._request_serializer = request_serializer
self._response_deserializer = response_deserializer self._response_deserializer = response_deserializer
def __call__( def __call__(self,
self, request_iterator, timeout=None, metadata=None, credentials=None): request_iterator,
timeout=None,
metadata=None,
credentials=None):
deadline, deadline_timespec = _deadline(timeout) deadline, deadline_timespec = _deadline(timeout)
state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None) state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None)
call, drive_call = self._managed_call( call, drive_call = self._managed_call(None, 0, self._method, None,
None, 0, self._method, None, deadline_timespec) deadline_timespec)
if credentials is not None: if credentials is not None:
call.set_credentials(credentials._credentials) call.set_credentials(credentials._credentials)
event_handler = _event_handler(state, call, self._response_deserializer) event_handler = _event_handler(state, call, self._response_deserializer)
@ -690,16 +706,15 @@ class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
operations = ( operations = (
cygrpc.operation_send_initial_metadata( cygrpc.operation_send_initial_metadata(
_common.cygrpc_metadata(metadata), _EMPTY_FLAGS), _common.cygrpc_metadata(metadata), _EMPTY_FLAGS),
cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS), cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),)
) call_error = call.start_client_batch(
call_error = call.start_client_batch(cygrpc.Operations(operations), cygrpc.Operations(operations), event_handler)
event_handler)
if call_error != cygrpc.CallError.ok: if call_error != cygrpc.CallError.ok:
_call_error_set_RPCstate(state, call_error, metadata) _call_error_set_RPCstate(state, call_error, metadata)
return _Rendezvous(state, None, None, deadline) return _Rendezvous(state, None, None, deadline)
drive_call() drive_call()
_consume_request_iterator( _consume_request_iterator(request_iterator, state, call,
request_iterator, state, call, self._request_serializer) self._request_serializer)
return _Rendezvous(state, call, self._response_deserializer, deadline) return _Rendezvous(state, call, self._response_deserializer, deadline)
@@ -713,6 +728,7 @@ class _ChannelCallState(object):


def _run_channel_spin_thread(state):

    def channel_spin():
        while True:
            event = state.completion_queue.poll()

@@ -736,6 +752,7 @@ def _run_channel_spin_thread(state):


def _channel_managed_call_management(state):

    def create(parent, flags, method, host, deadline):
        """Creates a managed cygrpc.Call and a function to call to drive it.

@@ -754,8 +771,8 @@ def _channel_managed_call_management(state):
          A cygrpc.Call with which to conduct an RPC and a function to call if
            operations are successfully started on the call.
        """
        call = state.channel.create_call(parent, flags, state.completion_queue,
                                         method, host, deadline)

        def drive():
            with state.lock:

@@ -766,6 +783,7 @@ def _channel_managed_call_management(state):
            state.managed_calls.add(call)

        return call, drive

    return create
@@ -810,7 +828,10 @@ def _deliver(state, initial_connectivity, initial_callbacks):


def _spawn_delivery(state, callbacks):
    delivering_thread = threading.Thread(
        target=_deliver, args=(
            state,
            state.connectivity,
            callbacks,))
    delivering_thread.start()
    state.delivering = True

@@ -823,8 +844,8 @@ def _poll_connectivity(state, channel, initial_try_to_connect):
        state.connectivity = (
            _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
                connectivity])
        callbacks = tuple(callback
                          for callback, unused_but_known_to_be_none_connectivity
                          in state.callbacks_and_connectivities)
        for callback_and_connectivity in state.callbacks_and_connectivities:
            callback_and_connectivity[1] = state.connectivity

@@ -832,8 +853,8 @@ def _poll_connectivity(state, channel, initial_try_to_connect):
            _spawn_delivery(state, callbacks)
    completion_queue = cygrpc.CompletionQueue()
    while True:
        channel.watch_connectivity_state(connectivity,
                                         cygrpc.Timespec(time.time() + 0.2),
                                         completion_queue, None)
        event = completion_queue.poll()
        with state.lock:
@@ -863,10 +884,13 @@ def _moot(state):
def _subscribe(state, callback, try_to_connect):
    with state.lock:
        if not state.callbacks_and_connectivities and not state.polling:

            def cancel_all_subscriptions(timeout):
                _moot(state)

            polling_thread = _common.CleanupThread(
                cancel_all_subscriptions,
                target=_poll_connectivity,
                args=(state, state.channel, bool(try_to_connect)))
            polling_thread.start()
            state.polling = True

@@ -883,8 +907,8 @@ def _subscribe(state, callback, try_to_connect):
def _unsubscribe(state, callback):
    with state.lock:
        for index, (subscribed_callback, unused_connectivity
                   ) in enumerate(state.callbacks_and_connectivities):
            if callback == subscribed_callback:
                state.callbacks_and_connectivities.pop(index)
                break
@@ -892,7 +916,8 @@ def _unsubscribe(state, callback):


def _options(options):
    return list(options) + [
        (cygrpc.ChannelArgKey.primary_user_agent_string, _USER_AGENT)
    ]


class Channel(grpc.Channel):

@@ -907,8 +932,8 @@ class Channel(grpc.Channel):
          credentials: A cygrpc.ChannelCredentials or None.
        """
        self._channel = cygrpc.Channel(
            _common.encode(target),
            _common.channel_args(_options(options)), credentials)
        self._call_state = _ChannelCallState(self._channel)
        self._connectivity_state = _ChannelConnectivityState(self._channel)
@ -918,28 +943,40 @@ class Channel(grpc.Channel):
def unsubscribe(self, callback): def unsubscribe(self, callback):
_unsubscribe(self._connectivity_state, callback) _unsubscribe(self._connectivity_state, callback)
def unary_unary( def unary_unary(self,
self, method, request_serializer=None, response_deserializer=None): method,
request_serializer=None,
response_deserializer=None):
return _UnaryUnaryMultiCallable( return _UnaryUnaryMultiCallable(
self._channel, _channel_managed_call_management(self._call_state), self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method), request_serializer, response_deserializer) _common.encode(method), request_serializer, response_deserializer)
def unary_stream( def unary_stream(self,
self, method, request_serializer=None, response_deserializer=None): method,
request_serializer=None,
response_deserializer=None):
return _UnaryStreamMultiCallable( return _UnaryStreamMultiCallable(
self._channel, _channel_managed_call_management(self._call_state), self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method), request_serializer, response_deserializer) _common.encode(method), request_serializer, response_deserializer)
def stream_unary( def stream_unary(self,
self, method, request_serializer=None, response_deserializer=None): method,
request_serializer=None,
response_deserializer=None):
return _StreamUnaryMultiCallable( return _StreamUnaryMultiCallable(
self._channel, _channel_managed_call_management(self._call_state), self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method), request_serializer, response_deserializer) _common.encode(method), request_serializer, response_deserializer)
def stream_stream( def stream_stream(self,
self, method, request_serializer=None, response_deserializer=None): method,
request_serializer=None,
response_deserializer=None):
return _StreamStreamMultiCallable( return _StreamStreamMultiCallable(
self._channel, _channel_managed_call_management(self._call_state), self._channel,
_channel_managed_call_management(self._call_state),
_common.encode(method), request_serializer, response_deserializer) _common.encode(method), request_serializer, response_deserializer)
def __del__(self): def __del__(self):
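For review context, a minimal sketch of how these multi-callables are reached through the public API. The target, the method path '/helloworld.Greeter/SayHello', and the passthrough (de)serializers are placeholders for illustration only, not part of this change.

# Sketch only: exercises Channel.unary_unary via the public grpc API.
import grpc

channel = grpc.insecure_channel('localhost:50051')
say_hello = channel.unary_unary(
    '/helloworld.Greeter/SayHello',
    request_serializer=lambda request: request,       # assume bytes in
    response_deserializer=lambda response: response)  # assume bytes out
# response = say_hello(b'...', timeout=5)  # would issue the RPC against a live server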

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Shared implementation."""

import logging

@@ -46,8 +45,7 @@ CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
    cygrpc.ConnectivityState.ready: grpc.ChannelConnectivity.READY,
    cygrpc.ConnectivityState.transient_failure:
    grpc.ChannelConnectivity.TRANSIENT_FAILURE,
    cygrpc.ConnectivityState.shutdown: grpc.ChannelConnectivity.SHUTDOWN,
}

CYGRPC_STATUS_CODE_TO_STATUS_CODE = {

@@ -114,8 +112,8 @@ def application_metadata(cygrpc_metadata):
    if cygrpc_metadata is None:
        return ()
    else:
        return tuple((decode(key), value
                      if key[-4:] == b'-bin' else decode(value))
                     for key, value in cygrpc_metadata)
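The rule above is that metadata keys ending in '-bin' keep their raw bytes values while everything else is decoded to text. A self-contained sketch of the same rule, with decode() standing in for _common.decode:

# Sketch only: the '-bin' metadata rule shown above, isolated for illustration.
def decode(value):
    return value.decode('utf8') if isinstance(value, bytes) else value

def to_application_metadata(raw_metadata):
    return tuple((decode(key), value if key[-4:] == b'-bin' else decode(value))
                 for key, value in raw_metadata)

print(to_application_metadata(((b'trace-bin', b'\x00\x01'), (b'user-agent', b'grpc'),)))
# (('trace-bin', b'\x00\x01'), ('user-agent', 'grpc'))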
@@ -151,8 +149,13 @@ class CleanupThread(threading.Thread):
    we accomplish this by overriding the join() method.
    """

    def __init__(self,
                 behavior,
                 group=None,
                 target=None,
                 name=None,
                 args=(),
                 kwargs={}):
        """Constructor.

        Args:

@@ -169,8 +172,8 @@ class CleanupThread(threading.Thread):
          kwargs (dict[str,object]): A dictionary of keyword arguments to
            pass to `target`.
        """
        super(CleanupThread, self).__init__(
            group=group, target=target, name=name, args=args, kwargs=kwargs)
        self._behavior = behavior

    def join(self, timeout=None):
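A minimal sketch of how a CleanupThread is driven, assuming (as the join() override implies) that the supplied behavior is invoked with the join timeout before the underlying thread is joined. The serve()/stop() names and the event are hypothetical.

# Sketch only: cleanup behavior runs when the thread is joined.
import threading
from grpc import _common  # internal module, referenced only to illustrate the class above

def serve(stop_event):
    stop_event.wait()

stop_event = threading.Event()

def stop(timeout=None):  # the behavior passed as the first positional argument
    stop_event.set()

worker = _common.CleanupThread(stop, target=serve, args=(stop_event,))
worker.start()
worker.join()  # invokes stop() so serve() can exit, then joins the thread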

@@ -44,5 +44,5 @@ def call(call_credentialses):

def channel(channel_credentials, call_credentialses):
    return cygrpc.channel_credentials_composite(channel_credentials,
                                                _call(call_credentialses))
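These helpers are what the public composite-credentials functions resolve to. A minimal sketch through the public API; the token strings are placeholders.

# Sketch only: composing call and channel credentials via the public grpc API.
import grpc

call_creds = grpc.composite_call_credentials(
    grpc.access_token_call_credentials('first-token'),
    grpc.access_token_call_credentials('second-token'))
channel_creds = grpc.composite_channel_credentials(
    grpc.ssl_channel_credentials(), call_creds)
# channel = grpc.secure_channel('example.com:443', channel_creds)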

@@ -36,9 +36,9 @@ from grpc._cython import cygrpc

class AuthMetadataContext(
        collections.namedtuple('AuthMetadataContext', (
            'service_url',
            'method_name',)), grpc.AuthMetadataContext):
    pass

@@ -62,8 +62,7 @@ class _WrappedCygrpcCallback(object):
    def _invoke_failure(self, error):
        # TODO(atash) translate different Exception superclasses into different
        # status codes.
        self.cygrpc_callback(_common.EMPTY_METADATA, cygrpc.StatusCode.internal,
                             _common.encode(str(error)))

    def _invoke_success(self, metadata):

@@ -101,10 +100,11 @@ class _WrappedPlugin(object):
    def __call__(self, context, cygrpc_callback):
        wrapped_cygrpc_callback = _WrappedCygrpcCallback(cygrpc_callback)
        wrapped_context = AuthMetadataContext(
            _common.decode(context.service_url),
            _common.decode(context.method_name))
        try:
            self.plugin(wrapped_context,
                        AuthMetadataPluginCallback(wrapped_cygrpc_callback))
        except Exception as error:
            wrapped_cygrpc_callback.notify_failure(error)
            raise

@@ -120,4 +120,5 @@ def call_credentials_metadata_plugin(plugin, name):
    plugin's invocation must be non-blocking.
    """
    return cygrpc.call_credentials_metadata_plugin(
        cygrpc.CredentialsMetadataPlugin(
            _WrappedPlugin(plugin), _common.encode(name)))
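A minimal sketch of the kind of plugin _WrappedPlugin wraps, expressed with the public grpc.AuthMetadataPlugin interface. The header name and token value are placeholders.

# Sketch only: a non-blocking metadata plugin usable with metadata_call_credentials.
import grpc

class StaticTokenPlugin(grpc.AuthMetadataPlugin):

    def __call__(self, context, callback):
        # context.service_url / context.method_name identify the RPC being invoked.
        metadata = (('authorization', 'Bearer some-token'),)
        callback(metadata, None)  # second argument is an error; None on success

call_credentials = grpc.metadata_call_credentials(StaticTokenPlugin(),
                                                  name='static token plugin')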

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Service-side implementation of gRPC Python."""

import collections

@@ -91,9 +90,9 @@ def _details(state):

class _HandlerCallDetails(
        collections.namedtuple('_HandlerCallDetails', (
            'method',
            'invocation_metadata',)), grpc.HandlerCallDetails):
    pass

@@ -131,9 +130,11 @@ def _possibly_finish_call(state, token):

def _send_status_from_server(state, token):

    def send_status_from_server(unused_send_status_from_server_event):
        with state.condition:
            return _possibly_finish_call(state, token)

    return send_status_from_server

@@ -143,19 +144,16 @@ def _abort(state, call, code, details):
    effective_details = details if state.details is None else state.details
    if state.initial_metadata_allowed:
        operations = (
            cygrpc.operation_send_initial_metadata(_EMPTY_METADATA,
                                                   _EMPTY_FLAGS),
            cygrpc.operation_send_status_from_server(
                _common.cygrpc_metadata(state.trailing_metadata),
                effective_code, effective_details, _EMPTY_FLAGS),)
        token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
    else:
        operations = (cygrpc.operation_send_status_from_server(
            _common.cygrpc_metadata(state.trailing_metadata),
            effective_code, effective_details, _EMPTY_FLAGS),)
        token = _SEND_STATUS_FROM_SERVER_TOKEN
    call.start_server_batch(
        cygrpc.Operations(operations),

@@ -165,18 +163,22 @@ def _abort(state, call, code, details):

def _receive_close_on_server(state):

    def receive_close_on_server(receive_close_on_server_event):
        with state.condition:
            if receive_close_on_server_event.batch_operations[
                    0].received_cancelled:
                state.client = _CANCELLED
            elif state.client is _OPEN:
                state.client = _CLOSED
            state.condition.notify_all()
            return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN)

    return receive_close_on_server


def _receive_message(state, call, request_deserializer):

    def receive_message(receive_message_event):
        serialized_request = _serialized_request(receive_message_event)
        if serialized_request is None:

@@ -186,31 +188,36 @@ def _receive_message(state, call, request_deserializer):
                state.condition.notify_all()
                return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
        else:
            request = _common.deserialize(serialized_request,
                                          request_deserializer)
            with state.condition:
                if request is None:
                    _abort(state, call, cygrpc.StatusCode.internal,
                           b'Exception deserializing request!')
                else:
                    state.request = request
                state.condition.notify_all()
                return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)

    return receive_message


def _send_initial_metadata(state):

    def send_initial_metadata(unused_send_initial_metadata_event):
        with state.condition:
            return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN)

    return send_initial_metadata


def _send_message(state, token):

    def send_message(unused_send_message_event):
        with state.condition:
            state.condition.notify_all()
            return _possibly_finish_call(state, token)

    return send_message

@@ -226,7 +233,8 @@ class _Context(grpc.ServicerContext):
        return self._state.client is not _CANCELLED and not self._state.statused

    def time_remaining(self):
        return max(self._rpc_event.request_call_details.deadline - time.time(),
                   0)

    def cancel(self):
        self._rpc_event.operation_call.cancel()

@@ -293,8 +301,10 @@ class _RequestIterator(object):
            raise StopIteration()
        else:
            self._call.start_server_batch(
                cygrpc.Operations(
                    (cygrpc.operation_receive_message(_EMPTY_FLAGS),)),
                _receive_message(self._state, self._call,
                                 self._request_deserializer))
            self._state.due.add(_RECEIVE_MESSAGE_TOKEN)

    def _look_for_request(self):

@@ -328,6 +338,7 @@ class _RequestIterator(object):
def _unary_request(rpc_event, state, request_deserializer):

    def unary_request():
        with state.condition:
            if state.client is _CANCELLED or state.statused:

@@ -336,8 +347,8 @@ def _unary_request(rpc_event, state, request_deserializer):
                start_server_batch_result = rpc_event.operation_call.start_server_batch(
                    cygrpc.Operations(
                        (cygrpc.operation_receive_message(_EMPTY_FLAGS),)),
                    _receive_message(state, rpc_event.operation_call,
                                     request_deserializer))
                state.due.add(_RECEIVE_MESSAGE_TOKEN)
                while True:
                    state.condition.wait()

@@ -345,9 +356,9 @@ def _unary_request(rpc_event, state, request_deserializer):
                    if state.client is _CLOSED:
                        details = '"{}" requires exactly one request message.'.format(
                            rpc_event.request_call_details.method)
                        _abort(state, rpc_event.operation_call,
                               cygrpc.StatusCode.unimplemented,
                               _common.encode(details))
                        return None
                    elif state.client is _CANCELLED:
                        return None

@@ -355,6 +366,7 @@ def _unary_request(rpc_event, state, request_deserializer):
                        request = state.request
                        state.request = None
                        return request

    return unary_request

@@ -391,8 +403,7 @@ def _serialize_response(rpc_event, state, response, response_serializer):
    serialized_response = _common.serialize(response, response_serializer)
    if serialized_response is None:
        with state.condition:
            _abort(state, rpc_event.operation_call, cygrpc.StatusCode.internal,
                   b'Failed to serialize response!')
        return None
    else:

@@ -406,16 +417,15 @@ def _send_response(rpc_event, state, serialized_response):
    else:
        if state.initial_metadata_allowed:
            operations = (
                cygrpc.operation_send_initial_metadata(_EMPTY_METADATA,
                                                       _EMPTY_FLAGS),
                cygrpc.operation_send_message(serialized_response,
                                              _EMPTY_FLAGS),)
            state.initial_metadata_allowed = False
            token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN
        else:
            operations = (cygrpc.operation_send_message(serialized_response,
                                                        _EMPTY_FLAGS),)
            token = _SEND_MESSAGE_TOKEN
        rpc_event.operation_call.start_server_batch(
            cygrpc.Operations(operations), _send_message(state, token))

@@ -438,11 +448,12 @@ def _status(rpc_event, state, serialized_response):
        ]
        if state.initial_metadata_allowed:
            operations.append(
                cygrpc.operation_send_initial_metadata(_EMPTY_METADATA,
                                                       _EMPTY_FLAGS))
        if serialized_response is not None:
            operations.append(
                cygrpc.operation_send_message(serialized_response,
                                              _EMPTY_FLAGS))
        rpc_event.operation_call.start_server_batch(
            cygrpc.Operations(operations),
            _send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN))

@@ -450,13 +461,12 @@ def _status(rpc_event, state, serialized_response):
        state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN)


def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk,
                            request_deserializer, response_serializer):
    argument = argument_thunk()
    if argument is not None:
        response, proceed = _call_behavior(rpc_event, state, behavior, argument,
                                           request_deserializer)
        if proceed:
            serialized_response = _serialize_response(
                rpc_event, state, response, response_serializer)

@@ -464,9 +474,8 @@ def _unary_response_in_pool(
                _status(rpc_event, state, serialized_response)


def _stream_response_in_pool(rpc_event, state, behavior, argument_thunk,
                             request_deserializer, response_serializer):
    argument = argument_thunk()
    if argument is not None:
        response_iterator, proceed = _call_behavior(

@@ -483,7 +492,8 @@ def _stream_response_in_pool(
                serialized_response = _serialize_response(
                    rpc_event, state, response, response_serializer)
                if serialized_response is not None:
                    proceed = _send_response(rpc_event, state,
                                             serialized_response)
                    if not proceed:
                        break
                else:

@@ -493,38 +503,38 @@ def _stream_response_in_pool(

def _handle_unary_unary(rpc_event, state, method_handler, thread_pool):
    unary_request = _unary_request(rpc_event, state,
                                   method_handler.request_deserializer)
    thread_pool.submit(_unary_response_in_pool, rpc_event, state,
                       method_handler.unary_unary, unary_request,
                       method_handler.request_deserializer,
                       method_handler.response_serializer)


def _handle_unary_stream(rpc_event, state, method_handler, thread_pool):
    unary_request = _unary_request(rpc_event, state,
                                   method_handler.request_deserializer)
    thread_pool.submit(_stream_response_in_pool, rpc_event, state,
                       method_handler.unary_stream, unary_request,
                       method_handler.request_deserializer,
                       method_handler.response_serializer)


def _handle_stream_unary(rpc_event, state, method_handler, thread_pool):
    request_iterator = _RequestIterator(state, rpc_event.operation_call,
                                        method_handler.request_deserializer)
    thread_pool.submit(_unary_response_in_pool, rpc_event, state,
                       method_handler.stream_unary, lambda: request_iterator,
                       method_handler.request_deserializer,
                       method_handler.response_serializer)


def _handle_stream_stream(rpc_event, state, method_handler, thread_pool):
    request_iterator = _RequestIterator(state, rpc_event.operation_call,
                                        method_handler.request_deserializer)
    thread_pool.submit(_stream_response_in_pool, rpc_event, state,
                       method_handler.stream_stream, lambda: request_iterator,
                       method_handler.request_deserializer,
                       method_handler.response_serializer)
@@ -546,11 +556,12 @@ def _handle_unrecognized_method(rpc_event):
        cygrpc.operation_receive_close_on_server(_EMPTY_FLAGS),
        cygrpc.operation_send_status_from_server(
            _EMPTY_METADATA, cygrpc.StatusCode.unimplemented,
            b'Method not found!', _EMPTY_FLAGS),)
    rpc_state = _RPCState()
    rpc_event.operation_call.start_server_batch(operations,
                                                lambda ignored_event: (
                                                    rpc_state,
                                                    (),))
    return rpc_state

@@ -564,14 +575,18 @@ def _handle_with_method_handler(rpc_event, method_handler, thread_pool):
        state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
        if method_handler.request_streaming:
            if method_handler.response_streaming:
                _handle_stream_stream(rpc_event, state, method_handler,
                                      thread_pool)
            else:
                _handle_stream_unary(rpc_event, state, method_handler,
                                     thread_pool)
        else:
            if method_handler.response_streaming:
                _handle_unary_stream(rpc_event, state, method_handler,
                                     thread_pool)
            else:
                _handle_unary_unary(rpc_event, state, method_handler,
                                    thread_pool)
        return state

@@ -583,7 +598,8 @@ def _handle_call(rpc_event, generic_handlers, thread_pool):
        if method_handler is None:
            return _handle_unrecognized_method(rpc_event)
        else:
            return _handle_with_method_handler(rpc_event, method_handler,
                                               thread_pool)
    else:
        return None

@@ -623,12 +639,13 @@ def _add_insecure_port(state, address):
def _add_secure_port(state, address, server_credentials):
    with state.lock:
        return state.server.add_http2_port(address,
                                           server_credentials._credentials)


def _request_call(state):
    state.server.request_call(state.completion_queue, state.completion_queue,
                              _REQUEST_CALL_TAG)
    state.due.add(_REQUEST_CALL_TAG)

@@ -654,8 +671,8 @@ def _serve(state):
        elif event.tag is _REQUEST_CALL_TAG:
            with state.lock:
                state.due.remove(_REQUEST_CALL_TAG)
                rpc_state = _handle_call(event, state.generic_handlers,
                                         state.thread_pool)
                if rpc_state is not None:
                    state.rpc_states.add(rpc_state)
                if state.stage is _ServerStage.STARTED:

@@ -696,6 +713,7 @@ def _stop(state, grace):
                    rpc_state.client = _CANCELLED
                    rpc_state.condition.notify_all()
        else:

            def cancel_all_calls_after_grace():
                shutdown_event.wait(timeout=grace)
                with state.lock:

@@ -705,6 +723,7 @@ def _stop(state, grace):
                        with rpc_state.condition:
                            rpc_state.client = _CANCELLED
                            rpc_state.condition.notify_all()

            thread = threading.Thread(target=cancel_all_calls_after_grace)
            thread.start()
            return shutdown_event

@@ -719,6 +738,7 @@ def _start(state):
        state.server.start()
        state.stage = _ServerStage.STARTED
        _request_call(state)

        def cleanup_server(timeout):
            if timeout is None:
                _stop(state, _UNEXPECTED_EXIT_SERVER_GRACE).wait()

@@ -729,14 +749,15 @@ def _start(state):
            cleanup_server, target=_serve, args=(state,))
        thread.start()


class Server(grpc.Server):

    def __init__(self, thread_pool, generic_handlers, options):
        completion_queue = cygrpc.CompletionQueue()
        server = cygrpc.Server(_common.channel_args(options))
        server.register_completion_queue(completion_queue)
        self._state = _ServerState(completion_queue, server, generic_handlers,
                                   thread_pool)

    def add_generic_rpc_handlers(self, generic_rpc_handlers):
        _add_generic_handlers(self._state, generic_rpc_handlers)

@@ -745,7 +766,8 @@ class Server(grpc.Server):
        return _add_insecure_port(self._state, _common.encode(address))

    def add_secure_port(self, address, server_credentials):
        return _add_secure_port(self._state,
                                _common.encode(address), server_credentials)

    def start(self):
        _start(self._state)
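A minimal sketch of driving this Server through the public grpc.server entry point; the port is a placeholder and no handlers are registered.

# Sketch only: constructing, starting, and stopping the Server above.
from concurrent import futures
import grpc

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
port = server.add_insecure_port('[::]:50051')
server.start()
server.stop(None)  # immediate shutdown; a numeric grace would delay cancellation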

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Internal utilities for gRPC Python."""

import collections

@@ -44,12 +43,15 @@ _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (

class RpcMethodHandler(
        collections.namedtuple('_RpcMethodHandler', (
            'request_streaming',
            'response_streaming',
            'request_deserializer',
            'response_serializer',
            'unary_unary',
            'unary_stream',
            'stream_unary',
            'stream_stream',)), grpc.RpcMethodHandler):
    pass

@@ -59,7 +61,8 @@ class DictionaryGenericHandler(grpc.ServiceRpcHandler):
        self._name = service
        self._method_handlers = {
            _common.fully_qualified_method(service, method): method_handler
            for method, method_handler in six.iteritems(method_handlers)
        }

    def service_name(self):
        return self._name
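A minimal sketch of how handlers like these are normally produced with the public helpers; the service name, method name, and behavior are placeholders.

# Sketch only: building an RpcMethodHandler and a generic handler via the public API.
import grpc

def say_hello(request, servicer_context):  # hypothetical unary-unary behavior
    return b'hello'

method_handlers = {
    'SayHello': grpc.unary_unary_rpc_method_handler(say_hello),
}
generic_handler = grpc.method_handlers_generic_handler('helloworld.Greeter',
                                                       method_handlers)
# server.add_generic_rpc_handlers((generic_handler,))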

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Translates gRPC's client-side API into gRPC's client-side Beta API.""" """Translates gRPC's client-side API into gRPC's client-side Beta API."""
import grpc import grpc
@ -38,14 +37,14 @@ from grpc.framework.foundation import future
from grpc.framework.interfaces.face import face from grpc.framework.interfaces.face import face
_STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = { _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = {
grpc.StatusCode.CANCELLED: ( grpc.StatusCode.CANCELLED: (face.Abortion.Kind.CANCELLED,
face.Abortion.Kind.CANCELLED, face.CancellationError), face.CancellationError),
grpc.StatusCode.UNKNOWN: ( grpc.StatusCode.UNKNOWN: (face.Abortion.Kind.REMOTE_FAILURE,
face.Abortion.Kind.REMOTE_FAILURE, face.RemoteError), face.RemoteError),
grpc.StatusCode.DEADLINE_EXCEEDED: ( grpc.StatusCode.DEADLINE_EXCEEDED: (face.Abortion.Kind.EXPIRED,
face.Abortion.Kind.EXPIRED, face.ExpirationError), face.ExpirationError),
grpc.StatusCode.UNIMPLEMENTED: ( grpc.StatusCode.UNIMPLEMENTED: (face.Abortion.Kind.LOCAL_FAILURE,
face.Abortion.Kind.LOCAL_FAILURE, face.LocalError), face.LocalError),
} }
@ -65,18 +64,19 @@ def _abortion(rpc_error_call):
code = rpc_error_call.code() code = rpc_error_call.code()
pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code) pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
error_kind = face.Abortion.Kind.LOCAL_FAILURE if pair is None else pair[0] error_kind = face.Abortion.Kind.LOCAL_FAILURE if pair is None else pair[0]
return face.Abortion( return face.Abortion(error_kind,
error_kind, rpc_error_call.initial_metadata(), rpc_error_call.initial_metadata(),
rpc_error_call.trailing_metadata(), code, rpc_error_call.details()) rpc_error_call.trailing_metadata(), code,
rpc_error_call.details())
def _abortion_error(rpc_error_call): def _abortion_error(rpc_error_call):
code = rpc_error_call.code() code = rpc_error_call.code()
pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code) pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
exception_class = face.AbortionError if pair is None else pair[1] exception_class = face.AbortionError if pair is None else pair[1]
return exception_class( return exception_class(rpc_error_call.initial_metadata(),
rpc_error_call.initial_metadata(), rpc_error_call.trailing_metadata(), rpc_error_call.trailing_metadata(), code,
code, rpc_error_call.details()) rpc_error_call.details())
class _InvocationProtocolContext(interfaces.GRPCInvocationContext): class _InvocationProtocolContext(interfaces.GRPCInvocationContext):
@ -159,9 +159,11 @@ class _Rendezvous(future.Future, face.Call):
return self._call.time_remaining() return self._call.time_remaining()
def add_abortion_callback(self, abortion_callback): def add_abortion_callback(self, abortion_callback):
def done_callback(): def done_callback():
if self.code() is not grpc.StatusCode.OK: if self.code() is not grpc.StatusCode.OK:
abortion_callback(_abortion(self._call)) abortion_callback(_abortion(self._call))
registered = self._call.add_callback(done_callback) registered = self._call.add_callback(done_callback)
return None if registered else done_callback() return None if registered else done_callback()
@ -181,9 +183,9 @@ class _Rendezvous(future.Future, face.Call):
return self._call.details() return self._call.details()
def _blocking_unary_unary( def _blocking_unary_unary(channel, group, method, timeout, with_call,
channel, group, method, timeout, with_call, protocol_options, metadata, protocol_options, metadata, metadata_transformer,
metadata_transformer, request, request_serializer, response_deserializer): request, request_serializer, response_deserializer):
try: try:
multi_callable = channel.unary_unary( multi_callable = channel.unary_unary(
_common.fully_qualified_method(group, method), _common.fully_qualified_method(group, method),
@ -192,48 +194,56 @@ def _blocking_unary_unary(
effective_metadata = _effective_metadata(metadata, metadata_transformer) effective_metadata = _effective_metadata(metadata, metadata_transformer)
if with_call: if with_call:
response, call = multi_callable.with_call( response, call = multi_callable.with_call(
request, timeout=timeout, metadata=effective_metadata, request,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
return response, _Rendezvous(None, None, call) return response, _Rendezvous(None, None, call)
else: else:
return multi_callable( return multi_callable(
request, timeout=timeout, metadata=effective_metadata, request,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
except grpc.RpcError as rpc_error_call: except grpc.RpcError as rpc_error_call:
raise _abortion_error(rpc_error_call) raise _abortion_error(rpc_error_call)
def _future_unary_unary( def _future_unary_unary(channel, group, method, timeout, protocol_options,
channel, group, method, timeout, protocol_options, metadata, metadata, metadata_transformer, request,
metadata_transformer, request, request_serializer, response_deserializer): request_serializer, response_deserializer):
multi_callable = channel.unary_unary( multi_callable = channel.unary_unary(
_common.fully_qualified_method(group, method), _common.fully_qualified_method(group, method),
request_serializer=request_serializer, request_serializer=request_serializer,
response_deserializer=response_deserializer) response_deserializer=response_deserializer)
effective_metadata = _effective_metadata(metadata, metadata_transformer) effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_future = multi_callable.future( response_future = multi_callable.future(
request, timeout=timeout, metadata=effective_metadata, request,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
return _Rendezvous(response_future, None, response_future) return _Rendezvous(response_future, None, response_future)
def _unary_stream( def _unary_stream(channel, group, method, timeout, protocol_options, metadata,
channel, group, method, timeout, protocol_options, metadata, metadata_transformer, request, request_serializer,
metadata_transformer, request, request_serializer, response_deserializer): response_deserializer):
multi_callable = channel.unary_stream( multi_callable = channel.unary_stream(
_common.fully_qualified_method(group, method), _common.fully_qualified_method(group, method),
request_serializer=request_serializer, request_serializer=request_serializer,
response_deserializer=response_deserializer) response_deserializer=response_deserializer)
effective_metadata = _effective_metadata(metadata, metadata_transformer) effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_iterator = multi_callable( response_iterator = multi_callable(
request, timeout=timeout, metadata=effective_metadata, request,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
return _Rendezvous(None, response_iterator, response_iterator) return _Rendezvous(None, response_iterator, response_iterator)
def _blocking_stream_unary( def _blocking_stream_unary(channel, group, method, timeout, with_call,
channel, group, method, timeout, with_call, protocol_options, metadata, protocol_options, metadata, metadata_transformer,
metadata_transformer, request_iterator, request_serializer, request_iterator, request_serializer,
response_deserializer): response_deserializer):
try: try:
multi_callable = channel.stream_unary( multi_callable = channel.stream_unary(
@ -243,34 +253,38 @@ def _blocking_stream_unary(
effective_metadata = _effective_metadata(metadata, metadata_transformer) effective_metadata = _effective_metadata(metadata, metadata_transformer)
if with_call: if with_call:
response, call = multi_callable.with_call( response, call = multi_callable.with_call(
request_iterator, timeout=timeout, metadata=effective_metadata, request_iterator,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
return response, _Rendezvous(None, None, call) return response, _Rendezvous(None, None, call)
else: else:
return multi_callable( return multi_callable(
request_iterator, timeout=timeout, metadata=effective_metadata, request_iterator,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
except grpc.RpcError as rpc_error_call: except grpc.RpcError as rpc_error_call:
raise _abortion_error(rpc_error_call) raise _abortion_error(rpc_error_call)
def _future_stream_unary( def _future_stream_unary(channel, group, method, timeout, protocol_options,
channel, group, method, timeout, protocol_options, metadata, metadata, metadata_transformer, request_iterator,
metadata_transformer, request_iterator, request_serializer, request_serializer, response_deserializer):
response_deserializer):
multi_callable = channel.stream_unary( multi_callable = channel.stream_unary(
_common.fully_qualified_method(group, method), _common.fully_qualified_method(group, method),
request_serializer=request_serializer, request_serializer=request_serializer,
response_deserializer=response_deserializer) response_deserializer=response_deserializer)
effective_metadata = _effective_metadata(metadata, metadata_transformer) effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_future = multi_callable.future( response_future = multi_callable.future(
request_iterator, timeout=timeout, metadata=effective_metadata, request_iterator,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
return _Rendezvous(response_future, None, response_future) return _Rendezvous(response_future, None, response_future)
def _stream_stream( def _stream_stream(channel, group, method, timeout, protocol_options, metadata,
channel, group, method, timeout, protocol_options, metadata,
metadata_transformer, request_iterator, request_serializer, metadata_transformer, request_iterator, request_serializer,
response_deserializer): response_deserializer):
multi_callable = channel.stream_stream( multi_callable = channel.stream_stream(
@ -279,16 +293,17 @@ def _stream_stream(
response_deserializer=response_deserializer) response_deserializer=response_deserializer)
effective_metadata = _effective_metadata(metadata, metadata_transformer) effective_metadata = _effective_metadata(metadata, metadata_transformer)
response_iterator = multi_callable( response_iterator = multi_callable(
request_iterator, timeout=timeout, metadata=effective_metadata, request_iterator,
timeout=timeout,
metadata=effective_metadata,
credentials=_credentials(protocol_options)) credentials=_credentials(protocol_options))
return _Rendezvous(None, response_iterator, response_iterator) return _Rendezvous(None, response_iterator, response_iterator)
class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable): class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable):
def __init__( def __init__(self, channel, group, method, metadata_transformer,
self, channel, group, method, metadata_transformer, request_serializer, request_serializer, response_deserializer):
response_deserializer):
self._channel = channel self._channel = channel
self._group = group self._group = group
self._method = method self._method = method
@ -296,8 +311,11 @@ class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable):
self._request_serializer = request_serializer self._request_serializer = request_serializer
self._response_deserializer = response_deserializer self._response_deserializer = response_deserializer
def __call__( def __call__(self,
self, request, timeout, metadata=None, with_call=False, request,
timeout,
metadata=None,
with_call=False,
protocol_options=None): protocol_options=None):
return _blocking_unary_unary( return _blocking_unary_unary(
self._channel, self._group, self._method, timeout, with_call, self._channel, self._group, self._method, timeout, with_call,
@ -307,20 +325,23 @@ class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable):
def future(self, request, timeout, metadata=None, protocol_options=None): def future(self, request, timeout, metadata=None, protocol_options=None):
return _future_unary_unary( return _future_unary_unary(
self._channel, self._group, self._method, timeout, protocol_options, self._channel, self._group, self._method, timeout, protocol_options,
metadata, self._metadata_transformer, request, self._request_serializer, metadata, self._metadata_transformer, request,
self._response_deserializer) self._request_serializer, self._response_deserializer)
def event( def event(self,
self, request, receiver, abortion_callback, timeout, request,
metadata=None, protocol_options=None): receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable): class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable):
def __init__( def __init__(self, channel, group, method, metadata_transformer,
self, channel, group, method, metadata_transformer, request_serializer, request_serializer, response_deserializer):
response_deserializer):
self._channel = channel self._channel = channel
self._group = group self._group = group
self._method = method self._method = method
@ -331,20 +352,23 @@ class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable):
def __call__(self, request, timeout, metadata=None, protocol_options=None): def __call__(self, request, timeout, metadata=None, protocol_options=None):
return _unary_stream( return _unary_stream(
self._channel, self._group, self._method, timeout, protocol_options, self._channel, self._group, self._method, timeout, protocol_options,
metadata, self._metadata_transformer, request, self._request_serializer, metadata, self._metadata_transformer, request,
self._response_deserializer) self._request_serializer, self._response_deserializer)
def event( def event(self,
self, request, receiver, abortion_callback, timeout, request,
metadata=None, protocol_options=None): receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable): class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable):
def __init__( def __init__(self, channel, group, method, metadata_transformer,
self, channel, group, method, metadata_transformer, request_serializer, request_serializer, response_deserializer):
response_deserializer):
self._channel = channel self._channel = channel
self._group = group self._group = group
self._method = method self._method = method
@ -352,32 +376,41 @@ class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable):
self._request_serializer = request_serializer self._request_serializer = request_serializer
self._response_deserializer = response_deserializer self._response_deserializer = response_deserializer
def __call__( def __call__(self,
self, request_iterator, timeout, metadata=None, with_call=False, request_iterator,
timeout,
metadata=None,
with_call=False,
protocol_options=None): protocol_options=None):
return _blocking_stream_unary( return _blocking_stream_unary(
self._channel, self._group, self._method, timeout, with_call, self._channel, self._group, self._method, timeout, with_call,
protocol_options, metadata, self._metadata_transformer, protocol_options, metadata, self._metadata_transformer,
request_iterator, self._request_serializer, self._response_deserializer) request_iterator, self._request_serializer,
self._response_deserializer)
def future( def future(self,
self, request_iterator, timeout, metadata=None, protocol_options=None): request_iterator,
timeout,
metadata=None,
protocol_options=None):
return _future_stream_unary( return _future_stream_unary(
self._channel, self._group, self._method, timeout, protocol_options, self._channel, self._group, self._method, timeout, protocol_options,
metadata, self._metadata_transformer, request_iterator, metadata, self._metadata_transformer, request_iterator,
self._request_serializer, self._response_deserializer) self._request_serializer, self._response_deserializer)
def event( def event(self,
self, receiver, abortion_callback, timeout, metadata=None, receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None): protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
class _StreamStreamMultiCallable(face.StreamStreamMultiCallable): class _StreamStreamMultiCallable(face.StreamStreamMultiCallable):
def __init__( def __init__(self, channel, group, method, metadata_transformer,
self, channel, group, method, metadata_transformer, request_serializer, request_serializer, response_deserializer):
response_deserializer):
self._channel = channel self._channel = channel
self._group = group self._group = group
self._method = method self._method = method
@ -385,133 +418,226 @@ class _StreamStreamMultiCallable(face.StreamStreamMultiCallable):
self._request_serializer = request_serializer self._request_serializer = request_serializer
self._response_deserializer = response_deserializer self._response_deserializer = response_deserializer
def __call__( def __call__(self,
self, request_iterator, timeout, metadata=None, protocol_options=None): request_iterator,
timeout,
metadata=None,
protocol_options=None):
return _stream_stream( return _stream_stream(
self._channel, self._group, self._method, timeout, protocol_options, self._channel, self._group, self._method, timeout, protocol_options,
metadata, self._metadata_transformer, request_iterator, metadata, self._metadata_transformer, request_iterator,
self._request_serializer, self._response_deserializer) self._request_serializer, self._response_deserializer)
def event( def event(self,
self, receiver, abortion_callback, timeout, metadata=None, receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None): protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
class _GenericStub(face.GenericStub): class _GenericStub(face.GenericStub):
def __init__( def __init__(self, channel, metadata_transformer, request_serializers,
self, channel, metadata_transformer, request_serializers,
response_deserializers): response_deserializers):
self._channel = channel self._channel = channel
self._metadata_transformer = metadata_transformer self._metadata_transformer = metadata_transformer
self._request_serializers = request_serializers or {} self._request_serializers = request_serializers or {}
self._response_deserializers = response_deserializers or {} self._response_deserializers = response_deserializers or {}
def blocking_unary_unary( def blocking_unary_unary(self,
self, group, method, request, timeout, metadata=None, group,
with_call=None, protocol_options=None): method,
request_serializer = self._request_serializers.get((group, method,)) request,
response_deserializer = self._response_deserializers.get((group, method,)) timeout,
return _blocking_unary_unary( metadata=None,
self._channel, group, method, timeout, with_call, protocol_options, with_call=None,
metadata, self._metadata_transformer, request, request_serializer, protocol_options=None):
response_deserializer) request_serializer = self._request_serializers.get((
group,
method,))
response_deserializer = self._response_deserializers.get((
group,
method,))
return _blocking_unary_unary(self._channel, group, method, timeout,
with_call, protocol_options, metadata,
self._metadata_transformer, request,
request_serializer, response_deserializer)
def future_unary_unary( def future_unary_unary(self,
self, group, method, request, timeout, metadata=None, group,
method,
request,
timeout,
metadata=None,
protocol_options=None): protocol_options=None):
request_serializer = self._request_serializers.get((group, method,)) request_serializer = self._request_serializers.get((
response_deserializer = self._response_deserializers.get((group, method,)) group,
return _future_unary_unary( method,))
self._channel, group, method, timeout, protocol_options, metadata, response_deserializer = self._response_deserializers.get((
self._metadata_transformer, request, request_serializer, group,
response_deserializer) method,))
return _future_unary_unary(self._channel, group, method, timeout,
protocol_options, metadata,
self._metadata_transformer, request,
request_serializer, response_deserializer)
def inline_unary_stream( def inline_unary_stream(self,
self, group, method, request, timeout, metadata=None, group,
method,
request,
timeout,
metadata=None,
protocol_options=None): protocol_options=None):
request_serializer = self._request_serializers.get((group, method,)) request_serializer = self._request_serializers.get((
response_deserializer = self._response_deserializers.get((group, method,)) group,
return _unary_stream( method,))
self._channel, group, method, timeout, protocol_options, metadata, response_deserializer = self._response_deserializers.get((
self._metadata_transformer, request, request_serializer, group,
response_deserializer) method,))
return _unary_stream(self._channel, group, method, timeout,
protocol_options, metadata,
self._metadata_transformer, request,
request_serializer, response_deserializer)
def blocking_stream_unary( def blocking_stream_unary(self,
self, group, method, request_iterator, timeout, metadata=None, group,
with_call=None, protocol_options=None): method,
request_serializer = self._request_serializers.get((group, method,)) request_iterator,
response_deserializer = self._response_deserializers.get((group, method,)) timeout,
metadata=None,
with_call=None,
protocol_options=None):
request_serializer = self._request_serializers.get((
group,
method,))
response_deserializer = self._response_deserializers.get((
group,
method,))
return _blocking_stream_unary( return _blocking_stream_unary(
self._channel, group, method, timeout, with_call, protocol_options, self._channel, group, method, timeout, with_call, protocol_options,
metadata, self._metadata_transformer, request_iterator, metadata, self._metadata_transformer, request_iterator,
request_serializer, response_deserializer) request_serializer, response_deserializer)
def future_stream_unary( def future_stream_unary(self,
self, group, method, request_iterator, timeout, metadata=None, group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None): protocol_options=None):
request_serializer = self._request_serializers.get((group, method,)) request_serializer = self._request_serializers.get((
response_deserializer = self._response_deserializers.get((group, method,)) group,
method,))
response_deserializer = self._response_deserializers.get((
group,
method,))
return _future_stream_unary( return _future_stream_unary(
self._channel, group, method, timeout, protocol_options, metadata, self._channel, group, method, timeout, protocol_options, metadata,
self._metadata_transformer, request_iterator, request_serializer, self._metadata_transformer, request_iterator, request_serializer,
response_deserializer) response_deserializer)
def inline_stream_stream( def inline_stream_stream(self,
self, group, method, request_iterator, timeout, metadata=None, group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None): protocol_options=None):
request_serializer = self._request_serializers.get((group, method,)) request_serializer = self._request_serializers.get((
response_deserializer = self._response_deserializers.get((group, method,)) group,
return _stream_stream( method,))
self._channel, group, method, timeout, protocol_options, metadata, response_deserializer = self._response_deserializers.get((
self._metadata_transformer, request_iterator, request_serializer, group,
response_deserializer) method,))
return _stream_stream(self._channel, group, method, timeout,
protocol_options, metadata,
self._metadata_transformer, request_iterator,
request_serializer, response_deserializer)
def event_unary_unary( def event_unary_unary(self,
self, group, method, request, receiver, abortion_callback, timeout, group,
metadata=None, protocol_options=None): method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
def event_unary_stream( def event_unary_stream(self,
self, group, method, request, receiver, abortion_callback, timeout, group,
metadata=None, protocol_options=None): method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
def event_stream_unary( def event_stream_unary(self,
self, group, method, receiver, abortion_callback, timeout, group,
metadata=None, protocol_options=None): method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
def event_stream_stream( def event_stream_stream(self,
self, group, method, receiver, abortion_callback, timeout, group,
metadata=None, protocol_options=None): method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
raise NotImplementedError() raise NotImplementedError()
    def unary_unary(self, group, method):
        request_serializer = self._request_serializers.get((
            group,
            method,))
        response_deserializer = self._response_deserializers.get((
            group,
            method,))
        return _UnaryUnaryMultiCallable(
            self._channel, group, method, self._metadata_transformer,
            request_serializer, response_deserializer)

    def unary_stream(self, group, method):
        request_serializer = self._request_serializers.get((
            group,
            method,))
        response_deserializer = self._response_deserializers.get((
            group,
            method,))
        return _UnaryStreamMultiCallable(
            self._channel, group, method, self._metadata_transformer,
            request_serializer, response_deserializer)

    def stream_unary(self, group, method):
        request_serializer = self._request_serializers.get((
            group,
            method,))
        response_deserializer = self._response_deserializers.get((
            group,
            method,))
        return _StreamUnaryMultiCallable(
            self._channel, group, method, self._metadata_transformer,
            request_serializer, response_deserializer)

    def stream_stream(self, group, method):
        request_serializer = self._request_serializers.get((
            group,
            method,))
        response_deserializer = self._response_deserializers.get((
            group,
            method,))
        return _StreamStreamMultiCallable(
            self._channel, group, method, self._metadata_transformer,
            request_serializer, response_deserializer)
@@ -541,7 +667,8 @@ class _DynamicStub(face.DynamicStub):
        elif method_cardinality is cardinality.Cardinality.STREAM_STREAM:
            return self._generic_stub.stream_stream(self._group, attr)
        else:
            raise AttributeError('_DynamicStub object has no attribute "%s"!' %
                                 attr)

    def __enter__(self):
        return self

@@ -550,19 +677,14 @@ class _DynamicStub(face.DynamicStub):
        return False

def generic_stub(channel, host, metadata_transformer, request_serializers,
                 response_deserializers):
    return _GenericStub(channel, metadata_transformer, request_serializers,
                        response_deserializers)


def dynamic_stub(channel, service, cardinalities, host, metadata_transformer,
                 request_serializers, response_deserializers):
    return _DynamicStub(
        _GenericStub(channel, metadata_transformer, request_serializers,
                     response_deserializers), service, cardinalities)

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Affords a connectivity-state-listenable channel."""

import threading

@@ -41,8 +40,9 @@ _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
    'Exception calling channel subscription callback!')
_LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
    state: connectivity
    for state, connectivity in zip(_types.ConnectivityState,
                                   interfaces.ChannelConnectivity)
}
@@ -85,7 +85,9 @@ class ConnectivityChannel(object):

    def _spawn_delivery(self, connectivity, callbacks):
        delivering_thread = threading.Thread(
            target=self._deliver, args=(
                connectivity,
                callbacks,))
        delivering_thread.start()
        self._delivering = True
@@ -97,16 +99,18 @@ class ConnectivityChannel(object):
                self._connectivity = _LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
                    low_connectivity]
                callbacks = tuple(
                    callback
                    for callback, unused_but_known_to_be_none_connectivity in
                    self._callbacks_and_connectivities)
                for callback_and_connectivity in self._callbacks_and_connectivities:
                    callback_and_connectivity[1] = self._connectivity
                if callbacks:
                    self._spawn_delivery(self._connectivity, callbacks)
        completion_queue = _low.CompletionQueue()
        while True:
            low_channel.watch_connectivity_state(low_connectivity,
                                                 time.time() + 0.2,
                                                 completion_queue, None)
            event = completion_queue.next()
            with self._lock:
                if not self._callbacks_and_connectivities and not self._try_to_connect:

@@ -117,7 +121,8 @@ class ConnectivityChannel(object):
                try_to_connect = self._try_to_connect
                self._try_to_connect = False
            if event.success or try_to_connect:
                low_connectivity = low_channel.check_connectivity_state(
                    try_to_connect)
                with self._lock:
                    self._connectivity = _LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
                        low_connectivity]
@@ -146,8 +151,8 @@ class ConnectivityChannel(object):

    def unsubscribe(self, callback):
        with self._lock:
            for index, (subscribed_callback, unused_connectivity
                       ) in enumerate(self._callbacks_and_connectivities):
                if callback == subscribed_callback:
                    self._callbacks_and_connectivities.pop(index)
                    break
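For orientation, the machinery above is what backs the beta Channel's subscribe/unsubscribe hooks. A minimal usage sketch, assuming a server at localhost:50051 (the address and callback are illustrative, not part of this diff):

    from grpc.beta import implementations

    def _on_connectivity(connectivity):
        # Invoked on the delivery thread spawned by _spawn_delivery above.
        print('channel connectivity is now', connectivity)

    channel = implementations.insecure_channel('localhost', 50051)
    channel.subscribe(_on_connectivity, try_to_connect=True)
    # ... issue RPCs ...
    channel.unsubscribe(_on_connectivity)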

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Translates gRPC's server-side API into gRPC's server-side Beta API."""

import collections

@@ -96,15 +95,20 @@ class _FaceServicerContext(face.ServicerContext):

def _adapt_unary_request_inline(unary_request_inline):

    def adaptation(request, servicer_context):
        return unary_request_inline(request,
                                    _FaceServicerContext(servicer_context))

    return adaptation


def _adapt_stream_request_inline(stream_request_inline):

    def adaptation(request_iterator, servicer_context):
        return stream_request_inline(request_iterator,
                                     _FaceServicerContext(servicer_context))

    return adaptation
@@ -165,6 +169,7 @@ class _Callback(stream.Consumer):

def _run_request_pipe_thread(request_iterator, request_consumer,
                             servicer_context):
    thread_joined = threading.Event()

    def pipe_requests():
        for request in request_iterator:
            if not servicer_context.is_active() or thread_joined.is_set():
@@ -183,116 +188,132 @@ def _run_request_pipe_thread(request_iterator, request_consumer,

def _adapt_unary_unary_event(unary_unary_event):

    def adaptation(request, servicer_context):
        callback = _Callback()
        if not servicer_context.add_callback(callback.cancel):
            raise abandonment.Abandoned()
        unary_unary_event(request, callback.consume_and_terminate,
                          _FaceServicerContext(servicer_context))
        return callback.draw_all_values()[0]

    return adaptation


def _adapt_unary_stream_event(unary_stream_event):

    def adaptation(request, servicer_context):
        callback = _Callback()
        if not servicer_context.add_callback(callback.cancel):
            raise abandonment.Abandoned()
        unary_stream_event(request, callback,
                           _FaceServicerContext(servicer_context))
        while True:
            response = callback.draw_one_value()
            if response is None:
                return
            else:
                yield response

    return adaptation


def _adapt_stream_unary_event(stream_unary_event):

    def adaptation(request_iterator, servicer_context):
        callback = _Callback()
        if not servicer_context.add_callback(callback.cancel):
            raise abandonment.Abandoned()
        request_consumer = stream_unary_event(
            callback.consume_and_terminate,
            _FaceServicerContext(servicer_context))
        _run_request_pipe_thread(request_iterator, request_consumer,
                                 servicer_context)
        return callback.draw_all_values()[0]

    return adaptation


def _adapt_stream_stream_event(stream_stream_event):

    def adaptation(request_iterator, servicer_context):
        callback = _Callback()
        if not servicer_context.add_callback(callback.cancel):
            raise abandonment.Abandoned()
        request_consumer = stream_stream_event(
            callback, _FaceServicerContext(servicer_context))
        _run_request_pipe_thread(request_iterator, request_consumer,
                                 servicer_context)
        while True:
            response = callback.draw_one_value()
            if response is None:
                return
            else:
                yield response

    return adaptation
class _SimpleMethodHandler(
        collections.namedtuple('_MethodHandler', (
            'request_streaming',
            'response_streaming',
            'request_deserializer',
            'response_serializer',
            'unary_unary',
            'unary_stream',
            'stream_unary',
            'stream_stream',)), grpc.RpcMethodHandler):
    pass
def _simple_method_handler(implementation, request_deserializer,
                           response_serializer):
    if implementation.style is style.Service.INLINE:
        if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
            return _SimpleMethodHandler(
                False, False, request_deserializer, response_serializer,
                _adapt_unary_request_inline(implementation.unary_unary_inline),
                None, None, None)
        elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
            return _SimpleMethodHandler(
                False, True, request_deserializer, response_serializer, None,
                _adapt_unary_request_inline(implementation.unary_stream_inline),
                None, None)
        elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
            return _SimpleMethodHandler(True, False, request_deserializer,
                                        response_serializer, None, None,
                                        _adapt_stream_request_inline(
                                            implementation.stream_unary_inline),
                                        None)
        elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
            return _SimpleMethodHandler(
                True, True, request_deserializer, response_serializer, None,
                None, None,
                _adapt_stream_request_inline(
                    implementation.stream_stream_inline))
    elif implementation.style is style.Service.EVENT:
        if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
            return _SimpleMethodHandler(
                False, False, request_deserializer, response_serializer,
                _adapt_unary_unary_event(implementation.unary_unary_event),
                None, None, None)
        elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
            return _SimpleMethodHandler(
                False, True, request_deserializer, response_serializer, None,
                _adapt_unary_stream_event(implementation.unary_stream_event),
                None, None)
        elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
            return _SimpleMethodHandler(
                True, False, request_deserializer, response_serializer, None,
                None,
                _adapt_stream_unary_event(implementation.stream_unary_event),
                None)
        elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
            return _SimpleMethodHandler(
                True, True, request_deserializer, response_serializer, None,
                None, None,
                _adapt_stream_stream_event(implementation.stream_stream_event))
def _flatten_method_pair_map(method_pair_map):

@@ -306,8 +327,7 @@ def _flatten_method_pair_map(method_pair_map):
class _GenericRpcHandler(grpc.GenericRpcHandler):

    def __init__(self, method_implementations, multi_method_implementation,
                 request_deserializers, response_serializers):
        self._method_implementations = _flatten_method_pair_map(
            method_implementations)
@@ -360,16 +380,18 @@ class _Server(interfaces.Server):
        return False


def server(service_implementations, multi_method_implementation,
           request_deserializers, response_serializers, thread_pool,
           thread_pool_size):
    generic_rpc_handler = _GenericRpcHandler(
        service_implementations, multi_method_implementation,
        request_deserializers, response_serializers)
    if thread_pool is None:
        effective_thread_pool = logging_pool.pool(_DEFAULT_POOL_SIZE
                                                  if thread_pool_size is None
                                                  else thread_pool_size)
    else:
        effective_thread_pool = thread_pool
    return _Server(
        grpc.server(
            effective_thread_pool, handlers=(generic_rpc_handler,)))

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Entry points into the Beta API of gRPC Python."""

# threading is referenced from specification in this module.

@@ -43,7 +42,6 @@ from grpc.beta import interfaces
from grpc.framework.common import cardinality  # pylint: disable=unused-import
from grpc.framework.interfaces.face import face  # pylint: disable=unused-import

ChannelCredentials = grpc.ChannelCredentials
ssl_channel_credentials = grpc.ssl_channel_credentials
CallCredentials = grpc.CallCredentials
@@ -61,6 +59,7 @@ def google_call_credentials(credentials):
    """
    return metadata_call_credentials(_auth.GoogleCallCredentials(credentials))


access_token_call_credentials = grpc.access_token_call_credentials
composite_call_credentials = grpc.composite_call_credentials
composite_channel_credentials = grpc.composite_channel_credentials
@@ -113,8 +112,8 @@ def insecure_channel(host, port):
    Returns:
      A Channel to the remote host through which RPCs may be conducted.
    """
    channel = grpc.insecure_channel(host
                                    if port is None else '%s:%d' % (host, port))
    return Channel(channel)
@@ -130,8 +129,8 @@ def secure_channel(host, port, channel_credentials):
    Returns:
      A secure Channel to the remote host through which RPCs may be conducted.
    """
    channel = grpc.secure_channel(host if port is None else
                                  '%s:%d' % (host, port), channel_credentials)
    return Channel(channel)
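For reference, a minimal sketch of the two channel constructors reflowed above; the target address and certificate path are illustrative:

    from grpc.beta import implementations

    insecure_channel = implementations.insecure_channel('localhost', 50051)

    root_certificates = open('roots.pem', 'rb').read()  # illustrative path
    credentials = implementations.ssl_channel_credentials(root_certificates)
    secure_channel = implementations.secure_channel('greeter.example.com', 443,
                                                    credentials)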
@@ -143,8 +142,7 @@ class StubOptions(object):
    functions.
    """

    def __init__(self, host, request_serializers, response_deserializers,
                 metadata_transformer, thread_pool, thread_pool_size):
        self.host = host
        self.request_serializers = request_serializers

@@ -153,13 +151,16 @@ class StubOptions(object):
        self.thread_pool = thread_pool
        self.thread_pool_size = thread_pool_size


_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None)
def stub_options(host=None,
                 request_serializers=None,
                 response_deserializers=None,
                 metadata_transformer=None,
                 thread_pool=None,
                 thread_pool_size=None):
    """Creates a StubOptions value to be passed at stub creation.

    All parameters are optional and should always be passed by keyword.

@@ -180,8 +181,7 @@ def stub_options(
    Returns:
      A StubOptions value created from the passed parameters.
    """
    return StubOptions(host, request_serializers, response_deserializers,
                       metadata_transformer, thread_pool, thread_pool_size)
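A short sketch of building StubOptions for the stub constructors that follow; the metadata transformer shown is illustrative and simply appends a header to the outgoing metadata:

    from grpc.beta import implementations

    def _attach_auth(metadata):
        return list(metadata or ()) + [('authorization', 'Bearer my-token')]

    options = implementations.stub_options(
        metadata_transformer=_attach_auth, thread_pool_size=4)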
@@ -198,7 +198,8 @@ def generic_stub(channel, options=None):
    effective_options = _EMPTY_STUB_OPTIONS if options is None else options
    return _client_adaptations.generic_stub(
        channel._channel,  # pylint: disable=protected-access
        effective_options.host,
        effective_options.metadata_transformer,
        effective_options.request_serializers,
        effective_options.response_deserializers)
@@ -220,7 +221,9 @@ def dynamic_stub(channel, service, cardinalities, options=None):
    effective_options = StubOptions() if options is None else options
    return _client_adaptations.dynamic_stub(
        channel._channel,  # pylint: disable=protected-access
        service,
        cardinalities,
        effective_options.host,
        effective_options.metadata_transformer,
        effective_options.request_serializers,
        effective_options.response_deserializers)
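A sketch of creating and invoking a dynamic stub through the helper above; the service name, method name, and request object stand in for generated code:

    from grpc.beta import implementations
    from grpc.framework.common import cardinality

    channel = implementations.insecure_channel('localhost', 50051)
    cardinalities = {'SayHello': cardinality.Cardinality.UNARY_UNARY}
    stub = implementations.dynamic_stub(channel, 'helloworld.Greeter',
                                        cardinalities)
    response = stub.SayHello(request, 10)  # request: a generated protobuf message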
@@ -238,10 +241,9 @@ class ServerOptions(object):
    functions.
    """

    def __init__(self, multi_method_implementation, request_deserializers,
                 response_serializers, thread_pool, thread_pool_size,
                 default_timeout, maximum_timeout):
        self.multi_method_implementation = multi_method_implementation
        self.request_deserializers = request_deserializers
        self.response_serializers = response_serializers

@@ -250,14 +252,17 @@ class ServerOptions(object):
        self.default_timeout = default_timeout
        self.maximum_timeout = maximum_timeout


_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None)
def server_options(multi_method_implementation=None,
                   request_deserializers=None,
                   response_serializers=None,
                   thread_pool=None,
                   thread_pool_size=None,
                   default_timeout=None,
                   maximum_timeout=None):
    """Creates a ServerOptions value to be passed at server creation.

    All parameters are optional and should always be passed by keyword.

@@ -282,9 +287,9 @@ def server_options(
    Returns:
      A ServerOptions value created from the passed parameters.
    """
    return ServerOptions(multi_method_implementation, request_deserializers,
                         response_serializers, thread_pool, thread_pool_size,
                         default_timeout, maximum_timeout)


def server(service_implementations, options=None):
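A sketch of standing up a beta server with the options helper above; method_implementations is the (group, method) mapping normally produced by generated code (see the Face utilities later in this diff):

    from grpc.beta import implementations

    server = implementations.server(
        method_implementations,
        options=implementations.server_options(thread_pool_size=10))
    server.add_insecure_port('[::]:50051')
    server.start()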

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Constants and interfaces of the Beta API of gRPC Python."""

import abc

@@ -69,6 +68,7 @@ def grpc_call_options(disable_compression=False, credentials=None):
    """
    return GRPCCallOptions(disable_compression, None, credentials)


GRPCAuthMetadataContext = grpc.AuthMetadataContext
GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for the gRPC Python Beta API."""

import threading

@@ -161,4 +160,3 @@ def channel_ready_future(channel):
    ready_future = _ChannelReadyFuture(channel)
    ready_future.start()
    return ready_future
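For reference, channel_ready_future is typically used to block until a freshly created channel has connected; a minimal sketch with an illustrative address:

    from grpc.beta import implementations, utilities

    channel = implementations.insecure_channel('localhost', 50051)
    ready_future = utilities.channel_ready_future(channel)
    ready_future.result(timeout=10)  # returns once ready, raises on timeout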

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines an enum for classifying RPC methods by streaming semantics."""

import enum

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines an enum for classifying RPC methods by control flow semantics."""

import enum

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for indicating abandonment of computation."""

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for working with callables."""

import abc
@@ -59,15 +58,15 @@ class Outcome(six.with_metaclass(abc.ABCMeta)):

class _EasyOutcome(
        collections.namedtuple('_EasyOutcome',
                               ['kind', 'return_value', 'exception']), Outcome):
    """A trivial implementation of Outcome."""


def _call_logging_exceptions(behavior, message, *args, **kwargs):
    try:
        return _EasyOutcome(Outcome.Kind.RETURNED,
                            behavior(*args, **kwargs), None)
    except Exception as e:  # pylint: disable=broad-except
        logging.exception(message)
        return _EasyOutcome(Outcome.Kind.RAISED, None, e)
@@ -86,9 +85,11 @@ def with_exceptions_logged(behavior, message):
      future.Outcome describing whether the given behavior returned a value or
      raised an exception.
    """

    @functools.wraps(behavior)
    def wrapped_behavior(*args, **kwargs):
        return _call_logging_exceptions(behavior, message, *args, **kwargs)

    return wrapped_behavior
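A small sketch of the wrapper above: the wrapped callable never raises, and the returned Outcome records whether the behavior returned or raised:

    from grpc.framework.foundation import callable_util

    def _divide(numerator, denominator):
        return numerator / denominator

    safe_divide = callable_util.with_exceptions_logged(_divide, 'division failed!')
    outcome = safe_divide(1, 0)  # the ZeroDivisionError is logged, not raised
    if outcome.kind is callable_util.Outcome.Kind.RAISED:
        print('raised:', outcome.exception)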

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A Future interface.

Python doesn't have a Future interface in its standard library. In the absence

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A thread pool that logs exceptions raised by tasks executed within it."""

import logging
@@ -36,13 +35,16 @@ from concurrent import futures

def _wrap(behavior):
    """Wraps an arbitrary callable behavior in exception-logging."""

    def _wrapping(*args, **kwargs):
        try:
            return behavior(*args, **kwargs)
        except Exception as e:
            logging.exception(
                'Unexpected exception from %s executed in logging pool!',
                behavior)
            raise

    return _wrapping
@@ -62,8 +64,9 @@ class _LoggingPool(object):
        return self._backing_pool.submit(_wrap(fn), *args, **kwargs)

    def map(self, func, *iterables, **kwargs):
        return self._backing_pool.map(_wrap(func),
                                      *iterables,
                                      timeout=kwargs.get('timeout', None))

    def shutdown(self, wait=True):
        self._backing_pool.shutdown(wait=wait)
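A usage sketch of the pool whose map method was reflowed above; it behaves like a regular futures pool except that task exceptions are logged:

    from grpc.framework.foundation import logging_pool

    pool = logging_pool.pool(4)  # max_workers
    squares = list(pool.map(lambda value: value * value, range(5)))
    total = pool.submit(sum, range(10)).result()
    pool.shutdown(wait=True)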

@@ -26,13 +26,13 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Interfaces related to streams of values or objects."""

import abc

import six


class Consumer(six.with_metaclass(abc.ABCMeta)):
    """Interface for consumers of finite streams of values or objects."""

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Helpful utilities related to the stream module."""

import logging

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The base interface of RPC Framework.

Implementations of this interface support the conduct of "operations":

@@ -166,8 +165,10 @@ class Operator(six.with_metaclass(abc.ABCMeta)):
    """An interface through which to participate in an operation."""

    @abc.abstractmethod
    def advance(self,
                initial_metadata=None,
                payload=None,
                completion=None,
                allowance=None):
        """Progresses the operation.

@@ -183,6 +184,7 @@ class Operator(six.with_metaclass(abc.ABCMeta)):
        """
        raise NotImplementedError()


class ProtocolReceiver(six.with_metaclass(abc.ABCMeta)):
    """A means of receiving protocol values during an operation."""

@@ -284,9 +286,15 @@ class End(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def operate(self,
                group,
                method,
                subscription,
                timeout,
                initial_metadata=None,
                payload=None,
                completion=None,
                protocol_options=None):
        """Commences an operation.

        Args:

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for use with the base interface of RPC Framework."""

import collections

@@ -34,23 +33,26 @@ import collections
from grpc.framework.interfaces.base import base


class _Completion(base.Completion,
                  collections.namedtuple('_Completion', (
                      'terminal_metadata',
                      'code',
                      'message',))):
    """A trivial implementation of base.Completion."""


class _Subscription(base.Subscription,
                    collections.namedtuple('_Subscription', (
                        'kind',
                        'termination_callback',
                        'allowance',
                        'operator',
                        'protocol_receiver',))):
    """A trivial implementation of base.Subscription."""


_NONE_SUBSCRIPTION = _Subscription(base.Subscription.Kind.NONE, None, None,
                                   None, None)


def completion(terminal_metadata, code, message):

@@ -78,5 +80,5 @@ def full_subscription(operator, protocol_receiver):
      A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given
      base.Operator and base.ProtocolReceiver.
    """
    return _Subscription(base.Subscription.Kind.FULL, None, None, operator,
                         protocol_receiver)

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Interfaces defining the Face layer of RPC Framework."""

import abc

@@ -64,13 +63,18 @@ class NoSuchMethodError(Exception):
        self.method = method

    def __repr__(self):
        return 'face.NoSuchMethodError(%s, %s)' % (
            self.group,
            self.method,)


class Abortion(
        collections.namedtuple('Abortion', (
            'kind',
            'initial_metadata',
            'terminal_metadata',
            'code',
            'details',))):
    """A value describing RPC abortion.

    Attributes:
@@ -119,8 +123,8 @@ class AbortionError(six.with_metaclass(abc.ABCMeta, Exception)):
        self.details = details

    def __str__(self):
        return '%s(code=%s, details="%s")' % (self.__class__.__name__,
                                              self.code, self.details)


class CancellationError(AbortionError):
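For orientation, these exception classes are what a blocking Face-layer invocation surfaces to callers; a hedged sketch in which the stub, method, and request are placeholders:

    from grpc.framework.interfaces.face import face

    try:
        response = stub.SayHello(request, 10)  # placeholder blocking invocation
    except face.ExpirationError:
        print('deadline exceeded')
    except face.AbortionError as error:
        print('RPC aborted: code=%s details=%s' % (error.code, error.details))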
@@ -363,8 +367,11 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
    """Affords invoking a unary-unary RPC in any call style."""

    @abc.abstractmethod
    def __call__(self,
                 request,
                 timeout,
                 metadata=None,
                 with_call=False,
                 protocol_options=None):
        """Synchronously invokes the underlying RPC.

@@ -408,9 +415,13 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event(self,
              request,
              receiver,
              abortion_callback,
              timeout,
              metadata=None,
              protocol_options=None):
        """Asynchronously invokes the underlying RPC.

        Args:
@@ -453,9 +464,13 @@ class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event(self,
              request,
              receiver,
              abortion_callback,
              timeout,
              metadata=None,
              protocol_options=None):
        """Asynchronously invokes the underlying RPC.

        Args:
@@ -479,9 +494,12 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
    """Affords invoking a stream-unary RPC in any call style."""

    @abc.abstractmethod
    def __call__(self,
                 request_iterator,
                 timeout,
                 metadata=None,
                 with_call=False,
                 protocol_options=None):
        """Synchronously invokes the underlying RPC.

        Args:
@@ -504,8 +522,11 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def future(self,
               request_iterator,
               timeout,
               metadata=None,
               protocol_options=None):
        """Asynchronously invokes the underlying RPC.

        Args:
@@ -525,8 +546,11 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event(self,
              receiver,
              abortion_callback,
              timeout,
              metadata=None,
              protocol_options=None):
        """Asynchronously invokes the underlying RPC.
@@ -551,8 +575,11 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
    """Affords invoking a stream-stream RPC in any call style."""

    @abc.abstractmethod
    def __call__(self,
                 request_iterator,
                 timeout,
                 metadata=None,
                 protocol_options=None):
        """Invokes the underlying RPC.

        Args:
@@ -571,8 +598,11 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event(self,
              receiver,
              abortion_callback,
              timeout,
              metadata=None,
              protocol_options=None):
        """Asynchronously invokes the underlying RPC.
@@ -673,9 +703,14 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
    """Affords RPC invocation via generic methods."""

    @abc.abstractmethod
    def blocking_unary_unary(self,
                             group,
                             method,
                             request,
                             timeout,
                             metadata=None,
                             with_call=False,
                             protocol_options=None):
        """Invokes a unary-request-unary-response method.

        This method blocks until either returning the response value of the RPC
@@ -703,8 +738,12 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def future_unary_unary(self,
                           group,
                           method,
                           request,
                           timeout,
                           metadata=None,
                           protocol_options=None):
        """Invokes a unary-request-unary-response method.
@@ -726,8 +765,12 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def inline_unary_stream(self,
                            group,
                            method,
                            request,
                            timeout,
                            metadata=None,
                            protocol_options=None):
        """Invokes a unary-request-stream-response method.
@@ -748,9 +791,14 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def blocking_stream_unary(self,
                              group,
                              method,
                              request_iterator,
                              timeout,
                              metadata=None,
                              with_call=False,
                              protocol_options=None):
        """Invokes a stream-request-unary-response method.

        This method blocks until either returning the response value of the RPC
@@ -778,8 +826,12 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def future_stream_unary(self,
                            group,
                            method,
                            request_iterator,
                            timeout,
                            metadata=None,
                            protocol_options=None):
        """Invokes a stream-request-unary-response method.
@@ -801,8 +853,12 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def inline_stream_stream(self,
                             group,
                             method,
                             request_iterator,
                             timeout,
                             metadata=None,
                             protocol_options=None):
        """Invokes a stream-request-stream-response method.
@@ -823,9 +879,15 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event_unary_unary(self,
                          group,
                          method,
                          request,
                          receiver,
                          abortion_callback,
                          timeout,
                          metadata=None,
                          protocol_options=None):
        """Event-driven invocation of a unary-request-unary-response method.

        Args:
@@ -846,9 +908,15 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event_unary_stream(self,
                           group,
                           method,
                           request,
                           receiver,
                           abortion_callback,
                           timeout,
                           metadata=None,
                           protocol_options=None):
        """Event-driven invocation of a unary-request-stream-response method.

        Args:
@@ -869,9 +937,14 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event_stream_unary(self,
                           group,
                           method,
                           receiver,
                           abortion_callback,
                           timeout,
                           metadata=None,
                           protocol_options=None):
        """Event-driven invocation of a unary-request-unary-response method.

        Args:
@@ -892,9 +965,14 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
        raise NotImplementedError()

    @abc.abstractmethod
    def event_stream_stream(self,
                            group,
                            method,
                            receiver,
                            abortion_callback,
                            timeout,
                            metadata=None,
                            protocol_options=None):
        """Event-driven invocation of a unary-request-stream-response method.

        Args:

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for RPC Framework's Face interface."""

import collections

@@ -38,13 +37,19 @@ from grpc.framework.foundation import stream  # pylint: disable=unused-import
from grpc.framework.interfaces.face import face


class _MethodImplementation(face.MethodImplementation,
                            collections.namedtuple('_MethodImplementation', [
                                'cardinality',
                                'style',
                                'unary_unary_inline',
                                'unary_stream_inline',
                                'stream_unary_inline',
                                'stream_stream_inline',
                                'unary_unary_event',
                                'unary_stream_event',
                                'stream_unary_event',
                                'stream_stream_event',
                            ])):
    pass
@@ -59,9 +64,9 @@ def unary_unary_inline(behavior):
    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
                                 style.Service.INLINE, behavior, None, None,
                                 None, None, None, None, None)


def unary_stream_inline(behavior):
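A sketch of how generated beta servicer code uses these helpers to build the (group, method) implementation map consumed by implementations.server; the servicer and names are placeholders:

    from grpc.framework.interfaces.face import utilities as face_utilities

    method_implementations = {
        ('helloworld.Greeter', 'SayHello'):
            face_utilities.unary_unary_inline(servicer.SayHello),
        ('helloworld.Greeter', 'SayHelloStream'):
            face_utilities.unary_stream_inline(servicer.SayHelloStream),
    }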
@ -75,9 +80,9 @@ def unary_stream_inline(behavior):
Returns: Returns:
A face.MethodImplementation derived from the given behavior. A face.MethodImplementation derived from the given behavior.
""" """
return _MethodImplementation( return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
cardinality.Cardinality.UNARY_STREAM, style.Service.INLINE, None, style.Service.INLINE, None, behavior, None,
behavior, None, None, None, None, None, None) None, None, None, None, None)
def stream_unary_inline(behavior): def stream_unary_inline(behavior):
@ -91,9 +96,9 @@ def stream_unary_inline(behavior):
Returns: Returns:
A face.MethodImplementation derived from the given behavior. A face.MethodImplementation derived from the given behavior.
""" """
return _MethodImplementation( return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
cardinality.Cardinality.STREAM_UNARY, style.Service.INLINE, None, None, style.Service.INLINE, None, None, behavior,
behavior, None, None, None, None, None) None, None, None, None, None)
def stream_stream_inline(behavior): def stream_stream_inline(behavior):
@ -107,9 +112,9 @@ def stream_stream_inline(behavior):
Returns: Returns:
A face.MethodImplementation derived from the given behavior. A face.MethodImplementation derived from the given behavior.
""" """
return _MethodImplementation( return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
cardinality.Cardinality.STREAM_STREAM, style.Service.INLINE, None, None, style.Service.INLINE, None, None, None,
None, behavior, None, None, None, None) behavior, None, None, None, None)
def unary_unary_event(behavior): def unary_unary_event(behavior):
@ -123,9 +128,9 @@ def unary_unary_event(behavior):
Returns: Returns:
A face.MethodImplementation derived from the given behavior. A face.MethodImplementation derived from the given behavior.
""" """
return _MethodImplementation( return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
cardinality.Cardinality.UNARY_UNARY, style.Service.EVENT, None, None, style.Service.EVENT, None, None, None, None,
None, None, behavior, None, None, None) behavior, None, None, None)
def unary_stream_event(behavior): def unary_stream_event(behavior):
@ -139,9 +144,9 @@ def unary_stream_event(behavior):
Returns: Returns:
A face.MethodImplementation derived from the given behavior. A face.MethodImplementation derived from the given behavior.
""" """
return _MethodImplementation( return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
cardinality.Cardinality.UNARY_STREAM, style.Service.EVENT, None, None, style.Service.EVENT, None, None, None, None,
None, None, None, behavior, None, None) None, behavior, None, None)
def stream_unary_event(behavior): def stream_unary_event(behavior):
@ -156,9 +161,9 @@ def stream_unary_event(behavior):
Returns: Returns:
A face.MethodImplementation derived from the given behavior. A face.MethodImplementation derived from the given behavior.
""" """
return _MethodImplementation( return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
cardinality.Cardinality.STREAM_UNARY, style.Service.EVENT, None, None, style.Service.EVENT, None, None, None, None,
None, None, None, None, behavior, None) None, None, behavior, None)
def stream_stream_event(behavior): def stream_stream_event(behavior):
@ -173,6 +178,6 @@ def stream_stream_event(behavior):
Returns: Returns:
A face.MethodImplementation derived from the given behavior. A face.MethodImplementation derived from the given behavior.
""" """
return _MethodImplementation( return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
cardinality.Cardinality.STREAM_STREAM, style.Service.EVENT, None, None, style.Service.EVENT, None, None, None, None,
None, None, None, None, None, behavior) None, None, None, behavior)

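Each helper above slots a single behavior into the matching field of _MethodImplementation and leaves every other slot None. A small sketch of that mapping; the echo behavior and its (request, context) signature are assumptions for illustration only:

from grpc.framework.common import cardinality
from grpc.framework.common import style
from grpc.framework.interfaces.face import utilities


def echo(request, context):
    # Hypothetical inline unary-unary behavior; the signature is an assumption.
    return request


impl = utilities.unary_unary_inline(echo)
assert impl.cardinality is cardinality.Cardinality.UNARY_UNARY
assert impl.style is style.Service.INLINE
assert impl.unary_unary_inline is echo
assert impl.unary_stream_event is None  # every other slot stays None
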
@ -27,7 +27,6 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os import os
import os.path import os.path
import shutil import shutil
@ -38,7 +37,6 @@ from distutils import errors
import commands import commands
C_PYTHON_DEV = """ C_PYTHON_DEV = """
#include <Python.h> #include <Python.h>
int main(int argc, char **argv) { return 0; } int main(int argc, char **argv) { return 0; }
@ -55,9 +53,8 @@ Could not find <Python.h>. This could mean the following:
(check your environment variables or try re-installing?) (check your environment variables or try re-installing?)
""" """
C_CHECKS = { C_CHECKS = {C_PYTHON_DEV: C_PYTHON_DEV_ERROR_MESSAGE,}
C_PYTHON_DEV: C_PYTHON_DEV_ERROR_MESSAGE,
}
def _compile(compiler, source_string): def _compile(compiler, source_string):
tempdir = tempfile.mkdtemp() tempdir = tempfile.mkdtemp()
@ -71,6 +68,7 @@ def _compile(compiler, source_string):
finally: finally:
shutil.rmtree(tempdir) shutil.rmtree(tempdir)
def _expect_compile(compiler, source_string, error_message): def _expect_compile(compiler, source_string, error_message):
if _compile(compiler, source_string) is not None: if _compile(compiler, source_string) is not None:
sys.stderr.write(error_message) sys.stderr.write(error_message)
@ -78,6 +76,7 @@ def _expect_compile(compiler, source_string, error_message):
"Diagnostics found a compilation environment issue:\n{}" "Diagnostics found a compilation environment issue:\n{}"
.format(error_message)) .format(error_message))
def diagnose_compile_error(build_ext, error): def diagnose_compile_error(build_ext, error):
"""Attempt to diagnose an error during compilation.""" """Attempt to diagnose an error during compilation."""
for c_check, message in C_CHECKS.items(): for c_check, message in C_CHECKS.items():
@ -88,17 +87,16 @@ def diagnose_compile_error(build_ext, error):
] ]
for source in python_sources: for source in python_sources:
if not os.path.isfile(source): if not os.path.isfile(source):
raise commands.CommandError( raise commands.CommandError((
("Diagnostics found a missing Python extension source file:\n{}\n\n" "Diagnostics found a missing Python extension source file:\n{}\n\n"
"This is usually because the Cython sources haven't been transpiled " "This is usually because the Cython sources haven't been transpiled "
"into C yet and you're building from source.\n" "into C yet and you're building from source.\n"
"Try setting the environment variable " "Try setting the environment variable "
"`GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking `setup.py` or " "`GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking `setup.py` or "
"when using `pip`, e.g.:\n\n" "when using `pip`, e.g.:\n\n"
"pip install -rrequirements.txt\n" "pip install -rrequirements.txt\n"
"GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .") "GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .").format(source))
.format(source)
)
def diagnose_attribute_error(build_ext, error): def diagnose_attribute_error(build_ext, error):
if any('_needs_stub' in arg for arg in error.args): if any('_needs_stub' in arg for arg in error.args):
@ -106,11 +104,13 @@ def diagnose_attribute_error(build_ext, error):
"We expect a missing `_needs_stub` attribute from older versions of " "We expect a missing `_needs_stub` attribute from older versions of "
"setuptools. Consider upgrading setuptools.") "setuptools. Consider upgrading setuptools.")
_ERROR_DIAGNOSES = { _ERROR_DIAGNOSES = {
errors.CompileError: diagnose_compile_error, errors.CompileError: diagnose_compile_error,
AttributeError: diagnose_attribute_error AttributeError: diagnose_attribute_error
} }
def diagnose_build_ext_error(build_ext, error, formatted): def diagnose_build_ext_error(build_ext, error, formatted):
diagnostic = _ERROR_DIAGNOSES.get(type(error)) diagnostic = _ERROR_DIAGNOSES.get(type(error))
if diagnostic is None: if diagnostic is None:
@ -120,4 +120,3 @@ def diagnose_build_ext_error(build_ext, error, formatted):
"\n\n{}".format(formatted)) "\n\n{}".format(formatted))
else: else:
diagnostic(build_ext, error) diagnostic(build_ext, error)

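The diagnostics above boil down to compiling a tiny probe program and mapping failures to friendly messages. A standalone sketch of that pattern, assuming only the standard distutils compiler API (the probe source and function names here are illustrative, not this module's API):

import os
import shutil
import sys
import tempfile

from distutils import ccompiler
from distutils import sysconfig

PROBE_SOURCE = '#include <Python.h>\nint main(void) { return 0; }\n'


def probe_compiles(source_string):
    """Returns True if the C toolchain can compile the probe program."""
    compiler = ccompiler.new_compiler()
    tempdir = tempfile.mkdtemp()
    source_path = os.path.join(tempdir, 'probe.c')
    try:
        with open(source_path, 'w') as source_file:
            source_file.write(source_string)
        compiler.compile(
            [source_path],
            output_dir=tempdir,
            include_dirs=[sysconfig.get_python_inc()])
        return True
    except Exception:  # CompileError, missing toolchain, and similar failures
        return False
    finally:
        shutil.rmtree(tempdir)


if not probe_compiles(PROBE_SOURCE):
    sys.stderr.write('Python.h was not found; install the Python dev headers.\n')
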
@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Reference implementation for health checking in gRPC Python.""" """Reference implementation for health checking in gRPC Python."""
import threading import threading

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides distutils command classes for the GRPC Python setup process.""" """Provides distutils command classes for the GRPC Python setup process."""
import os import os

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Setup module for the GRPC Python package's optional health checking.""" """Setup module for the GRPC Python package's optional health checking."""
import os import os
@ -41,18 +40,14 @@ os.chdir(os.path.dirname(os.path.abspath(__file__)))
import health_commands import health_commands
import grpc_version import grpc_version
PACKAGE_DIRECTORIES = { PACKAGE_DIRECTORIES = {'': '.',}
'': '.',
}
SETUP_REQUIRES = ( SETUP_REQUIRES = (
'grpcio-tools>={version}'.format(version=grpc_version.VERSION), 'grpcio-tools>={version}'.format(version=grpc_version.VERSION),)
)
INSTALL_REQUIRES = ( INSTALL_REQUIRES = (
'protobuf>=3.0.0', 'protobuf>=3.0.0',
'grpcio>={version}'.format(version=grpc_version.VERSION), 'grpcio>={version}'.format(version=grpc_version.VERSION),)
)
COMMAND_CLASS = { COMMAND_CLASS = {
# Run preprocess from the repository *before* doing any packaging! # Run preprocess from the repository *before* doing any packaging!
@ -68,5 +63,4 @@ setuptools.setup(
packages=setuptools.find_packages('.'), packages=setuptools.find_packages('.'),
install_requires=INSTALL_REQUIRES, install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES, setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS cmdclass=COMMAND_CLASS)
)

@ -26,4 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,4 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Reference implementation for reflection in gRPC Python.""" """Reference implementation for reflection in gRPC Python."""
import threading import threading
@ -39,13 +38,13 @@ from grpc_reflection.v1alpha import reflection_pb2
_POOL = descriptor_pool.Default() _POOL = descriptor_pool.Default()
def _not_found_error(): def _not_found_error():
return reflection_pb2.ServerReflectionResponse( return reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse( error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.NOT_FOUND.value[0], error_code=grpc.StatusCode.NOT_FOUND.value[0],
error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(), error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),))
)
)
def _file_descriptor_response(descriptor): def _file_descriptor_response(descriptor):
proto = descriptor_pb2.FileDescriptorProto() proto = descriptor_pb2.FileDescriptorProto()
@ -53,9 +52,7 @@ def _file_descriptor_response(descriptor):
serialized_proto = proto.SerializeToString() serialized_proto = proto.SerializeToString()
return reflection_pb2.ServerReflectionResponse( return reflection_pb2.ServerReflectionResponse(
file_descriptor_response=reflection_pb2.FileDescriptorResponse( file_descriptor_response=reflection_pb2.FileDescriptorResponse(
file_descriptor_proto=(serialized_proto,) file_descriptor_proto=(serialized_proto,)),)
),
)
class ReflectionServicer(reflection_pb2.ServerReflectionServicer): class ReflectionServicer(reflection_pb2.ServerReflectionServicer):
@ -80,7 +77,8 @@ class ReflectionServicer(reflection_pb2.ServerReflectionServicer):
def _file_containing_symbol(self, fully_qualified_name): def _file_containing_symbol(self, fully_qualified_name):
try: try:
descriptor = self._pool.FindFileContainingSymbol(fully_qualified_name) descriptor = self._pool.FindFileContainingSymbol(
fully_qualified_name)
except KeyError: except KeyError:
return _not_found_error() return _not_found_error()
else: else:
@ -92,9 +90,7 @@ class ReflectionServicer(reflection_pb2.ServerReflectionServicer):
return reflection_pb2.ServerReflectionResponse( return reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse( error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.UNIMPLEMENTED.value[0], error_code=grpc.StatusCode.UNIMPLEMENTED.value[0],
error_message=grpc.StatusCode.UNIMPLEMENTED.value[1].encode(), error_message=grpc.StatusCode.UNIMPLEMENTED.value[1].encode(),))
)
)
def _extension_numbers_of_type(fully_qualified_name): def _extension_numbers_of_type(fully_qualified_name):
# TODO(atash) We're allowed to leave this unsupported according to the # TODO(atash) We're allowed to leave this unsupported according to the
@ -104,26 +100,22 @@ class ReflectionServicer(reflection_pb2.ServerReflectionServicer):
return reflection_pb2.ServerReflectionResponse( return reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse( error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.UNIMPLEMENTED.value[0], error_code=grpc.StatusCode.UNIMPLEMENTED.value[0],
error_message=grpc.StatusCode.UNIMPLEMENTED.value[1].encode(), error_message=grpc.StatusCode.UNIMPLEMENTED.value[1].encode(),))
)
)
def _list_services(self): def _list_services(self):
return reflection_pb2.ServerReflectionResponse( return reflection_pb2.ServerReflectionResponse(
list_services_response=reflection_pb2.ListServiceResponse( list_services_response=reflection_pb2.ListServiceResponse(service=[
service=[
reflection_pb2.ServiceResponse(name=service_name) reflection_pb2.ServiceResponse(name=service_name)
for service_name in self._service_names for service_name in self._service_names
] ]))
)
)
def ServerReflectionInfo(self, request_iterator, context): def ServerReflectionInfo(self, request_iterator, context):
for request in request_iterator: for request in request_iterator:
if request.HasField('file_by_filename'): if request.HasField('file_by_filename'):
yield self._file_by_filename(request.file_by_filename) yield self._file_by_filename(request.file_by_filename)
elif request.HasField('file_containing_symbol'): elif request.HasField('file_containing_symbol'):
yield self._file_containing_symbol(request.file_containing_symbol) yield self._file_containing_symbol(
request.file_containing_symbol)
elif request.HasField('file_containing_extension'): elif request.HasField('file_containing_extension'):
yield self._file_containing_extension( yield self._file_containing_extension(
request.file_containing_extension.containing_type, request.file_containing_extension.containing_type,
@ -137,7 +129,5 @@ class ReflectionServicer(reflection_pb2.ServerReflectionServicer):
yield reflection_pb2.ServerReflectionResponse( yield reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse( error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.INVALID_ARGUMENT.value[0], error_code=grpc.StatusCode.INVALID_ARGUMENT.value[0],
error_message=grpc.StatusCode.INVALID_ARGUMENT.value[1].encode(), error_message=grpc.StatusCode.INVALID_ARGUMENT.value[1]
) .encode(),))
)

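For context, a hedged sketch of registering the servicer above on a server; the ReflectionServicer constructor argument is assumed from the _service_names attribute used above, and add_ServerReflectionServicer_to_server follows the generated-code naming convention of this era rather than being confirmed by the diff:

from concurrent import futures

import grpc
from grpc_reflection.v1alpha import reflection_pb2

server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
# ReflectionServicer as defined above; the keyword argument is an assumption.
servicer = ReflectionServicer(service_names=('helloworld.Greeter',))
reflection_pb2.add_ServerReflectionServicer_to_server(servicer, server)
server.add_insecure_port('[::]:50051')
server.start()
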
@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides distutils command classes for the GRPC Python setup process.""" """Provides distutils command classes for the GRPC Python setup process."""
import os import os
@ -35,7 +34,8 @@ import shutil
import setuptools import setuptools
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__))) ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
HEALTH_PROTO = os.path.join(ROOT_DIR, '../../proto/grpc/reflection/v1alpha/reflection.proto') HEALTH_PROTO = os.path.join(
ROOT_DIR, '../../proto/grpc/reflection/v1alpha/reflection.proto')
class CopyProtoModules(setuptools.Command): class CopyProtoModules(setuptools.Command):
@ -54,7 +54,8 @@ class CopyProtoModules(setuptools.Command):
if os.path.isfile(HEALTH_PROTO): if os.path.isfile(HEALTH_PROTO):
shutil.copyfile( shutil.copyfile(
HEALTH_PROTO, HEALTH_PROTO,
os.path.join(ROOT_DIR, 'grpc_reflection/v1alpha/reflection.proto')) os.path.join(ROOT_DIR,
'grpc_reflection/v1alpha/reflection.proto'))
class BuildPackageProtos(setuptools.Command): class BuildPackageProtos(setuptools.Command):

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Setup module for the GRPC Python package's optional reflection.""" """Setup module for the GRPC Python package's optional reflection."""
import os import os
@ -41,18 +40,14 @@ os.chdir(os.path.dirname(os.path.abspath(__file__)))
import reflection_commands import reflection_commands
import grpc_version import grpc_version
PACKAGE_DIRECTORIES = { PACKAGE_DIRECTORIES = {'': '.',}
'': '.',
}
SETUP_REQUIRES = ( SETUP_REQUIRES = (
'grpcio-tools>={version}'.format(version=grpc_version.VERSION), 'grpcio-tools>={version}'.format(version=grpc_version.VERSION),)
)
INSTALL_REQUIRES = ( INSTALL_REQUIRES = (
'protobuf>=3.0.0', 'protobuf>=3.0.0',
'grpcio>={version}'.format(version=grpc_version.VERSION), 'grpcio>={version}'.format(version=grpc_version.VERSION),)
)
COMMAND_CLASS = { COMMAND_CLASS = {
# Run preprocess from the repository *before* doing any packaging! # Run preprocess from the repository *before* doing any packaging!
@ -68,5 +63,4 @@ setuptools.setup(
packages=setuptools.find_packages('.'), packages=setuptools.find_packages('.'),
install_requires=INSTALL_REQUIRES, install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES, setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS cmdclass=COMMAND_CLASS)
)

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides distutils command classes for the gRPC Python setup process.""" """Provides distutils command classes for the gRPC Python setup process."""
import distutils import distutils
@ -122,8 +121,7 @@ class BuildProtoModules(setuptools.Command):
'--grpc_python_out={}'.format(PROTO_STEM), '--grpc_python_out={}'.format(PROTO_STEM),
] + [path] ] + [path]
if protoc.main(command) != 0: if protoc.main(command) != 0:
sys.stderr.write( sys.stderr.write('warning: Command:\n{}\nFailed'.format(
'warning: Command:\n{}\nFailed'.format(
command)) command))
# Generated proto directories don't include __init__.py, but # Generated proto directories don't include __init__.py, but
@ -177,11 +175,9 @@ class TestLite(setuptools.Command):
class RunInterop(test.test): class RunInterop(test.test):
description = 'run interop test client/server' description = 'run interop test client/server'
user_options = [ user_options = [('args=', 'a', 'pass-thru arguments for the client/server'),
('args=', 'a', 'pass-thru arguments for the client/server'),
('client', 'c', 'flag indicating to run the client'), ('client', 'c', 'flag indicating to run the client'),
('server', 's', 'flag indicating to run the server') ('server', 's', 'flag indicating to run the server')]
]
def initialize_options(self): def initialize_options(self):
self.args = '' self.args = ''
@ -190,11 +186,13 @@ class RunInterop(test.test):
def finalize_options(self): def finalize_options(self):
if self.client and self.server: if self.client and self.server:
raise DistutilsOptionError('you may only specify one of client or server') raise DistutilsOptionError(
'you may only specify one of client or server')
def run(self): def run(self):
if self.distribution.install_requires: if self.distribution.install_requires:
self.distribution.fetch_build_eggs(self.distribution.install_requires) self.distribution.fetch_build_eggs(
self.distribution.install_requires)
if self.distribution.tests_require: if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require) self.distribution.fetch_build_eggs(self.distribution.tests_require)
if self.client: if self.client:

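BuildProtoModules above invokes the protoc bundled with grpc_tools in-process. A minimal sketch of that call with placeholder paths; the flag set mirrors the command assembled above, while the directories and proto file are illustrative:

import sys

from grpc_tools import protoc

PROTO_STEM = 'generated'  # placeholder output directory

exit_code = protoc.main([
    'grpc_tools.protoc',  # argv[0]; conventionally present
    '--proto_path=protos',  # placeholder directory holding the .proto files
    '--python_out={}'.format(PROTO_STEM),  # generated message classes
    '--grpc_python_out={}'.format(PROTO_STEM),  # generated stubs/servicers
    'protos/helloworld.proto',  # placeholder proto file
])
if exit_code != 0:
    sys.stderr.write('warning: protoc failed with exit code {}\n'.format(exit_code))
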
@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A setup module for the gRPC Python package.""" """A setup module for the gRPC Python package."""
import os import os
@ -48,9 +47,7 @@ import grpc_version
LICENSE = '3-clause BSD' LICENSE = '3-clause BSD'
PACKAGE_DIRECTORIES = { PACKAGE_DIRECTORIES = {'': '.',}
'': '.',
}
INSTALL_REQUIRES = ( INSTALL_REQUIRES = (
'coverage>=4.0', 'coverage>=4.0',
@ -61,13 +58,11 @@ INSTALL_REQUIRES = (
'grpcio-health-checking>={version}'.format(version=grpc_version.VERSION), 'grpcio-health-checking>={version}'.format(version=grpc_version.VERSION),
'oauth2client>=1.4.7', 'oauth2client>=1.4.7',
'protobuf>=3.0.0', 'protobuf>=3.0.0',
'six>=1.10', 'six>=1.10',)
)
COMMAND_CLASS = { COMMAND_CLASS = {
# Run `preprocess` *before* doing any packaging! # Run `preprocess` *before* doing any packaging!
'preprocess': commands.GatherProto, 'preprocess': commands.GatherProto,
'build_package_protos': grpc_tools.command.BuildPackageProtos, 'build_package_protos': grpc_tools.command.BuildPackageProtos,
'build_py': commands.BuildPy, 'build_py': commands.BuildPy,
'run_interop': commands.RunInterop, 'run_interop': commands.RunInterop,
@ -80,9 +75,7 @@ PACKAGE_DATA = {
'credentials/server1.key', 'credentials/server1.key',
'credentials/server1.pem', 'credentials/server1.pem',
], ],
'tests.protoc_plugin.protos.invocation_testing': [ 'tests.protoc_plugin.protos.invocation_testing': ['same.proto',],
'same.proto',
],
'tests.protoc_plugin.protos.invocation_testing.split_messages': [ 'tests.protoc_plugin.protos.invocation_testing.split_messages': [
'messages.proto', 'messages.proto',
], ],
@ -94,9 +87,7 @@ PACKAGE_DATA = {
'credentials/server1.key', 'credentials/server1.key',
'credentials/server1.pem', 'credentials/server1.pem',
], ],
'tests': [ 'tests': ['tests.json'],
'tests.json'
],
} }
TEST_SUITE = 'tests' TEST_SUITE = 'tests'
@ -118,5 +109,4 @@ setuptools.setup(
tests_require=TESTS_REQUIRE, tests_require=TESTS_REQUIRE,
test_suite=TEST_SUITE, test_suite=TEST_SUITE,
test_loader=TEST_LOADER, test_loader=TEST_LOADER,
test_runner=TEST_RUNNER, test_runner=TEST_RUNNER,)
)

@ -116,4 +116,5 @@ def iterate_suite_cases(suite):
elif isinstance(item, unittest.TestCase): elif isinstance(item, unittest.TestCase):
yield item yield item
else: else:
raise ValueError('unexpected suite item of type {}'.format(type(item))) raise ValueError('unexpected suite item of type {}'.format(
type(item)))

@ -41,8 +41,10 @@ from six import moves
from tests import _loader from tests import _loader
class CaseResult(collections.namedtuple('CaseResult', [ class CaseResult(
'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback'])): collections.namedtuple('CaseResult', [
'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback'
])):
"""A serializable result of a single test case. """A serializable result of a single test case.
Attributes: Attributes:
@ -69,8 +71,14 @@ class CaseResult(collections.namedtuple('CaseResult', [
EXPECTED_FAILURE = 'expected failure' EXPECTED_FAILURE = 'expected failure'
UNEXPECTED_SUCCESS = 'unexpected success' UNEXPECTED_SUCCESS = 'unexpected success'
def __new__(cls, id=None, name=None, kind=None, stdout=None, stderr=None, def __new__(cls,
skip_reason=None, traceback=None): id=None,
name=None,
kind=None,
stdout=None,
stderr=None,
skip_reason=None,
traceback=None):
"""Helper keyword constructor for the namedtuple. """Helper keyword constructor for the namedtuple.
See this class' attributes for information on the arguments.""" See this class' attributes for information on the arguments."""
@ -94,11 +102,16 @@ class CaseResult(collections.namedtuple('CaseResult', [
pass pass
else: else:
assert False assert False
return super(cls, CaseResult).__new__( return super(cls, CaseResult).__new__(cls, id, name, kind, stdout,
cls, id, name, kind, stdout, stderr, skip_reason, traceback) stderr, skip_reason, traceback)
def updated(self, name=None, kind=None, stdout=None, stderr=None, def updated(self,
skip_reason=None, traceback=None): name=None,
kind=None,
stdout=None,
stderr=None,
skip_reason=None,
traceback=None):
"""Get a new validated CaseResult with the fields updated. """Get a new validated CaseResult with the fields updated.
See this class' attributes for information on the arguments.""" See this class' attributes for information on the arguments."""
@ -108,8 +121,13 @@ class CaseResult(collections.namedtuple('CaseResult', [
stderr = self.stderr if stderr is None else stderr stderr = self.stderr if stderr is None else stderr
skip_reason = self.skip_reason if skip_reason is None else skip_reason skip_reason = self.skip_reason if skip_reason is None else skip_reason
traceback = self.traceback if traceback is None else traceback traceback = self.traceback if traceback is None else traceback
return CaseResult(id=self.id, name=name, kind=kind, stdout=stdout, return CaseResult(
stderr=stderr, skip_reason=skip_reason, id=self.id,
name=name,
kind=kind,
stdout=stdout,
stderr=stderr,
skip_reason=skip_reason,
traceback=traceback) traceback=traceback)
@ -282,9 +300,7 @@ class TerminalResult(CoverageResult):
def startTestRun(self): def startTestRun(self):
"""See unittest.TestResult.startTestRun.""" """See unittest.TestResult.startTestRun."""
super(TerminalResult, self).startTestRun() super(TerminalResult, self).startTestRun()
self.out.write( self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' +
_Colors.HEADER +
'Testing gRPC Python...\n' +
_Colors.END) _Colors.END)
def stopTestRun(self): def stopTestRun(self):
@ -296,57 +312,46 @@ class TerminalResult(CoverageResult):
def addError(self, test, error): def addError(self, test, error):
"""See unittest.TestResult.addError.""" """See unittest.TestResult.addError."""
super(TerminalResult, self).addError(test, error) super(TerminalResult, self).addError(test, error)
self.out.write( self.out.write(_Colors.FAIL + 'ERROR {}\n'.format(test.id()) +
_Colors.FAIL +
'ERROR {}\n'.format(test.id()) +
_Colors.END) _Colors.END)
self.out.flush() self.out.flush()
def addFailure(self, test, error): def addFailure(self, test, error):
"""See unittest.TestResult.addFailure.""" """See unittest.TestResult.addFailure."""
super(TerminalResult, self).addFailure(test, error) super(TerminalResult, self).addFailure(test, error)
self.out.write( self.out.write(_Colors.FAIL + 'FAILURE {}\n'.format(test.id()) +
_Colors.FAIL +
'FAILURE {}\n'.format(test.id()) +
_Colors.END) _Colors.END)
self.out.flush() self.out.flush()
def addSuccess(self, test): def addSuccess(self, test):
"""See unittest.TestResult.addSuccess.""" """See unittest.TestResult.addSuccess."""
super(TerminalResult, self).addSuccess(test) super(TerminalResult, self).addSuccess(test)
self.out.write( self.out.write(_Colors.OK + 'SUCCESS {}\n'.format(test.id()) +
_Colors.OK +
'SUCCESS {}\n'.format(test.id()) +
_Colors.END) _Colors.END)
self.out.flush() self.out.flush()
def addSkip(self, test, reason): def addSkip(self, test, reason):
"""See unittest.TestResult.addSkip.""" """See unittest.TestResult.addSkip."""
super(TerminalResult, self).addSkip(test, reason) super(TerminalResult, self).addSkip(test, reason)
self.out.write( self.out.write(_Colors.INFO + 'SKIP {}\n'.format(test.id()) +
_Colors.INFO +
'SKIP {}\n'.format(test.id()) +
_Colors.END) _Colors.END)
self.out.flush() self.out.flush()
def addExpectedFailure(self, test, error): def addExpectedFailure(self, test, error):
"""See unittest.TestResult.addExpectedFailure.""" """See unittest.TestResult.addExpectedFailure."""
super(TerminalResult, self).addExpectedFailure(test, error) super(TerminalResult, self).addExpectedFailure(test, error)
self.out.write( self.out.write(_Colors.INFO + 'FAILURE_OK {}\n'.format(test.id()) +
_Colors.INFO +
'FAILURE_OK {}\n'.format(test.id()) +
_Colors.END) _Colors.END)
self.out.flush() self.out.flush()
def addUnexpectedSuccess(self, test): def addUnexpectedSuccess(self, test):
"""See unittest.TestResult.addUnexpectedSuccess.""" """See unittest.TestResult.addUnexpectedSuccess."""
super(TerminalResult, self).addUnexpectedSuccess(test) super(TerminalResult, self).addUnexpectedSuccess(test)
self.out.write( self.out.write(_Colors.INFO + 'UNEXPECTED_OK {}\n'.format(test.id()) +
_Colors.INFO +
'UNEXPECTED_OK {}\n'.format(test.id()) +
_Colors.END) _Colors.END)
self.out.flush() self.out.flush()
def _traceback_string(type, value, trace): def _traceback_string(type, value, trace):
"""Generate a descriptive string of a Python exception traceback. """Generate a descriptive string of a Python exception traceback.
@ -362,6 +367,7 @@ def _traceback_string(type, value, trace):
traceback.print_exception(type, value, trace, file=buffer) traceback.print_exception(type, value, trace, file=buffer)
return buffer.getvalue() return buffer.getvalue()
def summary(result): def summary(result):
"""A summary string of a result object. """A summary string of a result object.
@ -372,56 +378,62 @@ def summary(result):
str: The summary string. str: The summary string.
""" """
assert isinstance(result, AugmentedResult) assert isinstance(result, AugmentedResult)
untested = list(result.augmented_results( untested = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED)) lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED))
running = list(result.augmented_results( running = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.RUNNING)) lambda case_result: case_result.kind is CaseResult.Kind.RUNNING))
failures = list(result.augmented_results( failures = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.FAILURE)) lambda case_result: case_result.kind is CaseResult.Kind.FAILURE))
errors = list(result.augmented_results( errors = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.ERROR)) lambda case_result: case_result.kind is CaseResult.Kind.ERROR))
successes = list(result.augmented_results( successes = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS)) lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS))
skips = list(result.augmented_results( skips = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.SKIP)) lambda case_result: case_result.kind is CaseResult.Kind.SKIP))
expected_failures = list(result.augmented_results( expected_failures = list(
lambda case_result: case_result.kind is CaseResult.Kind.EXPECTED_FAILURE)) result.augmented_results(
unexpected_successes = list(result.augmented_results( lambda case_result: case_result.kind is CaseResult.Kind.EXPECTED_FAILURE
lambda case_result: case_result.kind is CaseResult.Kind.UNEXPECTED_SUCCESS)) ))
unexpected_successes = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.UNEXPECTED_SUCCESS
))
running_names = [case.name for case in running] running_names = [case.name for case in running]
finished_count = (len(failures) + len(errors) + len(successes) + finished_count = (len(failures) + len(errors) + len(successes) +
len(expected_failures) + len(unexpected_successes)) len(expected_failures) + len(unexpected_successes))
statistics = ( statistics = ('{finished} tests finished:\n'
'{finished} tests finished:\n'
'\t{successful} successful\n' '\t{successful} successful\n'
'\t{unsuccessful} unsuccessful\n' '\t{unsuccessful} unsuccessful\n'
'\t{skipped} skipped\n' '\t{skipped} skipped\n'
'\t{expected_fail} expected failures\n' '\t{expected_fail} expected failures\n'
'\t{unexpected_successful} unexpected successes\n' '\t{unexpected_successful} unexpected successes\n'
'Interrupted Tests:\n' 'Interrupted Tests:\n'
'\t{interrupted}\n' '\t{interrupted}\n'.format(
.format(finished=finished_count, finished=finished_count,
successful=len(successes), successful=len(successes),
unsuccessful=(len(failures)+len(errors)), unsuccessful=(len(failures) + len(errors)),
skipped=len(skips), skipped=len(skips),
expected_fail=len(expected_failures), expected_fail=len(expected_failures),
unexpected_successful=len(unexpected_successes), unexpected_successful=len(unexpected_successes),
interrupted=str(running_names))) interrupted=str(running_names)))
tracebacks = '\n\n'.join([ tracebacks = '\n\n'.join(
(_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + [(_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD +
_Colors.BOLD + 'traceback:' + _Colors.END + '\n' + 'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD +
'{traceback}\n' + 'stdout:' + _Colors.END + '\n' + '{stdout}\n' + _Colors.BOLD +
_Colors.BOLD + 'stdout:' + _Colors.END + '\n' + 'stderr:' + _Colors.END + '\n' + '{stderr}\n').format(
'{stdout}\n' +
_Colors.BOLD + 'stderr:' + _Colors.END + '\n' +
'{stderr}\n').format(
test_name=result.name, test_name=result.name,
traceback=_traceback_string(*result.traceback), traceback=_traceback_string(*result.traceback),
stdout=result.stdout, stderr=result.stderr) stdout=result.stdout,
for result in itertools.chain(failures, errors) stderr=result.stderr)
]) for result in itertools.chain(failures, errors)])
notes = 'Unexpected successes: {}\n'.format([ notes = 'Unexpected successes: {}\n'.format(
result.name for result in unexpected_successes]) [result.name for result in unexpected_successes])
return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes
@ -441,9 +453,7 @@ def jenkins_junit_xml(result):
}) })
for case in result.cases.values(): for case in result.cases.values():
if case.kind is CaseResult.Kind.SUCCESS: if case.kind is CaseResult.Kind.SUCCESS:
ElementTree.SubElement(suite, 'testcase', { ElementTree.SubElement(suite, 'testcase', {'name': case.name,})
'name': case.name,
})
elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE): elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE):
case_xml = ElementTree.SubElement(suite, 'testcase', { case_xml = ElementTree.SubElement(suite, 'testcase', {
'name': case.name, 'name': case.name,

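jenkins_junit_xml above emits JUnit-style XML via ElementTree. A standalone sketch of that write-out pattern, with illustrative suite and case names:

from xml.etree import ElementTree

root = ElementTree.Element('testsuites')
suite = ElementTree.SubElement(root, 'testsuite', {'name': 'python'})
ElementTree.SubElement(suite, 'testcase', {'name': 'test_passing'})
failed = ElementTree.SubElement(suite, 'testcase', {'name': 'test_failing'})
ElementTree.SubElement(failed, 'failure', {'message': 'assertion failed'})
with open('report.xml', 'wb') as report_xml_file:
    ElementTree.ElementTree(root).write(report_xml_file)
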
@ -114,8 +114,7 @@ class CaptureFile(object):
os.close(self._saved_fd) os.close(self._saved_fd)
class AugmentedCase(collections.namedtuple('AugmentedCase', [ class AugmentedCase(collections.namedtuple('AugmentedCase', ['case', 'id'])):
'case', 'id'])):
"""A test case with a guaranteed unique externally specified identifier. """A test case with a guaranteed unique externally specified identifier.
Attributes: Attributes:
@ -144,8 +143,9 @@ class Runner(object):
# Ensure that every test case has no collision with any other test case in # Ensure that every test case has no collision with any other test case in
# the augmented results. # the augmented results.
augmented_cases = [AugmentedCase(case, uuid.uuid4()) augmented_cases = [
for case in filtered_cases] AugmentedCase(case, uuid.uuid4()) for case in filtered_cases
]
case_id_by_case = dict((augmented_case.case, augmented_case.id) case_id_by_case = dict((augmented_case.case, augmented_case.id)
for augmented_case in augmented_cases) for augmented_case in augmented_cases)
result_out = moves.cStringIO() result_out = moves.cStringIO()
@ -162,9 +162,8 @@ class Runner(object):
def fault_handler(signal_number, frame): def fault_handler(signal_number, frame):
stdout_pipe.write_bypass( stdout_pipe.write_bypass(
'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n' 'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n'.format(
.format(signal_number, stdout_pipe.output(), signal_number, stdout_pipe.output(), stderr_pipe.output()))
stderr_pipe.output()))
os._exit(1) os._exit(1)
def check_kill_self(): def check_kill_self():
@ -172,16 +171,18 @@ class Runner(object):
stdout_pipe.write_bypass('Stopping tests short...') stdout_pipe.write_bypass('Stopping tests short...')
result.stopTestRun() result.stopTestRun()
stdout_pipe.write_bypass(result_out.getvalue()) stdout_pipe.write_bypass(result_out.getvalue())
stdout_pipe.write_bypass( stdout_pipe.write_bypass('\ninterrupted stdout:\n{}\n'.format(
'\ninterrupted stdout:\n{}\n'.format(stdout_pipe.output().decode())) stdout_pipe.output().decode()))
stderr_pipe.write_bypass( stderr_pipe.write_bypass('\ninterrupted stderr:\n{}\n'.format(
'\ninterrupted stderr:\n{}\n'.format(stderr_pipe.output().decode())) stderr_pipe.output().decode()))
os._exit(1) os._exit(1)
def try_set_handler(name, handler): def try_set_handler(name, handler):
try: try:
signal.signal(getattr(signal, name), handler) signal.signal(getattr(signal, name), handler)
except AttributeError: except AttributeError:
pass pass
try_set_handler('SIGINT', sigint_handler) try_set_handler('SIGINT', sigint_handler)
try_set_handler('SIGSEGV', fault_handler) try_set_handler('SIGSEGV', fault_handler)
try_set_handler('SIGBUS', fault_handler) try_set_handler('SIGBUS', fault_handler)
@ -195,7 +196,8 @@ class Runner(object):
# Run the tests # Run the tests
result.startTestRun() result.startTestRun()
for augmented_case in augmented_cases: for augmented_case in augmented_cases:
sys.stdout.write('Running {}\n'.format(augmented_case.case.id())) sys.stdout.write('Running {}\n'.format(augmented_case.case.id(
)))
sys.stdout.flush() sys.stdout.flush()
case_thread = threading.Thread( case_thread = threading.Thread(
target=augmented_case.case.run, args=(result,)) target=augmented_case.case.run, args=(result,))
@ -209,8 +211,8 @@ class Runner(object):
except: except:
# re-raise the exception after forcing the with-block to end # re-raise the exception after forcing the with-block to end
raise raise
result.set_output( result.set_output(augmented_case.case,
augmented_case.case, stdout_pipe.output(), stderr_pipe.output()) stdout_pipe.output(), stderr_pipe.output())
sys.stdout.write(result_out.getvalue()) sys.stdout.write(result_out.getvalue())
sys.stdout.flush() sys.stdout.flush()
result_out.truncate(0) result_out.truncate(0)
@ -226,4 +228,3 @@ class Runner(object):
with open('report.xml', 'wb') as report_xml_file: with open('report.xml', 'wb') as report_xml_file:
_result.jenkins_junit_xml(result).write(report_xml_file) _result.jenkins_junit_xml(result).write(report_xml_file)
return result return result

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests of grpc_health.v1.health.""" """Tests of grpc_health.v1.health."""
import unittest import unittest
@ -80,11 +79,11 @@ class HealthServicerTest(unittest.TestCase):
request = health_pb2.HealthCheckRequest( request = health_pb2.HealthCheckRequest(
service='grpc.test.TestServiceNotServing') service='grpc.test.TestServiceNotServing')
resp = self._stub.Check(request) resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, resp.status) self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING,
resp.status)
def test_not_found_service(self): def test_not_found_service(self):
request = health_pb2.HealthCheckRequest( request = health_pb2.HealthCheckRequest(service='not-found')
service='not-found')
with self.assertRaises(grpc.RpcError) as context: with self.assertRaises(grpc.RpcError) as context:
resp = self._stub.Check(request) resp = self._stub.Check(request)

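The test above flips per-service status on the health servicer and checks it over a real channel. A hedged sketch of the setup it implies; HealthServicer.set, add_HealthServicer_to_server, and HealthStub are assumptions about the grpc_health package of this era, not taken from the diff:

from concurrent import futures

import grpc
from grpc_health.v1 import health
from grpc_health.v1 import health_pb2

servicer = health.HealthServicer()
servicer.set('grpc.test.TestServiceNotServing',
             health_pb2.HealthCheckResponse.NOT_SERVING)

server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
health_pb2.add_HealthServicer_to_server(servicer, server)
port = server.add_insecure_port('[::]:0')
server.start()

stub = health_pb2.HealthStub(grpc.insecure_channel('localhost:{}'.format(port)))
response = stub.Check(health_pb2.HealthCheckRequest(
    service='grpc.test.TestServiceNotServing'))
assert response.status == health_pb2.HealthCheckResponse.NOT_SERVING
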
@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Python client used to test negative http2 conditions.""" """The Python client used to test negative http2 conditions."""
import argparse import argparse
@ -35,30 +34,33 @@ import grpc
from src.proto.grpc.testing import test_pb2 from src.proto.grpc.testing import test_pb2
from src.proto.grpc.testing import messages_pb2 from src.proto.grpc.testing import messages_pb2
def _validate_payload_type_and_length(response, expected_type, expected_length): def _validate_payload_type_and_length(response, expected_type, expected_length):
if response.payload.type is not expected_type: if response.payload.type is not expected_type:
raise ValueError( raise ValueError('expected payload type %s, got %s' %
'expected payload type %s, got %s' %
(expected_type, type(response.payload.type))) (expected_type, type(response.payload.type)))
elif len(response.payload.body) != expected_length: elif len(response.payload.body) != expected_length:
raise ValueError( raise ValueError('expected payload body size %d, got %d' %
'expected payload body size %d, got %d' %
(expected_length, len(response.payload.body))) (expected_length, len(response.payload.body)))
def _expect_status_code(call, expected_code): def _expect_status_code(call, expected_code):
if call.code() != expected_code: if call.code() != expected_code:
raise ValueError( raise ValueError('expected code %s, got %s' %
'expected code %s, got %s' % (expected_code, call.code())) (expected_code, call.code()))
def _expect_status_details(call, expected_details): def _expect_status_details(call, expected_details):
if call.details() != expected_details: if call.details() != expected_details:
raise ValueError( raise ValueError('expected message %s, got %s' %
'expected message %s, got %s' % (expected_details, call.details())) (expected_details, call.details()))
def _validate_status_code_and_details(call, expected_code, expected_details): def _validate_status_code_and_details(call, expected_code, expected_details):
_expect_status_code(call, expected_code) _expect_status_code(call, expected_code)
_expect_status_details(call, expected_details) _expect_status_details(call, expected_details)
# common requests # common requests
_REQUEST_SIZE = 314159 _REQUEST_SIZE = 314159
_RESPONSE_SIZE = 271828 _RESPONSE_SIZE = 271828
@ -68,46 +70,54 @@ _SIMPLE_REQUEST = messages_pb2.SimpleRequest(
response_size=_RESPONSE_SIZE, response_size=_RESPONSE_SIZE,
payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE)) payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE))
def _goaway(stub): def _goaway(stub):
first_response = stub.UnaryCall(_SIMPLE_REQUEST) first_response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(first_response, _validate_payload_type_and_length(first_response, messages_pb2.COMPRESSABLE,
messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) _RESPONSE_SIZE)
second_response = stub.UnaryCall(_SIMPLE_REQUEST) second_response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(second_response, _validate_payload_type_and_length(second_response,
messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) messages_pb2.COMPRESSABLE, _RESPONSE_SIZE)
def _rst_after_header(stub): def _rst_after_header(stub):
resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
_validate_status_code_and_details(resp_future, grpc.StatusCode.UNAVAILABLE, "") _validate_status_code_and_details(resp_future, grpc.StatusCode.UNAVAILABLE,
"")
def _rst_during_data(stub): def _rst_during_data(stub):
resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
_validate_status_code_and_details(resp_future, grpc.StatusCode.UNKNOWN, "") _validate_status_code_and_details(resp_future, grpc.StatusCode.UNKNOWN, "")
def _rst_after_data(stub): def _rst_after_data(stub):
resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
_validate_payload_type_and_length(next(resp_future), _validate_payload_type_and_length(
messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) next(resp_future), messages_pb2.COMPRESSABLE, _RESPONSE_SIZE)
_validate_status_code_and_details(resp_future, grpc.StatusCode.UNKNOWN, "") _validate_status_code_and_details(resp_future, grpc.StatusCode.UNKNOWN, "")
def _ping(stub): def _ping(stub):
response = stub.UnaryCall(_SIMPLE_REQUEST) response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(response, _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) _RESPONSE_SIZE)
def _max_streams(stub): def _max_streams(stub):
# send one req to ensure server sets MAX_STREAMS # send one req to ensure server sets MAX_STREAMS
response = stub.UnaryCall(_SIMPLE_REQUEST) response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(response, _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) _RESPONSE_SIZE)
# give the streams a workout # give the streams a workout
futures = [] futures = []
for _ in range(15): for _ in range(15):
futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST)) futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST))
for future in futures: for future in futures:
_validate_payload_type_and_length(future.result(), _validate_payload_type_and_length(
messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) future.result(), messages_pb2.COMPRESSABLE, _RESPONSE_SIZE)
def _run_test_case(test_case, stub): def _run_test_case(test_case, stub):
if test_case == 'goaway': if test_case == 'goaway':
@ -118,31 +128,40 @@ def _run_test_case(test_case, stub):
_rst_during_data(stub) _rst_during_data(stub)
elif test_case == 'rst_after_data': elif test_case == 'rst_after_data':
_rst_after_data(stub) _rst_after_data(stub)
elif test_case =='ping': elif test_case == 'ping':
_ping(stub) _ping(stub)
elif test_case == 'max_streams': elif test_case == 'max_streams':
_max_streams(stub) _max_streams(stub)
else: else:
raise ValueError("Invalid test case: %s" % test_case) raise ValueError("Invalid test case: %s" % test_case)
def _args(): def _args():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument(
'--server_host', help='the host to which to connect', type=str, '--server_host',
help='the host to which to connect',
type=str,
default="127.0.0.1") default="127.0.0.1")
parser.add_argument( parser.add_argument(
'--server_port', help='the port to which to connect', type=int, '--server_port',
help='the port to which to connect',
type=int,
default="8080") default="8080")
parser.add_argument( parser.add_argument(
'--test_case', help='the test case to execute', type=str, '--test_case',
help='the test case to execute',
type=str,
default="goaway") default="goaway")
return parser.parse_args() return parser.parse_args()
def _stub(server_host, server_port): def _stub(server_host, server_port):
target = '{}:{}'.format(server_host, server_port) target = '{}:{}'.format(server_host, server_port)
channel = grpc.insecure_channel(target) channel = grpc.insecure_channel(target)
return test_pb2.TestServiceStub(channel) return test_pb2.TestServiceStub(channel)
def main(): def main():
args = _args() args = _args()
stub = _stub(args.server_host, args.server_port) stub = _stub(args.server_host, args.server_port)

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Insecure client-server interoperability as a unit test.""" """Insecure client-server interoperability as a unit test."""
from concurrent import futures from concurrent import futures
@ -40,14 +39,13 @@ from tests.interop import methods
from tests.interop import server from tests.interop import server
class InsecureIntraopTest( class InsecureIntraopTest(_intraop_test_case.IntraopTestCase,
_intraop_test_case.IntraopTestCase,
unittest.TestCase): unittest.TestCase):
def setUp(self): def setUp(self):
self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
test_pb2.add_TestServiceServicer_to_server( test_pb2.add_TestServiceServicer_to_server(methods.TestService(),
methods.TestService(), self.server) self.server)
port = self.server.add_insecure_port('[::]:0') port = self.server.add_insecure_port('[::]:0')
self.server.start() self.server.start()
self.stub = test_pb2.TestServiceStub( self.stub = test_pb2.TestServiceStub(

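The setUp above (truncated at the stub construction) follows the usual ephemeral-port pattern. A hedged standalone sketch, assuming the stub simply wraps an insecure channel on the chosen port and that the imports match those shown elsewhere in these tests:

from concurrent import futures

import grpc

from src.proto.grpc.testing import test_pb2
from tests.interop import methods

server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
test_pb2.add_TestServiceServicer_to_server(methods.TestService(), server)
port = server.add_insecure_port('[::]:0')  # 0 asks the OS for a free port
server.start()
stub = test_pb2.TestServiceStub(
    grpc.insecure_channel('localhost:{}'.format(port)))
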
@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Common code for unit tests of the interoperability test code.""" """Common code for unit tests of the interoperability test code."""
from tests.interop import methods from tests.interop import methods
@ -55,10 +54,13 @@ class IntraopTestCase(object):
methods.TestCase.PING_PONG.test_interoperability(self.stub, None) methods.TestCase.PING_PONG.test_interoperability(self.stub, None)
def testCancelAfterBegin(self): def testCancelAfterBegin(self):
methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability(self.stub, None) methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability(self.stub,
None)
def testCancelAfterFirstResponse(self): def testCancelAfterFirstResponse(self):
methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability(self.stub, None) methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability(
self.stub, None)
def testTimeoutOnSleepingServer(self): def testTimeoutOnSleepingServer(self):
methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability(self.stub, None) methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability(
self.stub, None)

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Secure client-server interoperability as a unit test."""

from concurrent import futures

@@ -42,23 +41,23 @@ from tests.interop import resources
_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'


class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase):

    def setUp(self):
        self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
        test_pb2.add_TestServiceServicer_to_server(methods.TestService(),
                                                   self.server)
        port = self.server.add_secure_port(
            '[::]:0',
            grpc.ssl_server_credentials(
                [(resources.private_key(), resources.certificate_chain())]))
        self.server.start()
        self.stub = test_pb2.TestServiceStub(
            grpc.secure_channel('localhost:{}'.format(port),
                                grpc.ssl_channel_credentials(
                                    resources.test_root_certificates()), ((
                                        'grpc.ssl_target_name_override',
                                        _SERVER_HOST_OVERRIDE,),)))


if __name__ == '__main__':
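For reference, a minimal sketch (not part of this change) of the TLS wiring that the setUp above exercises; the helper name _secure_local_pair and the in-memory PEM arguments are hypothetical, but the grpc calls are the ones used by the test:

# Sketch only: bind a TLS server on an ephemeral port and open a matching
# secure channel, overriding the target name the way the interop tests do.
from concurrent import futures

import grpc

def _secure_local_pair(private_key_pem, certificate_chain_pem,
                       root_certificates_pem):
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    server_credentials = grpc.ssl_server_credentials(
        ((private_key_pem, certificate_chain_pem),))
    port = server.add_secure_port('[::]:0', server_credentials)
    server.start()
    channel_credentials = grpc.ssl_channel_credentials(root_certificates_pem)
    channel = grpc.secure_channel(
        'localhost:{}'.format(port), channel_credentials,
        (('grpc.ssl_target_name_override', 'foo.test.google.fr',),))
    return server, channel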

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Python implementation of the GRPC interoperability test client."""

import argparse

@@ -43,26 +42,38 @@ from tests.interop import resources


def _args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--server_host',
        help='the host to which to connect',
        type=str,
        default="127.0.0.1")
    parser.add_argument(
        '--server_port', help='the port to which to connect', type=int)
    parser.add_argument(
        '--test_case',
        help='the test case to execute',
        type=str,
        default="large_unary")
    parser.add_argument(
        '--use_tls',
        help='require a secure connection',
        default=False,
        type=resources.parse_bool)
    parser.add_argument(
        '--use_test_ca',
        help='replace platform root CAs with ca.pem',
        default=False,
        type=resources.parse_bool)
    parser.add_argument(
        '--server_host_override',
        default="foo.test.google.fr",
        help='the server host to which to claim to connect',
        type=str)
    parser.add_argument(
        '--oauth_scope', help='scope for OAuth tokens', type=str)
    parser.add_argument(
        '--default_service_account',
        help='email address of the default service account',
        type=str)
    return parser.parse_args()

@@ -74,12 +85,14 @@ def _stub(args):
    target = '{}:{}'.format(args.server_host, args.server_port)
    if args.test_case == 'oauth2_auth_token':
        google_credentials = _application_default_credentials()
        scoped_credentials = google_credentials.create_scoped(
            [args.oauth_scope])
        access_token = scoped_credentials.get_access_token().access_token
        call_credentials = grpc.access_token_call_credentials(access_token)
    elif args.test_case == 'compute_engine_creds':
        google_credentials = _application_default_credentials()
        scoped_credentials = google_credentials.create_scoped(
            [args.oauth_scope])
        # TODO(https://github.com/grpc/grpc/issues/6799): Eliminate this last
        # remaining use of the Beta API.
        call_credentials = implementations.google_call_credentials(

@@ -103,9 +116,9 @@ def _stub(args):
        channel_credentials = grpc.composite_channel_credentials(
            channel_credentials, call_credentials)
        channel = grpc.secure_channel(target, channel_credentials, ((
            'grpc.ssl_target_name_override',
            args.server_host_override,),))
    else:
        channel = grpc.insecure_channel(target)
    if args.test_case == "unimplemented_service":

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Implementations of interoperability test methods."""

import enum

@@ -46,24 +45,27 @@ from src.proto.grpc.testing import test_pb2
_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"


def _maybe_echo_metadata(servicer_context):
    """Copies metadata from request to response if it is present."""
    invocation_metadata = dict(servicer_context.invocation_metadata())
    if _INITIAL_METADATA_KEY in invocation_metadata:
        initial_metadatum = (_INITIAL_METADATA_KEY,
                             invocation_metadata[_INITIAL_METADATA_KEY])
        servicer_context.send_initial_metadata((initial_metadatum,))
    if _TRAILING_METADATA_KEY in invocation_metadata:
        trailing_metadatum = (_TRAILING_METADATA_KEY,
                              invocation_metadata[_TRAILING_METADATA_KEY])
        servicer_context.set_trailing_metadata((trailing_metadatum,))


def _maybe_echo_status_and_message(request, servicer_context):
    """Sets the response context code and details if the request asks for them"""
    if request.HasField('response_status'):
        servicer_context.set_code(request.response_status.code)
        servicer_context.set_details(request.response_status.message)


class TestService(test_pb2.TestServiceServicer):

    def EmptyCall(self, request, context):
@@ -73,8 +75,7 @@ class TestService(test_pb2.TestServiceServicer):
    def UnaryCall(self, request, context):
        _maybe_echo_metadata(context)
        _maybe_echo_status_and_message(request, context)
        return messages_pb2.SimpleResponse(payload=messages_pb2.Payload(
            type=messages_pb2.COMPRESSABLE,
            body=b'\x00' * request.response_size))

@@ -112,14 +113,14 @@ class TestService(test_pb2.TestServiceServicer):
def _expect_status_code(call, expected_code):
    if call.code() != expected_code:
        raise ValueError('expected code %s, got %s' %
                         (expected_code, call.code()))


def _expect_status_details(call, expected_details):
    if call.details() != expected_details:
        raise ValueError('expected message %s, got %s' %
                         (expected_details, call.details()))


def _validate_status_code_and_details(call, expected_code, expected_details):

@@ -129,22 +130,22 @@ def _validate_status_code_and_details(call, expected_code, expected_details):
def _validate_payload_type_and_length(response, expected_type, expected_length):
    if response.payload.type is not expected_type:
        raise ValueError('expected payload type %s, got %s' %
                         (expected_type, type(response.payload.type)))
    elif len(response.payload.body) != expected_length:
        raise ValueError('expected payload body size %d, got %d' %
                         (expected_length, len(response.payload.body)))


def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope,
                                 call_credentials):
    size = 314159
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=size,
        payload=messages_pb2.Payload(body=b'\x00' * 271828),
        fill_username=fill_username,
        fill_oauth_scope=fill_oauth_scope)
    response_future = stub.UnaryCall.future(
        request, credentials=call_credentials)
    response = response_future.result()

@@ -155,8 +156,8 @@ def _large_unary_common_behavior(
def _empty_unary(stub):
    response = stub.EmptyCall(empty_pb2.Empty())
    if not isinstance(response, empty_pb2.Empty):
        raise TypeError('response is of type "%s", not empty_pb2.Empty!',
                        type(response))


def _large_unary(stub):
@@ -164,21 +165,27 @@ def _large_unary(stub):
def _client_streaming(stub):
    payload_body_sizes = (
        27182,
        8,
        1828,
        45904,)
    payloads = (messages_pb2.Payload(body=b'\x00' * size)
                for size in payload_body_sizes)
    requests = (messages_pb2.StreamingInputCallRequest(payload=payload)
                for payload in payloads)
    response = stub.StreamingInputCall(requests)
    if response.aggregated_payload_size != 74922:
        raise ValueError('incorrect size %d!' %
                         response.aggregated_payload_size)


def _server_streaming(stub):
    sizes = (
        31415,
        9,
        2653,
        58979,)
    request = messages_pb2.StreamingOutputCallRequest(
        response_type=messages_pb2.COMPRESSABLE,

@@ -186,14 +193,11 @@ def _server_streaming(stub):
            messages_pb2.ResponseParameters(size=sizes[0]),
            messages_pb2.ResponseParameters(size=sizes[1]),
            messages_pb2.ResponseParameters(size=sizes[2]),
            messages_pb2.ResponseParameters(size=sizes[3]),))
    response_iterator = stub.StreamingOutputCall(request)
    for index, response in enumerate(response_iterator):
        _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
                                          sizes[index])


class _Pipe(object):

@@ -236,13 +240,21 @@ class _Pipe(object):
def _ping_pong(stub):
    request_response_sizes = (
        31415,
        9,
        2653,
        58979,)
    request_payload_sizes = (
        27182,
        8,
        1828,
        45904,)
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        for response_size, payload_size in zip(request_response_sizes,
                                               request_payload_sizes):
            request = messages_pb2.StreamingOutputCallRequest(
                response_type=messages_pb2.COMPRESSABLE,
                response_parameters=(
@@ -265,8 +277,16 @@ def _cancel_after_begin(stub):
def _cancel_after_first_response(stub):
    request_response_sizes = (
        31415,
        9,
        2653,
        58979,)
    request_payload_sizes = (
        27182,
        8,
        1828,
        45904,)
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)

@@ -331,8 +351,8 @@ def _status_code_and_message(stub):
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b'\x00'),
        response_status=messages_pb2.EchoStatus(
            code=code, message=details))
    response_future = stub.UnaryCall.future(request)
    _validate_status_code_and_details(response_future, status, details)

@@ -341,10 +361,10 @@ def _status_code_and_message(stub):
        response_iterator = stub.FullDuplexCall(pipe)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),),
            payload=messages_pb2.Payload(body=b'\x00'),
            response_status=messages_pb2.EchoStatus(
                code=code, message=details))
        pipe.add(request)  # sends the initial request.
    # Dropping out of with block closes the pipe
    _validate_status_code_and_details(response_iterator, status, details)
@@ -365,21 +385,20 @@ def _unimplemented_service(unimplemented_service_stub):
def _custom_metadata(stub):
    initial_metadata_value = "test_initial_metadata_value"
    trailing_metadata_value = "\x0a\x0b\x0a\x0b\x0a\x0b"
    metadata = ((_INITIAL_METADATA_KEY, initial_metadata_value),
                (_TRAILING_METADATA_KEY, trailing_metadata_value))

    def _validate_metadata(response):
        initial_metadata = dict(response.initial_metadata())
        if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value:
            raise ValueError('expected initial metadata %s, got %s' %
                             (initial_metadata_value,
                              initial_metadata[_INITIAL_METADATA_KEY]))
        trailing_metadata = dict(response.trailing_metadata())
        if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value:
            raise ValueError('expected trailing metadata %s, got %s' %
                             (trailing_metadata_value,
                              initial_metadata[_TRAILING_METADATA_KEY]))

    # Testing with UnaryCall
    request = messages_pb2.SimpleRequest(

@@ -394,19 +413,18 @@ def _custom_metadata(stub):
        response_iterator = stub.FullDuplexCall(pipe, metadata=metadata)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),))
        pipe.add(request)  # Sends the request
        next(response_iterator)  # Causes server to send trailing metadata
    # Dropping out of the with block closes the pipe
    _validate_metadata(response_iterator)


def _compute_engine_creds(stub, args):
    response = _large_unary_common_behavior(stub, True, True, None)
    if args.default_service_account != response.username:
        raise ValueError('expected username %s, got %s' %
                         (args.default_service_account, response.username))
def _oauth2_auth_token(stub, args):

@@ -415,12 +433,11 @@ def _oauth2_auth_token(stub, args):
    wanted_email = json.load(open(json_key_filename, 'rb'))['client_email']
    response = _large_unary_common_behavior(stub, True, True, None)
    if wanted_email != response.username:
        raise ValueError('expected username %s, got %s' %
                         (wanted_email, response.username))
    if args.oauth_scope.find(response.oauth_scope) == -1:
        raise ValueError('expected to find oauth scope "{}" in received "{}"'.
                         format(response.oauth_scope, args.oauth_scope))


def _jwt_token_creds(stub, args):

@@ -429,15 +446,16 @@ def _jwt_token_creds(stub, args):
    wanted_email = json.load(open(json_key_filename, 'rb'))['client_email']
    response = _large_unary_common_behavior(stub, True, False, None)
    if wanted_email != response.username:
        raise ValueError('expected username %s, got %s' %
                         (wanted_email, response.username))


def _per_rpc_creds(stub, args):
    json_key_filename = os.environ[
        oauth2client_client.GOOGLE_APPLICATION_CREDENTIALS]
    wanted_email = json.load(open(json_key_filename, 'rb'))['client_email']
    credentials = oauth2client_client.GoogleCredentials.get_application_default(
    )
    scoped_credentials = credentials.create_scoped([args.oauth_scope])
    # TODO(https://github.com/grpc/grpc/issues/6799): Eliminate this last
    # remaining use of the Beta API.

@@ -445,8 +463,8 @@ def _per_rpc_creds(stub, args):
        scoped_credentials)
    response = _large_unary_common_behavior(stub, True, False, call_credentials)
    if wanted_email != response.username:
        raise ValueError('expected username %s, got %s' %
                         (wanted_email, response.username))


@enum.unique

@@ -505,4 +523,5 @@ class TestCase(enum.Enum):
        elif self is TestCase.PER_RPC_CREDS:
            _per_rpc_creds(stub, args)
        else:
            raise NotImplementedError('Test case "%s" not implemented!' %
                                      self.name)
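For reference, a hedged sketch (not part of this change) of how these interop methods are driven through the TestCase enum, mirroring the intraop unit tests earlier in this diff; the helper name _run_smoke_cases is hypothetical:

# Sketch only: each TestCase member dispatches to one of the module-level
# test functions above; cases that need no extra flags take args=None.
from tests.interop import methods

def _run_smoke_cases(stub):
    methods.TestCase.LARGE_UNARY.test_interoperability(stub, None)
    methods.TestCase.PING_PONG.test_interoperability(stub, None)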

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Constants and functions for data used in interoperability testing."""

import argparse

@@ -40,8 +39,8 @@ _CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem'
def test_root_certificates():
    return pkg_resources.resource_string(__name__,
                                         _ROOT_CERTIFICATES_RESOURCE_PATH)


def private_key():

@@ -49,8 +48,8 @@ def private_key():
def certificate_chain():
    return pkg_resources.resource_string(__name__,
                                         _CERTIFICATE_CHAIN_RESOURCE_PATH)


def parse_bool(value):

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Python implementation of the GRPC interoperability test server."""

import argparse

@@ -45,11 +44,12 @@ _ONE_DAY_IN_SECONDS = 60 * 60 * 24
def serve():
    parser = argparse.ArgumentParser()
    parser.add_argument('--port', help='the port on which to serve', type=int)
    parser.add_argument(
        '--use_tls',
        help='require a secure connection',
        default=False,
        type=resources.parse_bool)
    args = parser.parse_args()

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))

@@ -57,8 +57,8 @@ def serve():
    if args.use_tls:
        private_key = resources.private_key()
        certificate_chain = resources.certificate_chain()
        credentials = grpc.ssl_server_credentials((
            (private_key, certificate_chain),))
        server.add_secure_port('[::]:{}'.format(args.port), credentials)
    else:
        server.add_insecure_port('[::]:{}'.format(args.port))

@@ -73,5 +73,6 @@ def serve():
        server.stop(None)
        logging.info('Server stopped; exiting.')


if __name__ == '__main__':
    serve()
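For reference, a small sketch (not part of this change) of the conventional sleep-until-interrupted loop that serve() blocks in after binding the port; the helper name _wait_forever is hypothetical, while _ONE_DAY_IN_SECONDS, server.stop(None), and the log line come from the hunks above:

# Sketch only: keep the process alive until Ctrl-C, then stop the server.
import logging
import time

_ONE_DAY_IN_SECONDS = 60 * 60 * 24

def _wait_forever(server):
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        server.stop(None)
        logging.info('Server stopped; exiting.')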

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -134,8 +134,10 @@ class _ServicerMethods(object):
class _Service(
        collections.namedtuple('_Service', (
            'servicer_methods',
            'server',
            'stub',))):
    """A live and running service.

    Attributes:

@@ -238,10 +240,8 @@ class PythonPluginTest(unittest.TestCase):
    def testImportAttributes(self):
        # check that we can access the generated module and its members.
        self.assertIsNotNone(getattr(service_pb2, STUB_IDENTIFIER, None))
        self.assertIsNotNone(getattr(service_pb2, SERVICER_IDENTIFIER, None))
        self.assertIsNotNone(
            getattr(service_pb2, ADD_SERVICER_TO_SERVER_IDENTIFIER, None))

@@ -256,8 +256,8 @@ class PythonPluginTest(unittest.TestCase):
        request = request_pb2.SimpleRequest(response_size=13)
        with self.assertRaises(grpc.RpcError) as exception_context:
            service.stub.UnaryCall(request)
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.UNIMPLEMENTED)

    def testUnaryCall(self):
        service = _CreateService()
@@ -286,8 +286,8 @@ class PythonPluginTest(unittest.TestCase):
            request, timeout=test_constants.SHORT_TIMEOUT)
        with self.assertRaises(grpc.RpcError) as exception_context:
            response_future.result()
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.DEADLINE_EXCEEDED)
        self.assertIs(response_future.code(), grpc.StatusCode.DEADLINE_EXCEEDED)

    def testUnaryCallFutureCancelled(self):

@@ -313,8 +313,8 @@ class PythonPluginTest(unittest.TestCase):
        responses = service.stub.StreamingOutputCall(request)
        expected_responses = service.servicer_methods.StreamingOutputCall(
            request, 'not a real RpcContext!')
        for expected_response, response in moves.zip_longest(expected_responses,
                                                              responses):
            self.assertEqual(expected_response, response)

    def testStreamingOutputCallExpired(self):

@@ -325,8 +325,8 @@ class PythonPluginTest(unittest.TestCase):
            request, timeout=test_constants.SHORT_TIMEOUT)
        with self.assertRaises(grpc.RpcError) as exception_context:
            list(responses)
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.DEADLINE_EXCEEDED)

    def testStreamingOutputCallCancelled(self):
        service = _CreateService()

@@ -346,15 +346,15 @@ class PythonPluginTest(unittest.TestCase):
        self.assertIsNotNone(responses)
        with self.assertRaises(grpc.RpcError) as exception_context:
            next(responses)
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.UNKNOWN)

    def testStreamingInputCall(self):
        service = _CreateService()
        response = service.stub.StreamingInputCall(
            _streaming_input_request_iterator())
        expected_response = service.servicer_methods.StreamingInputCall(
            _streaming_input_request_iterator(), 'not a real RpcContext!')
        self.assertEqual(expected_response, response)

    def testStreamingInputCallFuture(self):

@@ -364,8 +364,7 @@ class PythonPluginTest(unittest.TestCase):
            _streaming_input_request_iterator())
        response = response_future.result()
        expected_response = service.servicer_methods.StreamingInputCall(
            _streaming_input_request_iterator(), 'not a real RpcContext!')
        self.assertEqual(expected_response, response)

    def testStreamingInputCallFutureExpired(self):

@@ -377,10 +376,10 @@ class PythonPluginTest(unittest.TestCase):
        with self.assertRaises(grpc.RpcError) as exception_context:
            response_future.result()
        self.assertIsInstance(response_future.exception(), grpc.RpcError)
        self.assertIs(response_future.exception().code(),
                      grpc.StatusCode.DEADLINE_EXCEEDED)
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.DEADLINE_EXCEEDED)

    def testStreamingInputCallFutureCancelled(self):
        service = _CreateService()
@@ -402,13 +401,11 @@ class PythonPluginTest(unittest.TestCase):
    def testFullDuplexCall(self):
        service = _CreateService()
        responses = service.stub.FullDuplexCall(_full_duplex_request_iterator())
        expected_responses = service.servicer_methods.FullDuplexCall(
            _full_duplex_request_iterator(), 'not a real RpcContext!')
        for expected_response, response in moves.zip_longest(expected_responses,
                                                              responses):
            self.assertEqual(expected_response, response)

    def testFullDuplexCallExpired(self):

@@ -419,8 +416,8 @@ class PythonPluginTest(unittest.TestCase):
            request_iterator, timeout=test_constants.SHORT_TIMEOUT)
        with self.assertRaises(grpc.RpcError) as exception_context:
            list(responses)
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.DEADLINE_EXCEEDED)

    def testFullDuplexCallCancelled(self):
        service = _CreateService()

@@ -430,8 +427,8 @@ class PythonPluginTest(unittest.TestCase):
        responses.cancel()
        with self.assertRaises(grpc.RpcError) as exception_context:
            next(responses)
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.CANCELLED)

    def testFullDuplexCallFailed(self):
        request_iterator = _full_duplex_request_iterator()

@@ -440,10 +437,12 @@ class PythonPluginTest(unittest.TestCase):
        responses = service.stub.FullDuplexCall(request_iterator)
        with self.assertRaises(grpc.RpcError) as exception_context:
            next(responses)
        self.assertIs(exception_context.exception.code(),
                      grpc.StatusCode.UNKNOWN)

    def testHalfDuplexCall(self):
        service = _CreateService()

        def half_duplex_request_iterator():
            request = request_pb2.StreamingOutputCallRequest()
            request.response_parameters.add(size=1, interval_us=0)

@@ -452,16 +451,18 @@ class PythonPluginTest(unittest.TestCase):
            request.response_parameters.add(size=2, interval_us=0)
            request.response_parameters.add(size=3, interval_us=0)
            yield request

        responses = service.stub.HalfDuplexCall(half_duplex_request_iterator())
        expected_responses = service.servicer_methods.HalfDuplexCall(
            half_duplex_request_iterator(), 'not a real RpcContext!')
        for expected_response, response in moves.zip_longest(expected_responses,
                                                              responses):
            self.assertEqual(expected_response, response)

    def testHalfDuplexCallWedged(self):
        condition = threading.Condition()
        wait_cell = [False]

        @contextlib.contextmanager
        def wait():  # pylint: disable=invalid-name
            # Where's Python 3's 'nonlocal' statement when you need it?
@@ -471,6 +472,7 @@ class PythonPluginTest(unittest.TestCase):
            with condition:
                wait_cell[0] = False
                condition.notify_all()

        def half_duplex_request_iterator():
            request = request_pb2.StreamingOutputCallRequest()
            request.response_parameters.add(size=1, interval_us=0)

@@ -478,15 +480,17 @@ class PythonPluginTest(unittest.TestCase):
            with condition:
                while wait_cell[0]:
                    condition.wait()

        service = _CreateService()
        with wait():
            responses = service.stub.HalfDuplexCall(
                half_duplex_request_iterator(),
                timeout=test_constants.SHORT_TIMEOUT)
            # half-duplex waits for the client to send all info
            with self.assertRaises(grpc.RpcError) as exception_context:
                next(responses)
            self.assertIs(exception_context.exception.code(),
                          grpc.StatusCode.DEADLINE_EXCEEDED)


if __name__ == '__main__':

@@ -49,6 +49,7 @@ from tests.unit.framework.common import test_constants
_MESSAGES_IMPORT = b'import "messages.proto";'


@contextlib.contextmanager
def _system_path(path):
    old_system_path = sys.path[:]

@@ -96,8 +97,7 @@ class SeparateTestMixin(object):
        server = grpc.server(
            futures.ThreadPoolExecutor(max_workers=test_constants.POOL_SIZE))
        pb2_grpc.add_TestServiceServicer_to_server(
            DummySplitServicer(pb2.Request, pb2.Response), server)
        port = server.add_insecure_port('[::]:0')
        server.start()
        channel = grpc.insecure_channel('localhost:{}'.format(port))

@@ -137,8 +137,7 @@ class CommonTestMixin(object):
        server = grpc.server(
            futures.ThreadPoolExecutor(max_workers=test_constants.POOL_SIZE))
        pb2_grpc.add_TestServiceServicer_to_server(
            DummySplitServicer(pb2.Request, pb2.Response), server)
        port = server.add_insecure_port('[::]:0')
        server.start()
        channel = grpc.insecure_channel('localhost:{}'.format(port))
@@ -157,23 +156,28 @@ class SameSeparateTest(unittest.TestCase, SeparateTestMixin):
        self.directory = tempfile.mkdtemp(suffix='same_separate', dir='.')
        self.proto_directory = os.path.join(self.directory, 'proto_path')
        self.python_out_directory = os.path.join(self.directory, 'python_out')
        self.grpc_python_out_directory = os.path.join(self.directory,
                                                      'grpc_python_out')
        os.makedirs(self.proto_directory)
        os.makedirs(self.python_out_directory)
        os.makedirs(self.grpc_python_out_directory)
        same_proto_file = os.path.join(self.proto_directory,
                                       'same_separate.proto')
        open(same_proto_file, 'wb').write(same_proto_contents)
        protoc_result = protoc.main([
            '',
            '--proto_path={}'.format(self.proto_directory),
            '--python_out={}'.format(self.python_out_directory),
            '--grpc_python_out=grpc_2_0:{}'.format(
                self.grpc_python_out_directory),
            same_proto_file,
        ])
        if protoc_result != 0:
            raise Exception("unexpected protoc error")
        open(os.path.join(self.grpc_python_out_directory, '__init__.py'),
             'w').write('')
        open(os.path.join(self.python_out_directory, '__init__.py'),
             'w').write('')
        self.pb2_import = 'same_separate_pb2'
        self.pb2_grpc_import = 'same_separate_pb2_grpc'
        self.should_find_services_in_pb2 = False

@@ -193,7 +197,8 @@ class SameCommonTest(unittest.TestCase, CommonTestMixin):
        self.grpc_python_out_directory = self.python_out_directory
        os.makedirs(self.proto_directory)
        os.makedirs(self.python_out_directory)
        same_proto_file = os.path.join(self.proto_directory,
                                       'same_common.proto')
        open(same_proto_file, 'wb').write(same_proto_contents)
        protoc_result = protoc.main([
            '',

@@ -204,7 +209,8 @@ class SameCommonTest(unittest.TestCase, CommonTestMixin):
        ])
        if protoc_result != 0:
            raise Exception("unexpected protoc error")
        open(os.path.join(self.python_out_directory, '__init__.py'),
             'w').write('')
        self.pb2_import = 'same_common_pb2'
        self.pb2_grpc_import = 'same_common_pb2_grpc'
        self.should_find_services_in_pb2 = True
@@ -232,10 +238,9 @@ class SplitCommonTest(unittest.TestCase, CommonTestMixin):
                                           'split_common_services.proto')
        messages_proto_file = os.path.join(self.proto_directory,
                                           'split_common_messages.proto')
        open(services_proto_file, 'wb').write(
            services_proto_contents.replace(
                _MESSAGES_IMPORT, b'import "split_common_messages.proto";'))
        open(messages_proto_file, 'wb').write(messages_proto_contents)
        protoc_result = protoc.main([
            '',

@@ -247,7 +252,8 @@ class SplitCommonTest(unittest.TestCase, CommonTestMixin):
        ])
        if protoc_result != 0:
            raise Exception("unexpected protoc error")
        open(os.path.join(self.python_out_directory, '__init__.py'),
             'w').write('')
        self.pb2_import = 'split_common_messages_pb2'
        self.pb2_grpc_import = 'split_common_services_pb2_grpc'
        self.should_find_services_in_pb2 = False

@@ -268,7 +274,8 @@ class SplitSeparateTest(unittest.TestCase, SeparateTestMixin):
        self.directory = tempfile.mkdtemp(suffix='split_separate', dir='.')
        self.proto_directory = os.path.join(self.directory, 'proto_path')
        self.python_out_directory = os.path.join(self.directory, 'python_out')
        self.grpc_python_out_directory = os.path.join(self.directory,
                                                      'grpc_python_out')
        os.makedirs(self.proto_directory)
        os.makedirs(self.python_out_directory)
        os.makedirs(self.grpc_python_out_directory)

@@ -276,22 +283,23 @@ class SplitSeparateTest(unittest.TestCase, SeparateTestMixin):
                                           'split_separate_services.proto')
        messages_proto_file = os.path.join(self.proto_directory,
                                           'split_separate_messages.proto')
        open(services_proto_file, 'wb').write(
            services_proto_contents.replace(
                _MESSAGES_IMPORT, b'import "split_separate_messages.proto";'))
        open(messages_proto_file, 'wb').write(messages_proto_contents)
        protoc_result = protoc.main([
            '',
            '--proto_path={}'.format(self.proto_directory),
            '--python_out={}'.format(self.python_out_directory),
            '--grpc_python_out=grpc_2_0:{}'.format(
                self.grpc_python_out_directory),
            services_proto_file,
            messages_proto_file,
        ])
        if protoc_result != 0:
            raise Exception("unexpected protoc error")
        open(os.path.join(self.python_out_directory, '__init__.py'),
             'w').write('')
        self.pb2_import = 'split_separate_messages_pb2'
        self.pb2_grpc_import = 'split_separate_services_pb2_grpc'
        self.should_find_services_in_pb2 = False
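For reference, a hedged sketch (not part of this change) of invoking protoc programmatically the way these setUp methods do; the directory names and the proto file name are hypothetical, and the import assumes the grpcio-tools package layout:

# Sketch only: generate example_pb2.py and example_pb2_grpc.py from a proto.
from grpc_tools import protoc

protoc_result = protoc.main([
    '',
    '--proto_path=proto_path',
    '--python_out=python_out',
    '--grpc_python_out=grpc_2_0:grpc_python_out',
    'proto_path/example.proto',
])
if protoc_result != 0:
    raise Exception("unexpected protoc error")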

@@ -244,10 +244,8 @@ class PythonPluginTest(unittest.TestCase):
    def testImportAttributes(self):
        # check that we can access the generated module and its members.
        self.assertIsNotNone(getattr(service_pb2, SERVICER_IDENTIFIER, None))
        self.assertIsNotNone(getattr(service_pb2, STUB_IDENTIFIER, None))
        self.assertIsNotNone(
            getattr(service_pb2, SERVER_FACTORY_IDENTIFIER, None))
        self.assertIsNotNone(

@@ -263,7 +261,8 @@ class PythonPluginTest(unittest.TestCase):
            try:
                stub.UnaryCall(request, test_constants.LONG_TIMEOUT)
            except face.AbortionError as error:
                self.assertEqual(interfaces.StatusCode.UNIMPLEMENTED,
                                 error.code)

    def testUnaryCall(self):
        with _CreateService() as (methods, stub):

@@ -311,8 +310,8 @@ class PythonPluginTest(unittest.TestCase):
    def testStreamingOutputCall(self):
        with _CreateService() as (methods, stub):
            request = _streaming_output_request()
            responses = stub.StreamingOutputCall(request,
                                                 test_constants.LONG_TIMEOUT)
            expected_responses = methods.StreamingOutputCall(
                request, 'not a real RpcContext!')
            for expected_response, response in moves.zip_longest(

@@ -331,8 +330,8 @@ class PythonPluginTest(unittest.TestCase):
    def testStreamingOutputCallCancelled(self):
        with _CreateService() as (methods, stub):
            request = _streaming_output_request()
            responses = stub.StreamingOutputCall(request,
                                                 test_constants.LONG_TIMEOUT)
            next(responses)
            responses.cancel()
            with self.assertRaises(face.CancellationError):

@@ -353,8 +352,7 @@ class PythonPluginTest(unittest.TestCase):
                _streaming_input_request_iterator(),
                test_constants.LONG_TIMEOUT)
            expected_response = methods.StreamingInputCall(
                _streaming_input_request_iterator(), 'not a real RpcContext!')
            self.assertEqual(expected_response, response)

    def testStreamingInputCallFuture(self):
@@ -365,8 +363,7 @@ class PythonPluginTest(unittest.TestCase):
                test_constants.LONG_TIMEOUT)
            response = response_future.result()
            expected_response = methods.StreamingInputCall(
                _streaming_input_request_iterator(), 'not a real RpcContext!')
            self.assertEqual(expected_response, response)

    def testStreamingInputCallFutureExpired(self):

@@ -377,8 +374,8 @@ class PythonPluginTest(unittest.TestCase):
                test_constants.SHORT_TIMEOUT)
            with self.assertRaises(face.ExpirationError):
                response_future.result()
            self.assertIsInstance(response_future.exception(),
                                  face.ExpirationError)

    def testStreamingInputCallFutureCancelled(self):
        with _CreateService() as (methods, stub):

@@ -401,12 +398,10 @@ class PythonPluginTest(unittest.TestCase):
    def testFullDuplexCall(self):
        with _CreateService() as (methods, stub):
            responses = stub.FullDuplexCall(_full_duplex_request_iterator(),
                                            test_constants.LONG_TIMEOUT)
            expected_responses = methods.FullDuplexCall(
                _full_duplex_request_iterator(), 'not a real RpcContext!')
            for expected_response, response in moves.zip_longest(
                    expected_responses, responses):
                self.assertEqual(expected_response, response)

@@ -415,16 +410,16 @@ class PythonPluginTest(unittest.TestCase):
        request_iterator = _full_duplex_request_iterator()
        with _CreateService() as (methods, stub):
            with methods.pause():
                responses = stub.FullDuplexCall(request_iterator,
                                                test_constants.SHORT_TIMEOUT)
                with self.assertRaises(face.ExpirationError):
                    list(responses)

    def testFullDuplexCallCancelled(self):
        with _CreateService() as (methods, stub):
            request_iterator = _full_duplex_request_iterator()
            responses = stub.FullDuplexCall(request_iterator,
                                            test_constants.LONG_TIMEOUT)
            next(responses)
            responses.cancel()
            with self.assertRaises(face.CancellationError):

@@ -434,14 +429,15 @@ class PythonPluginTest(unittest.TestCase):
        request_iterator = _full_duplex_request_iterator()
        with _CreateService() as (methods, stub):
            with methods.fail():
                responses = stub.FullDuplexCall(request_iterator,
                                                test_constants.LONG_TIMEOUT)
                self.assertIsNotNone(responses)
                with self.assertRaises(face.RemoteError):
                    next(responses)

    def testHalfDuplexCall(self):
with _CreateService() as (methods, stub): with _CreateService() as (methods, stub):
def half_duplex_request_iterator(): def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest() request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0) request.response_parameters.add(size=1, interval_us=0)
@ -450,8 +446,9 @@ class PythonPluginTest(unittest.TestCase):
request.response_parameters.add(size=2, interval_us=0) request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0) request.response_parameters.add(size=3, interval_us=0)
yield request yield request
responses = stub.HalfDuplexCall(
half_duplex_request_iterator(), test_constants.LONG_TIMEOUT) responses = stub.HalfDuplexCall(half_duplex_request_iterator(),
test_constants.LONG_TIMEOUT)
expected_responses = methods.HalfDuplexCall( expected_responses = methods.HalfDuplexCall(
half_duplex_request_iterator(), 'not a real RpcContext!') half_duplex_request_iterator(), 'not a real RpcContext!')
for check in moves.zip_longest(expected_responses, responses): for check in moves.zip_longest(expected_responses, responses):
@ -461,6 +458,7 @@ class PythonPluginTest(unittest.TestCase):
def testHalfDuplexCallWedged(self): def testHalfDuplexCallWedged(self):
condition = threading.Condition() condition = threading.Condition()
wait_cell = [False] wait_cell = [False]
@contextlib.contextmanager @contextlib.contextmanager
def wait(): # pylint: disable=invalid-name def wait(): # pylint: disable=invalid-name
# Where's Python 3's 'nonlocal' statement when you need it? # Where's Python 3's 'nonlocal' statement when you need it?
@ -470,6 +468,7 @@ class PythonPluginTest(unittest.TestCase):
with condition: with condition:
wait_cell[0] = False wait_cell[0] = False
condition.notify_all() condition.notify_all()
def half_duplex_request_iterator(): def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest() request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0) request.response_parameters.add(size=1, interval_us=0)
@ -477,10 +476,11 @@ class PythonPluginTest(unittest.TestCase):
with condition: with condition:
while wait_cell[0]: while wait_cell[0]:
condition.wait() condition.wait()
with _CreateService() as (methods, stub): with _CreateService() as (methods, stub):
with wait(): with wait():
responses = stub.HalfDuplexCall( responses = stub.HalfDuplexCall(half_duplex_request_iterator(),
half_duplex_request_iterator(), test_constants.SHORT_TIMEOUT) test_constants.SHORT_TIMEOUT)
# half-duplex waits for the client to send all info # half-duplex waits for the client to send all info
with self.assertRaises(face.ExpirationError): with self.assertRaises(face.ExpirationError):
next(responses) next(responses)
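For readers skimming the hunks above: the reformatting is purely cosmetic, so the call patterns under test are unchanged. The sketch below restates those patterns outside the test harness. It assumes only what the tests themselves show, namely a generated beta-API stub (here called stub) whose stream-stream method FullDuplexCall takes a request iterator and a timeout; the helper names make_requests, cancel_after_first_response and expires_when_server_pauses are illustrative and not part of the repository.

# Illustrative sketch only; 'stub' and 'make_requests' are stand-ins.
from grpc.framework.interfaces.face import face


def cancel_after_first_response(stub, make_requests, timeout):
  # Start the RPC, consume one response, then cancel it locally; iterating
  # further is expected to surface face.CancellationError.
  responses = stub.FullDuplexCall(make_requests(), timeout)
  next(responses)
  responses.cancel()
  try:
    next(responses)
  except face.CancellationError:
    return True
  return False


def expires_when_server_pauses(stub, make_requests, short_timeout):
  # With the server paused, draining the response iterator runs past the
  # deadline and is expected to raise face.ExpirationError.
  responses = stub.FullDuplexCall(make_requests(), short_timeout)
  try:
    list(responses)
  except face.ExpirationError:
    return True
  return False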

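The testHalfDuplexCallWedged hunks lean on a small synchronisation trick: a contextmanager flips a flag under a threading.Condition so that the request iterator stalls until the test releases it. Below is a self-contained sketch of that gate with illustrative names (wait, blocking_iterator, wait_cell); it mirrors the shape of the test code but is not taken from it.

import contextlib
import threading

condition = threading.Condition()
wait_cell = [False]  # a one-element list stands in for Python 3's 'nonlocal'


@contextlib.contextmanager
def wait():
  # While this context is active, blocking_iterator() cannot finish.
  with condition:
    wait_cell[0] = True
  yield
  with condition:
    wait_cell[0] = False
    condition.notify_all()


def blocking_iterator():
  yield 'first item'
  with condition:
    while wait_cell[0]:
      condition.wait()
  yield 'last item'


if __name__ == '__main__':
  results = []
  consumer = threading.Thread(
      target=lambda: results.extend(blocking_iterator()))
  with wait():
    consumer.start()  # the consumer stalls after producing the first item
  consumer.join()     # once the gate opens, the iterator completes
  print(results)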
@@ -26,5 +26,3 @@
 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-
