Merge github.com:grpc/grpc into metadata_filter

reviewable/pr8842/r14
Craig Tiller
commit 5acc826ace
Showing 100 changed files (changed-line counts in parentheses):
  1. .gitignore (1)
  2. BUILD (3)
  3. CMakeLists.txt (7)
  4. Makefile (9)
  5. bazel/BUILD (39)
  6. bazel/cc_grpc_library.bzl (1)
  7. bazel/generate_cc.bzl (4)
  8. build.yaml (1)
  9. examples/cpp/helloworld/BUILD (42)
  10. examples/cpp/helloworld/greeter_client.cc (4)
  11. examples/cpp/helloworld/greeter_server.cc (4)
  12. examples/protos/BUILD (52)
  13. gRPC-Core.podspec (2)
  14. grpc.gemspec (2)
  15. include/grpc/impl/codegen/gpr_slice.h (84)
  16. include/grpc/impl/codegen/slice.h (19)
  17. package.xml (48)
  18. setup.cfg (4)
  19. src/core/ext/client_channel/connector.h (3)
  20. src/core/ext/client_channel/subchannel.c (57)
  21. src/core/ext/client_channel/subchannel.h (13)
  22. src/core/ext/client_channel/subchannel_index.c (12)
  23. src/core/ext/lb_policy/pick_first/pick_first.c (12)
  24. src/core/ext/lb_policy/round_robin/round_robin.c (12)
  25. src/core/ext/transport/chttp2/client/chttp2_connector.c (7)
  26. src/php/README.md (7)
  27. src/proto/grpc/testing/BUILD (30)
  28. src/proto/grpc/testing/duplicate/BUILD (30)
  29. src/python/grpcio/_spawn_patch.py (46)
  30. src/python/grpcio/commands.py (352)
  31. src/python/grpcio/grpc/__init__.py (744)
  32. src/python/grpcio/grpc/_auth.py (83)
  33. src/python/grpcio/grpc/_channel.py (1529)
  34. src/python/grpcio/grpc/_common.py (123)
  35. src/python/grpcio/grpc/_credential_composition.py (18)
  36. src/python/grpcio/grpc/_plugin_wrapping.py (123)
  37. src/python/grpcio/grpc/_server.py (1096)
  38. src/python/grpcio/grpc/_utilities.py (235)
  39. src/python/grpcio/grpc/beta/_client_adaptations.py (1028)
  40. src/python/grpcio/grpc/beta/_connectivity_channel.py (213)
  41. src/python/grpcio/grpc/beta/_server_adaptations.py (544)
  42. src/python/grpcio/grpc/beta/implementations.py (173)
  43. src/python/grpcio/grpc/beta/interfaces.py (78)
  44. src/python/grpcio/grpc/beta/utilities.py (202)
  45. src/python/grpcio/grpc/framework/__init__.py (2)
  46. src/python/grpcio/grpc/framework/common/__init__.py (2)
  47. src/python/grpcio/grpc/framework/common/cardinality.py (11)
  48. src/python/grpcio/grpc/framework/common/style.py (7)
  49. src/python/grpcio/grpc/framework/foundation/__init__.py (2)
  50. src/python/grpcio/grpc/framework/foundation/abandonment.py (3)
  51. src/python/grpcio/grpc/framework/foundation/callable_util.py (47)
  52. src/python/grpcio/grpc/framework/foundation/future.py (129)
  53. src/python/grpcio/grpc/framework/foundation/logging_pool.py (55)
  54. src/python/grpcio/grpc/framework/foundation/stream.py (28)
  55. src/python/grpcio/grpc/framework/foundation/stream_util.py (227)
  56. src/python/grpcio/grpc/framework/interfaces/__init__.py (2)
  57. src/python/grpcio/grpc/framework/interfaces/base/__init__.py (2)
  58. src/python/grpcio/grpc/framework/interfaces/base/base.py (184)
  59. src/python/grpcio/grpc/framework/interfaces/base/utilities.py (42)
  60. src/python/grpcio/grpc/framework/interfaces/face/__init__.py (2)
  61. src/python/grpcio/grpc/framework/interfaces/face/face.py (610)
  62. src/python/grpcio/grpc/framework/interfaces/face/utilities.py (87)
  63. src/python/grpcio/support.py (103)
  64. src/python/grpcio_health_checking/grpc_health/__init__.py (2)
  65. src/python/grpcio_health_checking/grpc_health/v1/__init__.py (2)
  66. src/python/grpcio_health_checking/grpc_health/v1/health.py (33)
  67. src/python/grpcio_health_checking/health_commands.py (53)
  68. src/python/grpcio_health_checking/setup.py (14)
  69. src/python/grpcio_reflection/grpc_reflection/__init__.py (1)
  70. src/python/grpcio_reflection/grpc_reflection/v1alpha/__init__.py (1)
  71. src/python/grpcio_reflection/grpc_reflection/v1alpha/reflection.py (168)
  72. src/python/grpcio_reflection/reflection_commands.py (57)
  73. src/python/grpcio_reflection/setup.py (14)
  74. src/python/grpcio_tests/commands.py (266)
  75. src/python/grpcio_tests/setup.py (42)
  76. src/python/grpcio_tests/tests/_loader.py (89)
  77. src/python/grpcio_tests/tests/_result.py (618)
  78. src/python/grpcio_tests/tests/_runner.py (273)
  79. src/python/grpcio_tests/tests/health_check/_health_servicer_test.py (87)
  80. src/python/grpcio_tests/tests/http2/_negative_http2_client.py (175)
  81. src/python/grpcio_tests/tests/interop/__init__.py (2)
  82. src/python/grpcio_tests/tests/interop/_insecure_intraop_test.py (24)
  83. src/python/grpcio_tests/tests/interop/_intraop_test_case.py (38)
  84. src/python/grpcio_tests/tests/interop/_secure_intraop_test.py (35)
  85. src/python/grpcio_tests/tests/interop/client.py (163)
  86. src/python/grpcio_tests/tests/interop/methods.py (759)
  87. src/python/grpcio_tests/tests/interop/resources.py (21)
  88. src/python/grpcio_tests/tests/interop/server.py (57)
  89. src/python/grpcio_tests/tests/protoc_plugin/__init__.py (2)
  90. src/python/grpcio_tests/tests/protoc_plugin/_python_plugin_test.py (754)
  91. src/python/grpcio_tests/tests/protoc_plugin/_split_definitions_test.py (446)
  92. src/python/grpcio_tests/tests/protoc_plugin/beta_python_plugin_test.py (716)
  93. src/python/grpcio_tests/tests/protoc_plugin/protos/__init__.py (2)
  94. src/python/grpcio_tests/tests/protoc_plugin/protos/invocation_testing/__init__.py (2)
  95. src/python/grpcio_tests/tests/protoc_plugin/protos/invocation_testing/split_messages/__init__.py (2)
  96. src/python/grpcio_tests/tests/protoc_plugin/protos/invocation_testing/split_services/__init__.py (2)
  97. src/python/grpcio_tests/tests/protoc_plugin/protos/payload/__init__.py (2)
  98. src/python/grpcio_tests/tests/protoc_plugin/protos/requests/__init__.py (2)
  99. src/python/grpcio_tests/tests/protoc_plugin/protos/requests/r/__init__.py (2)
  100. src/python/grpcio_tests/tests/protoc_plugin/protos/responses/__init__.py (2)
Some files were not shown because too many files have changed in this diff.

.gitignore (vendored)

@@ -8,6 +8,7 @@ objs
# Python items
cython_debug/
python_build/
python_format_venv/
.coverage*
.eggs
htmlcov/

BUILD
@@ -401,6 +401,7 @@ grpc_cc_library(
"include/grpc/impl/codegen/atm_gcc_atomic.h",
"include/grpc/impl/codegen/atm_gcc_sync.h",
"include/grpc/impl/codegen/atm_windows.h",
"include/grpc/impl/codegen/gpr_slice.h",
"include/grpc/impl/codegen/gpr_types.h",
"include/grpc/impl/codegen/port_platform.h",
"include/grpc/impl/codegen/slice.h",
@@ -1083,6 +1084,7 @@ grpc_cc_library(
"src/cpp/common/completion_queue_cc.cc",
"src/cpp/common/core_codegen.cc",
"src/cpp/common/rpc_method.cc",
"src/cpp/common/version_cc.cc",
"src/cpp/server/async_generic_service.cc",
"src/cpp/server/create_default_thread_pool.cc",
"src/cpp/server/dynamic_thread_pool.cc",
@@ -1091,6 +1093,7 @@ grpc_cc_library(
"src/cpp/server/server_context.cc",
"src/cpp/server/server_credentials.cc",
"src/cpp/server/server_posix.cc",
"src/cpp/thread_manager/thread_manager.cc",
"src/cpp/util/byte_buffer_cc.cc",
"src/cpp/util/slice_cc.cc",
"src/cpp/util/status.cc",

CMakeLists.txt
@@ -264,6 +264,7 @@ foreach(_hdr
include/grpc/impl/codegen/atm_gcc_atomic.h
include/grpc/impl/codegen/atm_gcc_sync.h
include/grpc/impl/codegen/atm_windows.h
include/grpc/impl/codegen/gpr_slice.h
include/grpc/impl/codegen/gpr_types.h
include/grpc/impl/codegen/port_platform.h
include/grpc/impl/codegen/slice.h
@@ -548,6 +549,7 @@ foreach(_hdr
include/grpc/impl/codegen/atm_gcc_atomic.h
include/grpc/impl/codegen/atm_gcc_sync.h
include/grpc/impl/codegen/atm_windows.h
include/grpc/impl/codegen/gpr_slice.h
include/grpc/impl/codegen/gpr_types.h
include/grpc/impl/codegen/port_platform.h
include/grpc/impl/codegen/slice.h
@@ -804,6 +806,7 @@ foreach(_hdr
include/grpc/impl/codegen/atm_gcc_atomic.h
include/grpc/impl/codegen/atm_gcc_sync.h
include/grpc/impl/codegen/atm_windows.h
include/grpc/impl/codegen/gpr_slice.h
include/grpc/impl/codegen/gpr_types.h
include/grpc/impl/codegen/port_platform.h
include/grpc/impl/codegen/slice.h
@@ -1060,6 +1063,7 @@ foreach(_hdr
include/grpc/impl/codegen/atm_gcc_atomic.h
include/grpc/impl/codegen/atm_gcc_sync.h
include/grpc/impl/codegen/atm_windows.h
include/grpc/impl/codegen/gpr_slice.h
include/grpc/impl/codegen/gpr_types.h
include/grpc/impl/codegen/port_platform.h
include/grpc/impl/codegen/slice.h
@@ -1225,6 +1229,7 @@ foreach(_hdr
include/grpc/impl/codegen/atm_gcc_atomic.h
include/grpc/impl/codegen/atm_gcc_sync.h
include/grpc/impl/codegen/atm_windows.h
include/grpc/impl/codegen/gpr_slice.h
include/grpc/impl/codegen/gpr_types.h
include/grpc/impl/codegen/port_platform.h
include/grpc/impl/codegen/slice.h
@@ -1564,6 +1569,7 @@ foreach(_hdr
include/grpc/impl/codegen/atm_gcc_atomic.h
include/grpc/impl/codegen/atm_gcc_sync.h
include/grpc/impl/codegen/atm_windows.h
include/grpc/impl/codegen/gpr_slice.h
include/grpc/impl/codegen/gpr_types.h
include/grpc/impl/codegen/port_platform.h
include/grpc/impl/codegen/slice.h
@@ -1733,6 +1739,7 @@ foreach(_hdr
include/grpc/impl/codegen/atm_gcc_atomic.h
include/grpc/impl/codegen/atm_gcc_sync.h
include/grpc/impl/codegen/atm_windows.h
include/grpc/impl/codegen/gpr_slice.h
include/grpc/impl/codegen/gpr_types.h
include/grpc/impl/codegen/port_platform.h
include/grpc/impl/codegen/slice.h

Makefile
@@ -2539,6 +2539,7 @@ PUBLIC_HEADERS_C += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -2851,6 +2852,7 @@ PUBLIC_HEADERS_C += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -3126,6 +3128,7 @@ PUBLIC_HEADERS_C += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -3348,6 +3351,7 @@ PUBLIC_HEADERS_C += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -3643,6 +3647,7 @@ PUBLIC_HEADERS_C += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -3889,6 +3894,7 @@ PUBLIC_HEADERS_CXX += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -4257,6 +4263,7 @@ PUBLIC_HEADERS_CXX += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -4614,6 +4621,7 @@ PUBLIC_HEADERS_CXX += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \
@@ -4794,6 +4802,7 @@ PUBLIC_HEADERS_CXX += \
include/grpc/impl/codegen/atm_gcc_atomic.h \
include/grpc/impl/codegen/atm_gcc_sync.h \
include/grpc/impl/codegen/atm_windows.h \
include/grpc/impl/codegen/gpr_slice.h \
include/grpc/impl/codegen/gpr_types.h \
include/grpc/impl/codegen/port_platform.h \
include/grpc/impl/codegen/slice.h \

bazel/BUILD
@@ -1,9 +1,46 @@
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
licenses(["notice"]) # 3-clause BSD
package(default_visibility = ["//:__subpackages__"])
load(":cc_grpc_library.bzl", "cc_grpc_library")
proto_library(
name = "well_known_protos_list",
srcs = ["@submodule_protobuf//:well_known_protos"],
)
cc_grpc_library(
name = "well_known_protos",
srcs = "@submodule_protobuf//:well_known_protos",
srcs = "well_known_protos_list",
deps = [],
proto_only = True,
)

bazel/cc_grpc_library.bzl
@@ -44,7 +44,6 @@ def cc_grpc_library(name, srcs, deps, proto_only, **kwargs):
**kwargs
)
if not proto_only:
native.cc_library(
name = name,
srcs = [":" + codegen_grpc_target, ":" + codegen_target],

bazel/generate_cc.bzl
@@ -24,13 +24,15 @@ def generate_cc_impl(ctx):
if ctx.executable.plugin:
arguments += ["--plugin=protoc-gen-PLUGIN=" + ctx.executable.plugin.path]
arguments += ["--PLUGIN_out=" + ",".join(ctx.attr.flags) + ":" + dir_out]
additional_input = [ctx.executable.plugin]
else:
arguments += ["--cpp_out=" + ",".join(ctx.attr.flags) + ":" + dir_out]
additional_input = []
arguments += ["-I{0}={0}".format(include.path) for include in includes]
arguments += [proto.path for proto in protos]
ctx.action(
inputs = protos + includes,
inputs = protos + includes + additional_input,
outputs = out_files,
executable = ctx.executable._protoc,
arguments = arguments,

build.yaml
@@ -144,6 +144,7 @@ filegroups:
- include/grpc/impl/codegen/atm_gcc_atomic.h
- include/grpc/impl/codegen/atm_gcc_sync.h
- include/grpc/impl/codegen/atm_windows.h
- include/grpc/impl/codegen/gpr_slice.h
- include/grpc/impl/codegen/gpr_types.h
- include/grpc/impl/codegen/port_platform.h
- include/grpc/impl/codegen/slice.h

examples/cpp/helloworld/BUILD
@@ -0,0 +1,42 @@
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
cc_binary(
name = "greeter_client",
srcs = ["greeter_client.cc"],
deps = ["//examples/protos:helloworld"],
defines = ["BAZEL_BUILD"],
)
cc_binary(
name = "greeter_server",
srcs = ["greeter_server.cc"],
deps = ["//examples/protos:helloworld"],
defines = ["BAZEL_BUILD"],
)

examples/cpp/helloworld/greeter_client.cc
@@ -37,7 +37,11 @@
#include <grpc++/grpc++.h>
#ifdef BAZEL_BUILD
#include "examples/protos/helloworld.grpc.pb.h"
#else
#include "helloworld.grpc.pb.h"
#endif
using grpc::Channel;
using grpc::ClientContext;

examples/cpp/helloworld/greeter_server.cc
@@ -37,7 +37,11 @@
#include <grpc++/grpc++.h>
#ifdef BAZEL_BUILD
#include "examples/protos/helloworld.grpc.pb.h"
#else
#include "helloworld.grpc.pb.h"
#endif
using grpc::Server;
using grpc::ServerBuilder;

examples/protos/BUILD
@@ -0,0 +1,52 @@
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package(default_visibility = ["//visibility:public"])
load("//bazel:grpc_build_system.bzl", "grpc_proto_library")
grpc_proto_library(
name = "auth_sample",
srcs = ["auth_sample.proto"],
)
grpc_proto_library(
name = "hellostreamingworld",
srcs = ["hellostreamingworld.proto"],
)
grpc_proto_library(
name = "helloworld",
srcs = ["helloworld.proto"],
)
grpc_proto_library(
name = "route_guide",
srcs = ["route_guide.proto"],
)

gRPC-Core.podspec
@@ -148,6 +148,7 @@ Pod::Spec.new do |s|
'include/grpc/impl/codegen/atm_gcc_atomic.h',
'include/grpc/impl/codegen/atm_gcc_sync.h',
'include/grpc/impl/codegen/atm_windows.h',
'include/grpc/impl/codegen/gpr_slice.h',
'include/grpc/impl/codegen/gpr_types.h',
'include/grpc/impl/codegen/port_platform.h',
'include/grpc/impl/codegen/slice.h',
@@ -175,6 +176,7 @@ Pod::Spec.new do |s|
'include/grpc/impl/codegen/atm_gcc_atomic.h',
'include/grpc/impl/codegen/atm_gcc_sync.h',
'include/grpc/impl/codegen/atm_windows.h',
'include/grpc/impl/codegen/gpr_slice.h',
'include/grpc/impl/codegen/gpr_types.h',
'include/grpc/impl/codegen/port_platform.h',
'include/grpc/impl/codegen/slice.h',

grpc.gemspec
@@ -73,6 +73,7 @@ Gem::Specification.new do |s|
s.files += %w( include/grpc/impl/codegen/atm_gcc_atomic.h )
s.files += %w( include/grpc/impl/codegen/atm_gcc_sync.h )
s.files += %w( include/grpc/impl/codegen/atm_windows.h )
s.files += %w( include/grpc/impl/codegen/gpr_slice.h )
s.files += %w( include/grpc/impl/codegen/gpr_types.h )
s.files += %w( include/grpc/impl/codegen/port_platform.h )
s.files += %w( include/grpc/impl/codegen/slice.h )
@@ -156,6 +157,7 @@ Gem::Specification.new do |s|
s.files += %w( include/grpc/impl/codegen/atm_gcc_atomic.h )
s.files += %w( include/grpc/impl/codegen/atm_gcc_sync.h )
s.files += %w( include/grpc/impl/codegen/atm_windows.h )
s.files += %w( include/grpc/impl/codegen/gpr_slice.h )
s.files += %w( include/grpc/impl/codegen/gpr_types.h )
s.files += %w( include/grpc/impl/codegen/port_platform.h )
s.files += %w( include/grpc/impl/codegen/slice.h )

include/grpc/impl/codegen/gpr_slice.h
@@ -0,0 +1,84 @@
/*
*
* Copyright 2016, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef GRPC_IMPL_CODEGEN_GPR_SLICE_H
#define GRPC_IMPL_CODEGEN_GPR_SLICE_H
/* WARNING: Please do not use this header. This was added as a temporary measure
* to not break some of the external projects that depend on gpr_slice_*
* functions. We are actively working on moving all the gpr_slice_* references
* to grpc_slice_* and this file will be removed
* */
/* TODO (sreek) - Allowed by default but will be very soon turned off */
#define GRPC_ALLOW_GPR_SLICE_FUNCTIONS 1
#ifdef GRPC_ALLOW_GPR_SLICE_FUNCTIONS
#define gpr_slice_refcount grpc_slice_refcount
#define gpr_slice grpc_slice
#define gpr_slice_buffer grpc_slice_buffer
#define gpr_slice_ref grpc_slice_ref
#define gpr_slice_unref grpc_slice_unref
#define gpr_slice_new grpc_slice_new
#define gpr_slice_new_with_user_data grpc_slice_new_with_user_data
#define gpr_slice_new_with_len grpc_slice_new_with_len
#define gpr_slice_malloc grpc_slice_malloc
#define gpr_slice_from_copied_string grpc_slice_from_copied_string
#define gpr_slice_from_copied_buffer grpc_slice_from_copied_buffer
#define gpr_slice_from_static_string grpc_slice_from_static_string
#define gpr_slice_sub grpc_slice_sub
#define gpr_slice_sub_no_ref grpc_slice_sub_no_ref
#define gpr_slice_split_tail grpc_slice_split_tail
#define gpr_slice_split_head grpc_slice_split_head
#define gpr_slice_cmp grpc_slice_cmp
#define gpr_slice_str_cmp grpc_slice_str_cmp
#define gpr_slice_buffer grpc_slice_buffer
#define gpr_slice_buffer_init grpc_slice_buffer_init
#define gpr_slice_buffer_destroy grpc_slice_buffer_destroy
#define gpr_slice_buffer_add grpc_slice_buffer_add
#define gpr_slice_buffer_add_indexed grpc_slice_buffer_add_indexed
#define gpr_slice_buffer_addn grpc_slice_buffer_addn
#define gpr_slice_buffer_tiny_add grpc_slice_buffer_tiny_add
#define gpr_slice_buffer_pop grpc_slice_buffer_pop
#define gpr_slice_buffer_reset_and_unref grpc_slice_buffer_reset_and_unref
#define gpr_slice_buffer_swap grpc_slice_buffer_swap
#define gpr_slice_buffer_move_into grpc_slice_buffer_move_into
#define gpr_slice_buffer_trim_end grpc_slice_buffer_trim_end
#define gpr_slice_buffer_move_first grpc_slice_buffer_move_first
#define gpr_slice_buffer_take_first grpc_slice_buffer_take_first
#endif /* GRPC_ALLOW_GPR_SLICE_FUNCTIONS */
#endif /* GRPC_IMPL_CODEGEN_GPR_SLICE_H */
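
Taken together, these macros let code written against the old `gpr_slice_*` names keep compiling while callers migrate to `grpc_slice_*`. A minimal sketch of the effect (hypothetical downstream snippet, not part of this diff; assumes the public `<grpc/slice.h>` and `<grpc/slice_buffer.h>` headers are on the include path):

```c
/* Hypothetical pre-rename code. With GRPC_ALLOW_GPR_SLICE_FUNCTIONS defined
 * (the default above), the preprocessor rewrites every gpr_slice_* token to
 * its grpc_slice_* replacement, so this compiles and links unchanged. */
#include <grpc/slice.h>
#include <grpc/slice_buffer.h>

int main(void) {
  gpr_slice s = gpr_slice_from_copied_string("hello"); /* -> grpc_slice_from_copied_string */
  gpr_slice_buffer sb;                                 /* -> grpc_slice_buffer */
  gpr_slice_buffer_init(&sb);
  gpr_slice_buffer_add(&sb, gpr_slice_ref(s));         /* buffer takes ownership of the new ref */
  gpr_slice_buffer_destroy(&sb);                       /* unrefs the contained slices */
  gpr_slice_unref(s);                                  /* -> grpc_slice_unref */
  return 0;
}
```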

include/grpc/impl/codegen/slice.h
@@ -38,6 +38,7 @@
#include <stdint.h>
#include <grpc/impl/codegen/exec_ctx_fwd.h>
#include <grpc/impl/codegen/gpr_slice.h>
typedef struct grpc_slice grpc_slice;
@@ -130,4 +131,22 @@ typedef struct {
GRPC_SLICE_START_PTR(slice) + GRPC_SLICE_LENGTH(slice)
#define GRPC_SLICE_IS_EMPTY(slice) (GRPC_SLICE_LENGTH(slice) == 0)
#ifdef GRPC_ALLOW_GPR_SLICE_FUNCTIONS
/* Duplicate GPR_* definitions */
#define GPR_SLICE_START_PTR(slice) \
((slice).refcount ? (slice).data.refcounted.bytes \
: (slice).data.inlined.bytes)
#define GPR_SLICE_LENGTH(slice) \
((slice).refcount ? (slice).data.refcounted.length \
: (slice).data.inlined.length)
#define GPR_SLICE_SET_LENGTH(slice, newlen) \
((slice).refcount ? ((slice).data.refcounted.length = (size_t)(newlen)) \
: ((slice).data.inlined.length = (uint8_t)(newlen)))
#define GPR_SLICE_END_PTR(slice) \
GRPC_SLICE_START_PTR(slice) + GRPC_SLICE_LENGTH(slice)
#define GPR_SLICE_IS_EMPTY(slice) (GRPC_SLICE_LENGTH(slice) == 0)
#endif /* GRPC_ALLOW_GPR_SLICE_FUNCTIONS */
#endif /* GRPC_IMPL_CODEGEN_SLICE_H */
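
The duplicated accessor macros work the same way for field access: each `GPR_SLICE_*` form expands to exactly the same refcounted/inlined logic as its `GRPC_SLICE_*` twin, so the two spellings can be mixed freely during migration. A hypothetical illustration (not part of this diff):

```c
#include <stdint.h>
#include <stdio.h>
#include <grpc/slice.h>

/* Dump a slice's bytes using only the legacy GPR_SLICE_* spellings; the
 * compiled code is identical to the GRPC_SLICE_* version. */
static void dump_slice(grpc_slice s) {
  if (GPR_SLICE_IS_EMPTY(s)) return;            /* same as GRPC_SLICE_IS_EMPTY */
  for (uint8_t *p = GPR_SLICE_START_PTR(s);     /* same as GRPC_SLICE_START_PTR */
       p != GPR_SLICE_END_PTR(s); ++p) {
    printf("%02x ", *p);
  }
  printf("\n");
}
```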

package.xml
@@ -10,19 +10,20 @@
<email>grpc-packages@google.com</email>
<active>yes</active>
</lead>
<date>2016-08-22</date>
<date>2017-01-13</date>
<time>16:06:07</time>
<version>
<release>1.1.0dev</release>
<api>1.1.0dev</api>
</version>
<stability>
<release>stable</release>
<api>stable</api>
<release>beta</release>
<api>beta</api>
</stability>
<license>BSD</license>
<notes>
- Reject metadata keys which are not legal #7881
- PHP Proto3 adoption #8179
- Various bug fixes
</notes>
<contents>
<dir baseinstalldir="/" name="/">
@@ -81,6 +82,7 @@
<file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_atomic.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_sync.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/atm_windows.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_slice.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_types.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/port_platform.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/slice.h" role="src" />
@@ -164,6 +166,7 @@
<file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_atomic.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/atm_gcc_sync.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/atm_windows.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_slice.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/gpr_types.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/port_platform.h" role="src" />
<file baseinstalldir="/" name="include/grpc/impl/codegen/slice.h" role="src" />
@@ -1220,18 +1223,49 @@ Update to wrap gRPC C Core version 0.10.0
</release>
<release>
<version>
<release>1.1.0dev</release>
<api>1.1.0dev</api>
<release>1.0.1RC1</release>
<api>1.0.1RC1</api>
</version>
<stability>
<release>beta</release>
<api>beta</api>
</stability>
<date>2016-10-06</date>
<license>BSD</license>
<notes>
- Reject metadata keys which are not legal #7881
</notes>
</release>
<release>
<version>
<release>1.0.1</release>
<api>1.0.1</api>
</version>
<stability>
<release>stable</release>
<api>stable</api>
</stability>
<date>2016-08-22</date>
<date>2016-10-27</date>
<license>BSD</license>
<notes>
- Reject metadata keys which are not legal #7881
</notes>
</release>
<release>
<version>
<release>1.1.0dev</release>
<api>1.1.0dev</api>
</version>
<stability>
<release>beta</release>
<api>beta</api>
</stability>
<date>2017-01-13</date>
<license>BSD</license>
<notes>
- PHP Proto3 adoption #8179
- Various bug fixes
</notes>
</release>
</changelog>
</package>

setup.cfg
@@ -11,3 +11,7 @@ inplace=1
[build_package_protos]
exclude=.*protoc_plugin/protoc_plugin_test\.proto$
# Style settings
[yapf]
based_on_style = google

src/core/ext/client_channel/connector.h
@@ -48,9 +48,6 @@ struct grpc_connector {
typedef struct {
/** set of pollsets interested in this connection */
grpc_pollset_set *interested_parties;
/** address to connect to */
const grpc_resolved_address *addr;
size_t addr_len;
/** initial connect string to send */
grpc_slice initial_connect_string;
/** deadline for connection */

src/core/ext/client_channel/subchannel.c
@@ -38,12 +38,16 @@
#include <grpc/support/alloc.h>
#include <grpc/support/avl.h>
#include <grpc/support/string_util.h>
#include "src/core/ext/client_channel/client_channel.h"
#include "src/core/ext/client_channel/initial_connect_string.h"
#include "src/core/ext/client_channel/parse_address.h"
#include "src/core/ext/client_channel/subchannel_index.h"
#include "src/core/ext/client_channel/uri_parser.h"
#include "src/core/lib/channel/channel_args.h"
#include "src/core/lib/channel/connected_channel.h"
#include "src/core/lib/iomgr/sockaddr_utils.h"
#include "src/core/lib/iomgr/timer.h"
#include "src/core/lib/profiling/timers.h"
#include "src/core/lib/slice/slice_internal.h"
@@ -95,8 +99,6 @@ struct grpc_subchannel {
size_t num_filters;
/** channel arguments */
grpc_channel_args *args;
/** address to connect to */
grpc_resolved_address *addr;
grpc_subchannel_key *key;
@@ -211,7 +213,6 @@ static void subchannel_destroy(grpc_exec_ctx *exec_ctx, void *arg,
grpc_subchannel *c = arg;
gpr_free((void *)c->filters);
grpc_channel_args_destroy(exec_ctx, c->args);
gpr_free(c->addr);
grpc_slice_unref_internal(exec_ctx, c->initial_connect_string);
grpc_connectivity_state_destroy(exec_ctx, &c->state_tracker);
grpc_connector_unref(exec_ctx, c->connector);
@@ -327,12 +328,17 @@ grpc_subchannel *grpc_subchannel_create(grpc_exec_ctx *exec_ctx,
} else {
c->filters = NULL;
}
c->addr = gpr_malloc(sizeof(grpc_resolved_address));
if (args->addr->len)
memcpy(c->addr, args->addr, sizeof(grpc_resolved_address));
c->pollset_set = grpc_pollset_set_create();
grpc_set_initial_connect_string(&c->addr, &c->initial_connect_string);
c->args = grpc_channel_args_copy(args->args);
grpc_resolved_address *addr = gpr_malloc(sizeof(*addr));
grpc_get_subchannel_address_arg(args->args, addr);
grpc_set_initial_connect_string(&addr, &c->initial_connect_string);
static const char *keys_to_remove[] = {GRPC_ARG_SUBCHANNEL_ADDRESS};
grpc_arg new_arg = grpc_create_subchannel_address_arg(addr);
gpr_free(addr);
c->args = grpc_channel_args_copy_and_add_and_remove(
args->args, keys_to_remove, GPR_ARRAY_SIZE(keys_to_remove), &new_arg, 1);
gpr_free(new_arg.value.string);
c->root_external_state_watcher.next = c->root_external_state_watcher.prev =
&c->root_external_state_watcher;
grpc_closure_init(&c->connected, subchannel_connected, c,
@@ -385,7 +391,6 @@ static void continue_connect_locked(grpc_exec_ctx *exec_ctx,
grpc_connect_in_args args;
args.interested_parties = c->pollset_set;
args.addr = c->addr;
args.deadline = c->next_attempt;
args.channel_args = c->args;
args.initial_connect_string = c->initial_connect_string;
@@ -769,3 +774,37 @@ grpc_call_stack *grpc_subchannel_call_get_call_stack(
grpc_subchannel_call *subchannel_call) {
return SUBCHANNEL_CALL_TO_CALL_STACK(subchannel_call);
}
static void grpc_uri_to_sockaddr(char *uri_str, grpc_resolved_address *addr) {
grpc_uri *uri = grpc_uri_parse(uri_str, 0 /* suppress_errors */);
GPR_ASSERT(uri != NULL);
if (strcmp(uri->scheme, "ipv4") == 0) {
GPR_ASSERT(parse_ipv4(uri, addr));
} else if (strcmp(uri->scheme, "ipv6") == 0) {
GPR_ASSERT(parse_ipv6(uri, addr));
} else {
GPR_ASSERT(parse_unix(uri, addr));
}
grpc_uri_destroy(uri);
}
void grpc_get_subchannel_address_arg(const grpc_channel_args *args,
grpc_resolved_address *addr) {
const grpc_arg *addr_arg =
grpc_channel_args_find(args, GRPC_ARG_SUBCHANNEL_ADDRESS);
GPR_ASSERT(addr_arg != NULL); // Should have been set by LB policy.
GPR_ASSERT(addr_arg->type == GRPC_ARG_STRING);
memset(addr, 0, sizeof(*addr));
if (*addr_arg->value.string != '\0') {
grpc_uri_to_sockaddr(addr_arg->value.string, addr);
}
}
grpc_arg grpc_create_subchannel_address_arg(const grpc_resolved_address *addr) {
grpc_arg new_arg;
new_arg.key = GRPC_ARG_SUBCHANNEL_ADDRESS;
new_arg.type = GRPC_ARG_STRING;
new_arg.value.string =
addr->len > 0 ? grpc_sockaddr_to_uri(addr) : gpr_strdup("");
return new_arg;
}

src/core/ext/client_channel/subchannel.h
@@ -40,6 +40,9 @@
#include "src/core/lib/transport/connectivity_state.h"
#include "src/core/lib/transport/metadata.h"
// Channel arg containing a grpc_resolved_address to connect to.
#define GRPC_ARG_SUBCHANNEL_ADDRESS "grpc.subchannel_address"
/** A (sub-)channel that knows how to connect to exactly one target
address. Provides a target for load balancing. */
typedef struct grpc_subchannel grpc_subchannel;
@@ -164,8 +167,6 @@ struct grpc_subchannel_args {
size_t filter_count;
/** Channel arguments to be supplied to the newly created channel */
const grpc_channel_args *args;
/** Address to connect to */
grpc_resolved_address *addr;
};
/** create a subchannel given a connector */
@@ -173,4 +174,12 @@ grpc_subchannel *grpc_subchannel_create(grpc_exec_ctx *exec_ctx,
grpc_connector *connector,
const grpc_subchannel_args *args);
/// Sets \a addr from \a args.
void grpc_get_subchannel_address_arg(const grpc_channel_args *args,
grpc_resolved_address *addr);
/// Returns a new channel arg encoding the subchannel address as a string.
/// Caller is responsible for freeing the string.
grpc_arg grpc_create_subchannel_address_arg(const grpc_resolved_address *addr);
#endif /* GRPC_CORE_EXT_CLIENT_CHANNEL_SUBCHANNEL_H */
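
The subchannel code above and the LB policy hunks below use the two halves of this API: the address is now carried as a string channel arg instead of a struct field. A condensed sketch of the round trip (hypothetical wrapper functions, not from the diff; `grpc_channel_args_copy_and_add` and `gpr_free` are existing core utilities):

```c
#include <grpc/support/alloc.h>
#include "src/core/ext/client_channel/subchannel.h"
#include "src/core/lib/channel/channel_args.h"

/* Encode side (what pick_first/round_robin now do): stash the resolved
 * address into the channel args as an "ipv4:"/"ipv6:"/"unix:" URI string. */
static grpc_channel_args *args_with_address(const grpc_channel_args *base,
                                            const grpc_resolved_address *addr) {
  grpc_arg arg = grpc_create_subchannel_address_arg(addr);
  grpc_channel_args *result = grpc_channel_args_copy_and_add(base, &arg, 1);
  gpr_free(arg.value.string); /* copy_and_add duplicated the string */
  return result;
}

/* Decode side (what the subchannel and chttp2 connector now do): recover the
 * grpc_resolved_address; asserts if no LB policy ever set the arg. */
static void connect_path(const grpc_channel_args *channel_args) {
  grpc_resolved_address addr;
  grpc_get_subchannel_address_arg(channel_args, &addr);
  /* ... pass &addr to grpc_tcp_client_connect(), as in chttp2_connector ... */
}
```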

src/core/ext/client_channel/subchannel_index.c
@@ -86,11 +86,6 @@ static grpc_subchannel_key *create_key(
} else {
k->args.filters = NULL;
}
k->args.addr = gpr_malloc(sizeof(grpc_resolved_address));
k->args.addr->len = args->addr->len;
if (k->args.addr->len > 0) {
memcpy(k->args.addr, args->addr, sizeof(grpc_resolved_address));
}
k->args.args = copy_channel_args(args->args);
return k;
}
@@ -108,14 +103,8 @@ static int subchannel_key_compare(grpc_subchannel_key *a,
grpc_subchannel_key *b) {
int c = GPR_ICMP(a->connector, b->connector);
if (c != 0) return c;
c = GPR_ICMP(a->args.addr->len, b->args.addr->len);
if (c != 0) return c;
c = GPR_ICMP(a->args.filter_count, b->args.filter_count);
if (c != 0) return c;
if (a->args.addr->len) {
c = memcmp(a->args.addr->addr, b->args.addr->addr, a->args.addr->len);
if (c != 0) return c;
}
if (a->args.filter_count > 0) {
c = memcmp(a->args.filters, b->args.filters,
a->args.filter_count * sizeof(*a->args.filters));
@@ -129,7 +118,6 @@ void grpc_subchannel_key_destroy(grpc_exec_ctx *exec_ctx,
grpc_connector_unref(exec_ctx, k->connector);
gpr_free((grpc_channel_args *)k->args.filters);
grpc_channel_args_destroy(exec_ctx, (grpc_channel_args *)k->args.args);
gpr_free(k->args.addr);
gpr_free(k);
}

src/core/ext/lb_policy/pick_first/pick_first.c
@@ -36,7 +36,9 @@
#include <grpc/support/alloc.h>
#include "src/core/ext/client_channel/lb_policy_registry.h"
#include "src/core/ext/client_channel/subchannel.h"
#include "src/core/lib/channel/channel_args.h"
#include "src/core/lib/iomgr/sockaddr_utils.h"
#include "src/core/lib/transport/connectivity_state.h"
typedef struct pending_pick {
@@ -466,11 +468,15 @@ static grpc_lb_policy *create_pick_first(grpc_exec_ctx *exec_ctx,
}
memset(&sc_args, 0, sizeof(grpc_subchannel_args));
sc_args.addr = &addresses->addresses[i].address;
sc_args.args = args->args;
grpc_arg addr_arg =
grpc_create_subchannel_address_arg(&addresses->addresses[i].address);
grpc_channel_args *new_args =
grpc_channel_args_copy_and_add(args->args, &addr_arg, 1);
gpr_free(addr_arg.value.string);
sc_args.args = new_args;
grpc_subchannel *subchannel = grpc_client_channel_factory_create_subchannel(
exec_ctx, args->client_channel_factory, &sc_args);
grpc_channel_args_destroy(exec_ctx, new_args);
if (subchannel != NULL) {
p->subchannels[subchannel_idx++] = subchannel;

src/core/ext/lb_policy/round_robin/round_robin.c
@@ -64,8 +64,10 @@
#include <grpc/support/alloc.h>
#include "src/core/ext/client_channel/lb_policy_registry.h"
#include "src/core/ext/client_channel/subchannel.h"
#include "src/core/lib/channel/channel_args.h"
#include "src/core/lib/debug/trace.h"
#include "src/core/lib/iomgr/sockaddr_utils.h"
#include "src/core/lib/transport/connectivity_state.h"
#include "src/core/lib/transport/static_metadata.h"
@@ -729,11 +731,15 @@ static grpc_lb_policy *round_robin_create(grpc_exec_ctx *exec_ctx,
if (addresses->addresses[i].is_balancer) continue;
memset(&sc_args, 0, sizeof(grpc_subchannel_args));
sc_args.addr = &addresses->addresses[i].address;
sc_args.args = args->args;
grpc_arg addr_arg =
grpc_create_subchannel_address_arg(&addresses->addresses[i].address);
grpc_channel_args *new_args =
grpc_channel_args_copy_and_add(args->args, &addr_arg, 1);
gpr_free(addr_arg.value.string);
sc_args.args = new_args;
grpc_subchannel *subchannel = grpc_client_channel_factory_create_subchannel(
exec_ctx, args->client_channel_factory, &sc_args);
grpc_channel_args_destroy(exec_ctx, new_args);
if (subchannel != NULL) {
subchannel_data *sd = gpr_malloc(sizeof(*sd));

src/core/ext/transport/chttp2/client/chttp2_connector.c
@@ -43,6 +43,7 @@
#include "src/core/ext/client_channel/connector.h"
#include "src/core/ext/client_channel/http_connect_handshaker.h"
#include "src/core/ext/client_channel/subchannel.h"
#include "src/core/ext/transport/chttp2/transport/chttp2_transport.h"
#include "src/core/lib/channel/channel_args.h"
#include "src/core/lib/channel/handshaker.h"
@@ -220,6 +221,8 @@ static void chttp2_connector_connect(grpc_exec_ctx *exec_ctx,
grpc_connect_out_args *result,
grpc_closure *notify) {
chttp2_connector *c = (chttp2_connector *)con;
grpc_resolved_address addr;
grpc_get_subchannel_address_arg(args->channel_args, &addr);
gpr_mu_lock(&c->mu);
GPR_ASSERT(c->notify == NULL);
c->notify = notify;
@@ -231,8 +234,8 @@ static void chttp2_connector_connect(grpc_exec_ctx *exec_ctx,
GPR_ASSERT(!c->connecting);
c->connecting = true;
grpc_tcp_client_connect(exec_ctx, &c->connected, &c->endpoint,
args->interested_parties, args->channel_args,
args->addr, args->deadline);
args->interested_parties, args->channel_args, &addr,
args->deadline);
gpr_mu_unlock(&c->mu);
}

src/php/README.md
@@ -163,6 +163,13 @@ of this repo. The plugin can be found in the `bins/opt` directory. We are
planning to provide a better way to download and install the plugin
in the future.
You can also just build the gRPC PHP protoc plugin by running:
```sh
$ cd grpc
$ make grpc_php_plugin
```
### Client Stub

src/proto/grpc/testing/BUILD
@@ -1,3 +1,33 @@
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
licenses(["notice"]) # 3-clause BSD
package(default_visibility = ["//visibility:public"])

src/proto/grpc/testing/duplicate/BUILD
@@ -1,3 +1,33 @@
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
licenses(["notice"]) # 3-clause BSD
package(default_visibility = ["//visibility:public"])

src/python/grpcio/_spawn_patch.py
@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Patches the spawn() command for windows compilers.
Windows has an 8191 character command line limit, but some compilers
@@ -45,29 +44,32 @@ MAX_COMMAND_LENGTH = 8191
_classic_spawn = ccompiler.CCompiler.spawn
def _commandfile_spawn(self, command):
command_length = sum([len(arg) for arg in command])
if os.name == 'nt' and command_length > MAX_COMMAND_LENGTH:
# Even if this command doesn't support the @command_file, it will
# fail as is so we try blindly
print('Command line length exceeded, using command file')
print(' '.join(command))
temporary_directory = tempfile.mkdtemp()
command_filename = os.path.abspath(
os.path.join(temporary_directory, 'command'))
with open(command_filename, 'w') as command_file:
escaped_args = ['"' + arg.replace('\\', '\\\\') + '"' for arg in command[1:]]
command_file.write(' '.join(escaped_args))
modified_command = command[:1] + ['@{}'.format(command_filename)]
try:
_classic_spawn(self, modified_command)
finally:
shutil.rmtree(temporary_directory)
else:
_classic_spawn(self, command)
command_length = sum([len(arg) for arg in command])
if os.name == 'nt' and command_length > MAX_COMMAND_LENGTH:
# Even if this command doesn't support the @command_file, it will
# fail as is so we try blindly
print('Command line length exceeded, using command file')
print(' '.join(command))
temporary_directory = tempfile.mkdtemp()
command_filename = os.path.abspath(
os.path.join(temporary_directory, 'command'))
with open(command_filename, 'w') as command_file:
escaped_args = [
'"' + arg.replace('\\', '\\\\') + '"' for arg in command[1:]
]
command_file.write(' '.join(escaped_args))
modified_command = command[:1] + ['@{}'.format(command_filename)]
try:
_classic_spawn(self, modified_command)
finally:
shutil.rmtree(temporary_directory)
else:
_classic_spawn(self, command)
def monkeypatch_spawn():
"""Monkeypatching is dumb, but it's either that or we become maintainers of
"""Monkeypatching is dumb, but it's either that or we become maintainers of
something much, much bigger."""
ccompiler.CCompiler.spawn = _commandfile_spawn
ccompiler.CCompiler.spawn = _commandfile_spawn

src/python/grpcio/commands.py
@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides distutils command classes for the GRPC Python setup process."""
import distutils
@@ -87,138 +86,144 @@ Glossary
class CommandError(Exception):
"""Simple exception class for GRPC custom commands."""
"""Simple exception class for GRPC custom commands."""
# TODO(atash): Remove this once PyPI has better Linux bdist support. See
# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
"""Returns a string path to a bdist file for Linux to install.
"""Returns a string path to a bdist file for Linux to install.
If we can retrieve a pre-compiled bdist from online, uses it. Else, emits a
warning and builds from source.
"""
# TODO(atash): somehow the name that's returned from `wheel` is different
# between different versions of 'wheel' (but from a compatibility standpoint,
# the names are compatible); we should have some way of determining name
# compatibility in the same way `wheel` does to avoid having to rename all of
# the custom wheels that we build/upload to GCS.
# Break import style to ensure that setup.py has had a chance to install the
# relevant package.
from six.moves.urllib import request
decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
try:
url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
bdist_data = request.urlopen(url).read()
except IOError as error:
raise CommandError(
'{}\n\nCould not find the bdist {}: {}'
.format(traceback.format_exc(), decorated_path, error.message))
# Our chosen local bdist path.
bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
try:
with open(bdist_path, 'w') as bdist_file:
bdist_file.write(bdist_data)
except IOError as error:
raise CommandError(
'{}\n\nCould not write grpcio bdist: {}'
.format(traceback.format_exc(), error.message))
return bdist_path
# TODO(atash): somehow the name that's returned from `wheel` is different
# between different versions of 'wheel' (but from a compatibility standpoint,
# the names are compatible); we should have some way of determining name
# compatibility in the same way `wheel` does to avoid having to rename all of
# the custom wheels that we build/upload to GCS.
# Break import style to ensure that setup.py has had a chance to install the
# relevant package.
from six.moves.urllib import request
decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
try:
url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
bdist_data = request.urlopen(url).read()
except IOError as error:
raise CommandError('{}\n\nCould not find the bdist {}: {}'.format(
traceback.format_exc(), decorated_path, error.message))
# Our chosen local bdist path.
bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
try:
with open(bdist_path, 'w') as bdist_file:
bdist_file.write(bdist_data)
except IOError as error:
raise CommandError('{}\n\nCould not write grpcio bdist: {}'
.format(traceback.format_exc(), error.message))
return bdist_path
class SphinxDocumentation(setuptools.Command):
"""Command to generate documentation via sphinx."""
description = 'generate sphinx documentation'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# We import here to ensure that setup.py has had a chance to install the
# relevant package eggs first.
import sphinx
import sphinx.apidoc
metadata = self.distribution.metadata
src_dir = os.path.join(PYTHON_STEM, 'grpc')
sys.path.append(src_dir)
sphinx.apidoc.main([
'', '--force', '--full', '-H', metadata.name, '-A', metadata.author,
'-V', metadata.version, '-R', metadata.version,
'-o', os.path.join('doc', 'src'), src_dir])
conf_filepath = os.path.join('doc', 'src', 'conf.py')
with open(conf_filepath, 'a') as conf_file:
conf_file.write(CONF_PY_ADDENDUM)
glossary_filepath = os.path.join('doc', 'src', 'grpc.rst')
with open(glossary_filepath, 'a') as glossary_filepath:
glossary_filepath.write(API_GLOSSARY)
sphinx.main(['', os.path.join('doc', 'src'), os.path.join('doc', 'build')])
"""Command to generate documentation via sphinx."""
description = 'generate sphinx documentation'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# We import here to ensure that setup.py has had a chance to install the
# relevant package eggs first.
import sphinx
import sphinx.apidoc
metadata = self.distribution.metadata
src_dir = os.path.join(PYTHON_STEM, 'grpc')
sys.path.append(src_dir)
sphinx.apidoc.main([
'', '--force', '--full', '-H', metadata.name, '-A', metadata.author,
'-V', metadata.version, '-R', metadata.version, '-o',
os.path.join('doc', 'src'), src_dir
])
conf_filepath = os.path.join('doc', 'src', 'conf.py')
with open(conf_filepath, 'a') as conf_file:
conf_file.write(CONF_PY_ADDENDUM)
glossary_filepath = os.path.join('doc', 'src', 'grpc.rst')
with open(glossary_filepath, 'a') as glossary_filepath:
glossary_filepath.write(API_GLOSSARY)
sphinx.main(
['', os.path.join('doc', 'src'), os.path.join('doc', 'build')])
class BuildProjectMetadata(setuptools.Command):
"""Command to generate project metadata in a module."""
"""Command to generate project metadata in a module."""
description = 'build grpcio project metadata files'
user_options = []
description = 'build grpcio project metadata files'
user_options = []
def initialize_options(self):
pass
def initialize_options(self):
pass
def finalize_options(self):
pass
def finalize_options(self):
pass
def run(self):
with open(os.path.join(PYTHON_STEM, 'grpc/_grpcio_metadata.py'), 'w') as module_file:
module_file.write('__version__ = """{}"""'.format(
self.distribution.get_version()))
def run(self):
with open(os.path.join(PYTHON_STEM, 'grpc/_grpcio_metadata.py'),
'w') as module_file:
module_file.write('__version__ = """{}"""'.format(
self.distribution.get_version()))
class BuildPy(build_py.build_py):
"""Custom project build command."""
"""Custom project build command."""
def run(self):
self.run_command('build_project_metadata')
build_py.build_py.run(self)
def run(self):
self.run_command('build_project_metadata')
build_py.build_py.run(self)
def _poison_extensions(extensions, message):
"""Includes a file that will always fail to compile in all extensions."""
poison_filename = os.path.join(PYTHON_STEM, 'poison.c')
with open(poison_filename, 'w') as poison:
poison.write('#error {}'.format(message))
for extension in extensions:
extension.sources = [poison_filename]
"""Includes a file that will always fail to compile in all extensions."""
poison_filename = os.path.join(PYTHON_STEM, 'poison.c')
with open(poison_filename, 'w') as poison:
poison.write('#error {}'.format(message))
for extension in extensions:
extension.sources = [poison_filename]
def check_and_update_cythonization(extensions):
"""Replace .pyx files with their generated counterparts and return whether or
"""Replace .pyx files with their generated counterparts and return whether or
not cythonization still needs to occur."""
for extension in extensions:
generated_pyx_sources = []
other_sources = []
for source in extension.sources:
base, file_ext = os.path.splitext(source)
if file_ext == '.pyx':
generated_pyx_source = next(
(base + gen_ext for gen_ext in ('.c', '.cpp',)
if os.path.isfile(base + gen_ext)), None)
if generated_pyx_source:
generated_pyx_sources.append(generated_pyx_source)
else:
sys.stderr.write('Cython-generated files are missing...\n')
return False
else:
other_sources.append(source)
extension.sources = generated_pyx_sources + other_sources
sys.stderr.write('Found cython-generated files...\n')
return True
for extension in extensions:
generated_pyx_sources = []
other_sources = []
for source in extension.sources:
base, file_ext = os.path.splitext(source)
if file_ext == '.pyx':
generated_pyx_source = next((base + gen_ext
for gen_ext in (
'.c',
'.cpp',)
if os.path.isfile(base + gen_ext)),
None)
if generated_pyx_source:
generated_pyx_sources.append(generated_pyx_source)
else:
sys.stderr.write('Cython-generated files are missing...\n')
return False
else:
other_sources.append(source)
extension.sources = generated_pyx_sources + other_sources
sys.stderr.write('Found cython-generated files...\n')
return True
def try_cythonize(extensions, linetracing=False, mandatory=True):
"""Attempt to cythonize the extensions.
"""Attempt to cythonize the extensions.
Args:
extensions: A list of `distutils.extension.Extension`.
@@ -226,78 +231,83 @@ def try_cythonize(extensions, linetracing=False, mandatory=True):
mandatory: Whether or not having Cython-generated files is mandatory. If it
is, extensions will be poisoned when they can't be fully generated.
"""
try:
# Break import style to ensure we have access to Cython post-setup_requires
import Cython.Build
except ImportError:
if mandatory:
sys.stderr.write(
"This package needs to generate C files with Cython but it cannot. "
"Poisoning extension sources to disallow extension commands...")
_poison_extensions(
extensions,
"Extensions have been poisoned due to missing Cython-generated code.")
return extensions
cython_compiler_directives = {}
if linetracing:
additional_define_macros = [('CYTHON_TRACE_NOGIL', '1')]
cython_compiler_directives['linetrace'] = True
return Cython.Build.cythonize(
extensions,
include_path=[
include_dir for extension in extensions for include_dir in extension.include_dirs
] + [CYTHON_STEM],
compiler_directives=cython_compiler_directives
)
try:
# Break import style to ensure we have access to Cython post-setup_requires
import Cython.Build
except ImportError:
if mandatory:
sys.stderr.write(
"This package needs to generate C files with Cython but it cannot. "
"Poisoning extension sources to disallow extension commands...")
_poison_extensions(
extensions,
"Extensions have been poisoned due to missing Cython-generated code."
)
return extensions
cython_compiler_directives = {}
if linetracing:
additional_define_macros = [('CYTHON_TRACE_NOGIL', '1')]
cython_compiler_directives['linetrace'] = True
return Cython.Build.cythonize(
extensions,
include_path=[
include_dir
for extension in extensions
for include_dir in extension.include_dirs
] + [CYTHON_STEM],
compiler_directives=cython_compiler_directives)
class BuildExt(build_ext.build_ext):
"""Custom build_ext command to enable compiler-specific flags."""
C_OPTIONS = {
'unix': ('-pthread', '-std=gnu99'),
'msvc': (),
}
LINK_OPTIONS = {}
def build_extensions(self):
compiler = self.compiler.compiler_type
if compiler in BuildExt.C_OPTIONS:
for extension in self.extensions:
extension.extra_compile_args += list(BuildExt.C_OPTIONS[compiler])
if compiler in BuildExt.LINK_OPTIONS:
for extension in self.extensions:
extension.extra_link_args += list(BuildExt.LINK_OPTIONS[compiler])
if not check_and_update_cythonization(self.extensions):
self.extensions = try_cythonize(self.extensions)
try:
build_ext.build_ext.build_extensions(self)
except Exception as error:
formatted_exception = traceback.format_exc()
support.diagnose_build_ext_error(self, error, formatted_exception)
raise CommandError(
"Failed `build_ext` step:\n{}".format(formatted_exception))
"""Custom build_ext command to enable compiler-specific flags."""
C_OPTIONS = {
'unix': ('-pthread', '-std=gnu99'),
'msvc': (),
}
LINK_OPTIONS = {}
def build_extensions(self):
compiler = self.compiler.compiler_type
if compiler in BuildExt.C_OPTIONS:
for extension in self.extensions:
extension.extra_compile_args += list(BuildExt.C_OPTIONS[
compiler])
if compiler in BuildExt.LINK_OPTIONS:
for extension in self.extensions:
extension.extra_link_args += list(BuildExt.LINK_OPTIONS[
compiler])
if not check_and_update_cythonization(self.extensions):
self.extensions = try_cythonize(self.extensions)
try:
build_ext.build_ext.build_extensions(self)
except Exception as error:
formatted_exception = traceback.format_exc()
support.diagnose_build_ext_error(self, error, formatted_exception)
raise CommandError("Failed `build_ext` step:\n{}".format(
formatted_exception))
class Gather(setuptools.Command):
"""Command to gather project dependencies."""
description = 'gather dependencies for grpcio'
user_options = [
('test', 't', 'flag indicating to gather test dependencies'),
('install', 'i', 'flag indicating to gather install dependencies')
]
def initialize_options(self):
self.test = False
self.install = False
def finalize_options(self):
# distutils requires this override.
pass
def run(self):
if self.install and self.distribution.install_requires:
self.distribution.fetch_build_eggs(self.distribution.install_requires)
if self.test and self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
"""Command to gather project dependencies."""
description = 'gather dependencies for grpcio'
user_options = [
('test', 't', 'flag indicating to gather test dependencies'),
('install', 'i', 'flag indicating to gather install dependencies')
]
def initialize_options(self):
self.test = False
self.install = False
def finalize_options(self):
# distutils requires this override.
pass
def run(self):
if self.install and self.distribution.install_requires:
self.distribution.fetch_build_eggs(
self.distribution.install_requires)
if self.test and self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
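These command classes only take effect once a setup script registers them; a plausible, purely illustrative wiring via setuptools' cmdclass mapping would look like:

import setuptools

# Illustrative only; the real setup.py passes many more arguments.
setuptools.setup(
    name='example-package',  # placeholder name
    cmdclass={
        'build_ext': BuildExt,  # compiler-specific flags plus cythonization
        'gather': Gather,  # `python setup.py gather --test` fetches test deps
    })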

File diff suppressed because it is too large

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""GRPCAuthMetadataPlugins for standard authentication."""
import inspect
@@ -36,51 +35,53 @@ import grpc
def _sign_request(callback, token, error):
metadata = (('authorization', 'Bearer {}'.format(token)),)
callback(metadata, error)
class GoogleCallCredentials(grpc.AuthMetadataPlugin):
"""Metadata wrapper for GoogleCredentials from the oauth2client library."""
def __init__(self, credentials):
self._credentials = credentials
self._pool = futures.ThreadPoolExecutor(max_workers=1)
# Hack to determine if these are JWT creds and we need to pass
# additional_claims when getting a token
if 'additional_claims' in inspect.getargspec(
credentials.get_access_token).args:
self._is_jwt = True
else:
self._is_jwt = False
def __call__(self, context, callback):
# MetadataPlugins cannot block (see grpc.beta.interfaces.py)
if self._is_jwt:
future = self._pool.submit(self._credentials.get_access_token,
additional_claims={'aud': context.service_url})
else:
future = self._pool.submit(self._credentials.get_access_token)
future.add_done_callback(lambda x: self._get_token_callback(callback, x))
def _get_token_callback(self, callback, future):
try:
access_token = future.result().access_token
except Exception as e:
_sign_request(callback, None, e)
else:
_sign_request(callback, access_token, None)
def __del__(self):
self._pool.shutdown(wait=False)
"""Metadata wrapper for GoogleCredentials from the oauth2client library."""
def __init__(self, credentials):
self._credentials = credentials
self._pool = futures.ThreadPoolExecutor(max_workers=1)
# Hack to determine if these are JWT creds and we need to pass
# additional_claims when getting a token
if 'additional_claims' in inspect.getargspec(
credentials.get_access_token).args:
self._is_jwt = True
else:
self._is_jwt = False
def __call__(self, context, callback):
# MetadataPlugins cannot block (see grpc.beta.interfaces.py)
if self._is_jwt:
future = self._pool.submit(
self._credentials.get_access_token,
additional_claims={'aud': context.service_url})
else:
future = self._pool.submit(self._credentials.get_access_token)
future.add_done_callback(
lambda x: self._get_token_callback(callback, x))
def _get_token_callback(self, callback, future):
try:
access_token = future.result().access_token
except Exception as e:
_sign_request(callback, None, e)
else:
_sign_request(callback, access_token, None)
def __del__(self):
self._pool.shutdown(wait=False)
class AccessTokenCallCredentials(grpc.AuthMetadataPlugin):
"""Metadata wrapper for raw access token credentials."""
"""Metadata wrapper for raw access token credentials."""
def __init__(self, access_token):
self._access_token = access_token
def __init__(self, access_token):
self._access_token = access_token
def __call__(self, context, callback):
_sign_request(callback, self._access_token, None)
def __call__(self, context, callback):
_sign_request(callback, self._access_token, None)
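A short sketch of how these plugin classes reach the public API: grpc.metadata_call_credentials wraps an AuthMetadataPlugin into CallCredentials, so the Bearer token above ends up on each call's metadata. The token value is a placeholder:

import grpc
from grpc import _auth

plugin = _auth.AccessTokenCallCredentials('my-token')  # placeholder token
call_credentials = grpc.metadata_call_credentials(plugin, name='access_token')
# When invoked, the plugin signs the request with
# ('authorization', 'Bearer my-token') metadata via _sign_request.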

File diff suppressed because it is too large

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Shared implementation."""
import logging
@@ -45,9 +44,8 @@ CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
cygrpc.ConnectivityState.connecting: grpc.ChannelConnectivity.CONNECTING,
cygrpc.ConnectivityState.ready: grpc.ChannelConnectivity.READY,
cygrpc.ConnectivityState.transient_failure:
grpc.ChannelConnectivity.TRANSIENT_FAILURE,
cygrpc.ConnectivityState.shutdown:
grpc.ChannelConnectivity.SHUTDOWN,
grpc.ChannelConnectivity.TRANSIENT_FAILURE,
cygrpc.ConnectivityState.shutdown: grpc.ChannelConnectivity.SHUTDOWN,
}
CYGRPC_STATUS_CODE_TO_STATUS_CODE = {
@@ -77,83 +75,88 @@ STATUS_CODE_TO_CYGRPC_STATUS_CODE = {
def encode(s):
if isinstance(s, bytes):
return s
else:
return s.encode('ascii')
def decode(b):
if isinstance(b, str):
return b
else:
try:
return b.decode('utf8')
except UnicodeDecodeError:
logging.exception('Invalid encoding on {}'.format(b))
return b.decode('latin1')
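A quick illustration of the encode/decode contract above (assuming grpcio is importable; behavior shown for Python 3, where str is text and bytes trigger decoding):

from grpc import _common

assert _common.encode('abc') == b'abc'  # ASCII text is encoded
assert _common.encode(b'abc') == b'abc'  # bytes pass through untouched
assert _common.decode(b'abc') == 'abc'  # valid UTF-8 decodes normally
# Invalid UTF-8 logs the exception and falls back to latin-1:
assert _common.decode(b'\xff') == '\xff'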
def channel_args(options):
channel_args = []
for key, value in options:
if isinstance(value, six.string_types):
channel_args.append(cygrpc.ChannelArg(encode(key), encode(value)))
else:
channel_args.append(cygrpc.ChannelArg(encode(key), value))
return cygrpc.ChannelArgs(channel_args)
def cygrpc_metadata(application_metadata):
return _EMPTY_METADATA if application_metadata is None else cygrpc.Metadata(
cygrpc.Metadatum(encode(key), encode(value))
for key, value in application_metadata)
def application_metadata(cygrpc_metadata):
if cygrpc_metadata is None:
return ()
else:
return tuple(
(decode(key), value if key[-4:] == b'-bin' else decode(value))
for key, value in cygrpc_metadata)
if cygrpc_metadata is None:
return ()
else:
return tuple((decode(key), value
if key[-4:] == b'-bin' else decode(value))
for key, value in cygrpc_metadata)
def _transform(message, transformer, exception_message):
if transformer is None:
return message
else:
try:
return transformer(message)
except Exception:  # pylint: disable=broad-except
logging.exception(exception_message)
return None
def serialize(message, serializer):
return _transform(message, serializer, 'Exception serializing message!')
def deserialize(serialized_message, deserializer):
return _transform(serialized_message, deserializer,
'Exception deserializing message!')
def fully_qualified_method(group, method):
return '/{}/{}'.format(group, method)
class CleanupThread(threading.Thread):
"""A threading.Thread subclass supporting custom behavior on join().
"""A threading.Thread subclass supporting custom behavior on join().
On Python Interpreter exit, Python will attempt to join outstanding threads
prior to garbage collection. We may need to do additional cleanup, and
we accomplish this by overriding the join() method.
"""
def __init__(self, behavior, group=None, target=None, name=None,
args=(), kwargs={}):
"""Constructor.
def __init__(self,
behavior,
group=None,
target=None,
name=None,
args=(),
kwargs={}):
"""Constructor.
Args:
behavior (function): Function called on join() with a single
@@ -169,15 +172,15 @@ class CleanupThread(threading.Thread):
kwargs (dict[str,object]): A dictionary of keyword arguments to
pass to `target`.
"""
super(CleanupThread, self).__init__(group=group, target=target,
name=name, args=args, kwargs=kwargs)
self._behavior = behavior
def join(self, timeout=None):
start_time = time.time()
self._behavior(timeout)
end_time = time.time()
if timeout is not None:
timeout -= end_time - start_time
timeout = max(timeout, 0)
super(CleanupThread, self).join(timeout)
super(CleanupThread, self).__init__(
group=group, target=target, name=name, args=args, kwargs=kwargs)
self._behavior = behavior
def join(self, timeout=None):
start_time = time.time()
self._behavior(timeout)
end_time = time.time()
if timeout is not None:
timeout -= end_time - start_time
timeout = max(timeout, 0)
super(CleanupThread, self).join(timeout)
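A minimal sketch of the CleanupThread contract: the behavior runs first on join(), and whatever wall time it consumes is subtracted from the caller's timeout before the underlying Thread.join(). The worker and event here are illustrative:

import threading
import time
from grpc import _common

stop_event = threading.Event()

def worker():
    while not stop_event.is_set():
        time.sleep(0.1)

def behavior(timeout):
    # Called by join(); signal the worker so the thread can exit.
    stop_event.set()

thread = _common.CleanupThread(behavior, target=worker)
thread.start()
thread.join(timeout=1.0)  # runs behavior, then joins with reduced timeout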

@@ -31,18 +31,18 @@ from grpc._cython import cygrpc
def _call(call_credentialses):
call_credentials_iterator = iter(call_credentialses)
composition = next(call_credentials_iterator)
for additional_call_credentials in call_credentials_iterator:
composition = cygrpc.call_credentials_composite(
composition, additional_call_credentials)
return composition
def call(call_credentialses):
return _call(call_credentialses)
def channel(channel_credentials, call_credentialses):
return cygrpc.channel_credentials_composite(
channel_credentials, _call(call_credentialses))
return cygrpc.channel_credentials_composite(channel_credentials,
_call(call_credentialses))
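The public helpers in grpc/__init__.py are the intended entry point to this composition logic; a sketch with placeholder tokens:

import grpc

first = grpc.access_token_call_credentials('token-one')  # placeholder
second = grpc.access_token_call_credentials('token-two')  # placeholder
# Composition is pairwise under the hood, mirroring the loop in _call() above.
composite = grpc.composite_call_credentials(first, second)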

@@ -36,82 +36,82 @@ from grpc._cython import cygrpc
class AuthMetadataContext(
collections.namedtuple(
'AuthMetadataContext', ('service_url', 'method_name',)),
grpc.AuthMetadataContext):
pass
collections.namedtuple('AuthMetadataContext', (
'service_url',
'method_name',)), grpc.AuthMetadataContext):
pass
class AuthMetadataPluginCallback(grpc.AuthMetadataContext):
def __init__(self, callback):
self._callback = callback
def __call__(self, metadata, error):
self._callback(metadata, error)
class _WrappedCygrpcCallback(object):
def __init__(self, cygrpc_callback):
self.is_called = False
self.error = None
self.is_called_lock = threading.Lock()
self.cygrpc_callback = cygrpc_callback
def _invoke_failure(self, error):
# TODO(atash) translate different Exception superclasses into different
# status codes.
self.cygrpc_callback(
_common.EMPTY_METADATA, cygrpc.StatusCode.internal,
_common.encode(str(error)))
def _invoke_success(self, metadata):
try:
cygrpc_metadata = _common.cygrpc_metadata(metadata)
except Exception as error:
self._invoke_failure(error)
return
self.cygrpc_callback(cygrpc_metadata, cygrpc.StatusCode.ok, b'')
def __call__(self, metadata, error):
with self.is_called_lock:
if self.is_called:
raise RuntimeError('callback should only ever be invoked once')
if self.error:
self._invoke_failure(self.error)
return
self.is_called = True
if error is None:
self._invoke_success(metadata)
else:
self._invoke_failure(error)
def notify_failure(self, error):
with self.is_called_lock:
if not self.is_called:
self.error = error
def __init__(self, cygrpc_callback):
self.is_called = False
self.error = None
self.is_called_lock = threading.Lock()
self.cygrpc_callback = cygrpc_callback
def _invoke_failure(self, error):
# TODO(atash) translate different Exception superclasses into different
# status codes.
self.cygrpc_callback(_common.EMPTY_METADATA, cygrpc.StatusCode.internal,
_common.encode(str(error)))
def _invoke_success(self, metadata):
try:
cygrpc_metadata = _common.cygrpc_metadata(metadata)
except Exception as error:
self._invoke_failure(error)
return
self.cygrpc_callback(cygrpc_metadata, cygrpc.StatusCode.ok, b'')
def __call__(self, metadata, error):
with self.is_called_lock:
if self.is_called:
raise RuntimeError('callback should only ever be invoked once')
if self.error:
self._invoke_failure(self.error)
return
self.is_called = True
if error is None:
self._invoke_success(metadata)
else:
self._invoke_failure(error)
def notify_failure(self, error):
with self.is_called_lock:
if not self.is_called:
self.error = error
class _WrappedPlugin(object):
def __init__(self, plugin):
self.plugin = plugin
def __call__(self, context, cygrpc_callback):
wrapped_cygrpc_callback = _WrappedCygrpcCallback(cygrpc_callback)
wrapped_context = AuthMetadataContext(
_common.decode(context.service_url), _common.decode(context.method_name))
try:
self.plugin(
wrapped_context, AuthMetadataPluginCallback(wrapped_cygrpc_callback))
except Exception as error:
wrapped_cygrpc_callback.notify_failure(error)
raise
def __call__(self, context, cygrpc_callback):
wrapped_cygrpc_callback = _WrappedCygrpcCallback(cygrpc_callback)
wrapped_context = AuthMetadataContext(
_common.decode(context.service_url),
_common.decode(context.method_name))
try:
self.plugin(wrapped_context,
AuthMetadataPluginCallback(wrapped_cygrpc_callback))
except Exception as error:
wrapped_cygrpc_callback.notify_failure(error)
raise
def call_credentials_metadata_plugin(plugin, name):
"""
"""
Args:
plugin: A callable accepting a grpc.AuthMetadataContext
object and a callback (itself accepting a list of metadata key/value
@@ -119,5 +119,6 @@ def call_credentials_metadata_plugin(plugin, name):
called, but need not be called in plugin's invocation.
plugin's invocation must be non-blocking.
"""
return cygrpc.call_credentials_metadata_plugin(
cygrpc.CredentialsMetadataPlugin(_WrappedPlugin(plugin), _common.encode(name)))
return cygrpc.call_credentials_metadata_plugin(
cygrpc.CredentialsMetadataPlugin(
_WrappedPlugin(plugin), _common.encode(name)))
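To illustrate the contract _WrappedCygrpcCallback enforces, here is a sketch of a user-defined plugin: it must not block and must invoke its callback exactly once (a second invocation raises RuntimeError above). The header name is a placeholder:

import grpc

class StaticMetadataPlugin(grpc.AuthMetadataPlugin):
    """Hypothetical plugin attaching one static metadata pair."""

    def __call__(self, context, callback):
        # Non-blocking; callback invoked exactly once with (metadata, error).
        callback((('x-static-header', 'value'),), None)

call_credentials = grpc.metadata_call_credentials(
    StaticMetadataPlugin(), name='static_metadata')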

File diff suppressed because it is too large

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Internal utilities for gRPC Python."""
import collections
@@ -44,132 +43,136 @@ _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
class RpcMethodHandler(
collections.namedtuple(
'_RpcMethodHandler',
('request_streaming', 'response_streaming', 'request_deserializer',
'response_serializer', 'unary_unary', 'unary_stream', 'stream_unary',
'stream_stream',)),
grpc.RpcMethodHandler):
pass
collections.namedtuple('_RpcMethodHandler', (
'request_streaming',
'response_streaming',
'request_deserializer',
'response_serializer',
'unary_unary',
'unary_stream',
'stream_unary',
'stream_stream',)), grpc.RpcMethodHandler):
pass
class DictionaryGenericHandler(grpc.ServiceRpcHandler):
def __init__(self, service, method_handlers):
self._name = service
self._method_handlers = {
_common.fully_qualified_method(service, method): method_handler
for method, method_handler in six.iteritems(method_handlers)}
def __init__(self, service, method_handlers):
self._name = service
self._method_handlers = {
_common.fully_qualified_method(service, method): method_handler
for method, method_handler in six.iteritems(method_handlers)
}
def service_name(self):
return self._name
def service(self, handler_call_details):
return self._method_handlers.get(handler_call_details.method)
class _ChannelReadyFuture(grpc.Future):
def __init__(self, channel):
self._condition = threading.Condition()
self._channel = channel
self._matured = False
self._cancelled = False
self._done_callbacks = []
def _block(self, timeout):
until = None if timeout is None else time.time() + timeout
with self._condition:
while True:
if self._cancelled:
raise grpc.FutureCancelledError()
elif self._matured:
return
else:
if until is None:
self._condition.wait()
else:
remaining = until - time.time()
if remaining < 0:
raise grpc.FutureTimeoutError()
def __init__(self, channel):
self._condition = threading.Condition()
self._channel = channel
self._matured = False
self._cancelled = False
self._done_callbacks = []
def _block(self, timeout):
until = None if timeout is None else time.time() + timeout
with self._condition:
while True:
if self._cancelled:
raise grpc.FutureCancelledError()
elif self._matured:
return
else:
if until is None:
self._condition.wait()
else:
remaining = until - time.time()
if remaining < 0:
raise grpc.FutureTimeoutError()
else:
self._condition.wait(timeout=remaining)
def _update(self, connectivity):
with self._condition:
if (not self._cancelled and
connectivity is grpc.ChannelConnectivity.READY):
self._matured = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
def cancel(self):
with self._condition:
if not self._matured:
self._cancelled = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
self._condition.wait(timeout=remaining)
def _update(self, connectivity):
with self._condition:
if (not self._cancelled and
connectivity is grpc.ChannelConnectivity.READY):
self._matured = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
def cancel(self):
with self._condition:
if not self._matured:
self._cancelled = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return False
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
def cancelled(self):
with self._condition:
return self._cancelled
def running(self):
with self._condition:
return not self._cancelled and not self._matured
def done(self):
with self._condition:
return self._cancelled or self._matured
def result(self, timeout=None):
self._block(timeout)
return None
def exception(self, timeout=None):
self._block(timeout)
return None
def traceback(self, timeout=None):
self._block(timeout)
return None
def add_done_callback(self, fn):
with self._condition:
if not self._cancelled and not self._matured:
self._done_callbacks.append(fn)
return
fn(self)
def start(self):
with self._condition:
self._channel.subscribe(self._update, try_to_connect=True)
def __del__(self):
with self._condition:
if not self._cancelled and not self._matured:
self._channel.unsubscribe(self._update)
return False
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
def cancelled(self):
with self._condition:
return self._cancelled
def running(self):
with self._condition:
return not self._cancelled and not self._matured
def done(self):
with self._condition:
return self._cancelled or self._matured
def result(self, timeout=None):
self._block(timeout)
return None
def exception(self, timeout=None):
self._block(timeout)
return None
def traceback(self, timeout=None):
self._block(timeout)
return None
def add_done_callback(self, fn):
with self._condition:
if not self._cancelled and not self._matured:
self._done_callbacks.append(fn)
return
fn(self)
def start(self):
with self._condition:
self._channel.subscribe(self._update, try_to_connect=True)
def __del__(self):
with self._condition:
if not self._cancelled and not self._matured:
self._channel.unsubscribe(self._update)
def channel_ready_future(channel):
ready_future = _ChannelReadyFuture(channel)
ready_future.start()
return ready_future
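Usage sketch for the future returned above: result() blocks until the channel reaches READY or the timeout elapses (the address is a placeholder):

import grpc

channel = grpc.insecure_channel('localhost:50051')  # placeholder address
ready_future = grpc.channel_ready_future(channel)
try:
    ready_future.result(timeout=5)  # returns None once the channel is READY
except grpc.FutureTimeoutError:
    print('channel not ready within 5 seconds')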

File diff suppressed because it is too large

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Affords a connectivity-state-listenable channel."""
import threading
@@ -41,116 +40,122 @@ _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
'Exception calling channel subscription callback!')
_LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
state: connectivity for state, connectivity in zip(
_types.ConnectivityState, interfaces.ChannelConnectivity)
state: connectivity
for state, connectivity in zip(_types.ConnectivityState,
interfaces.ChannelConnectivity)
}
class ConnectivityChannel(object):
def __init__(self, low_channel):
self._lock = threading.Lock()
self._low_channel = low_channel
self._polling = False
self._connectivity = None
self._try_to_connect = False
self._callbacks_and_connectivities = []
self._delivering = False
def _deliveries(self, connectivity):
callbacks_needing_update = []
for callback_and_connectivity in self._callbacks_and_connectivities:
callback, callback_connectivity = callback_and_connectivity
if callback_connectivity is not connectivity:
callbacks_needing_update.append(callback)
callback_and_connectivity[1] = connectivity
return callbacks_needing_update
def _deliver(self, initial_connectivity, initial_callbacks):
connectivity = initial_connectivity
callbacks = initial_callbacks
while True:
for callback in callbacks:
callable_util.call_logging_exceptions(
callback, _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE,
connectivity)
with self._lock:
callbacks = self._deliveries(self._connectivity)
if callbacks:
connectivity = self._connectivity
else:
self._delivering = False
return
def _spawn_delivery(self, connectivity, callbacks):
delivering_thread = threading.Thread(
target=self._deliver, args=(connectivity, callbacks,))
delivering_thread.start()
self._delivering = True
def __init__(self, low_channel):
self._lock = threading.Lock()
self._low_channel = low_channel
# TODO(issue 3064): Don't poll.
def _poll_connectivity(self, low_channel, initial_try_to_connect):
try_to_connect = initial_try_to_connect
low_connectivity = low_channel.check_connectivity_state(try_to_connect)
with self._lock:
self._connectivity = _LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
low_connectivity]
callbacks = tuple(
callback for callback, unused_but_known_to_be_none_connectivity
in self._callbacks_and_connectivities)
for callback_and_connectivity in self._callbacks_and_connectivities:
callback_and_connectivity[1] = self._connectivity
if callbacks:
self._spawn_delivery(self._connectivity, callbacks)
completion_queue = _low.CompletionQueue()
while True:
low_channel.watch_connectivity_state(
low_connectivity, time.time() + 0.2, completion_queue, None)
event = completion_queue.next()
with self._lock:
if not self._callbacks_and_connectivities and not self._try_to_connect:
self._polling = False
self._connectivity = None
completion_queue.shutdown()
break
try_to_connect = self._try_to_connect
self._polling = False
self._connectivity = None
self._try_to_connect = False
if event.success or try_to_connect:
self._callbacks_and_connectivities = []
self._delivering = False
def _deliveries(self, connectivity):
callbacks_needing_update = []
for callback_and_connectivity in self._callbacks_and_connectivities:
callback, callback_connectivity = callback_and_connectivity
if callback_connectivity is not connectivity:
callbacks_needing_update.append(callback)
callback_and_connectivity[1] = connectivity
return callbacks_needing_update
def _deliver(self, initial_connectivity, initial_callbacks):
connectivity = initial_connectivity
callbacks = initial_callbacks
while True:
for callback in callbacks:
callable_util.call_logging_exceptions(
callback, _CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE,
connectivity)
with self._lock:
callbacks = self._deliveries(self._connectivity)
if callbacks:
connectivity = self._connectivity
else:
self._delivering = False
return
def _spawn_delivery(self, connectivity, callbacks):
delivering_thread = threading.Thread(
target=self._deliver, args=(
connectivity,
callbacks,))
delivering_thread.start()
self._delivering = True
# TODO(issue 3064): Don't poll.
def _poll_connectivity(self, low_channel, initial_try_to_connect):
try_to_connect = initial_try_to_connect
low_connectivity = low_channel.check_connectivity_state(try_to_connect)
with self._lock:
self._connectivity = _LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
low_connectivity]
if not self._delivering:
callbacks = self._deliveries(self._connectivity)
self._connectivity = _LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
low_connectivity]
callbacks = tuple(
callback
for callback, unused_but_known_to_be_none_connectivity in
self._callbacks_and_connectivities)
for callback_and_connectivity in self._callbacks_and_connectivities:
callback_and_connectivity[1] = self._connectivity
if callbacks:
self._spawn_delivery(self._connectivity, callbacks)
def subscribe(self, callback, try_to_connect):
with self._lock:
if not self._callbacks_and_connectivities and not self._polling:
polling_thread = threading.Thread(
target=self._poll_connectivity,
args=(self._low_channel, bool(try_to_connect)))
polling_thread.start()
self._polling = True
self._callbacks_and_connectivities.append([callback, None])
elif not self._delivering and self._connectivity is not None:
self._spawn_delivery(self._connectivity, (callback,))
self._try_to_connect |= bool(try_to_connect)
self._callbacks_and_connectivities.append(
[callback, self._connectivity])
else:
self._try_to_connect |= bool(try_to_connect)
self._callbacks_and_connectivities.append([callback, None])
def unsubscribe(self, callback):
with self._lock:
for index, (subscribed_callback, unused_connectivity) in enumerate(
self._callbacks_and_connectivities):
if callback == subscribed_callback:
self._callbacks_and_connectivities.pop(index)
break
def low_channel(self):
return self._low_channel
self._spawn_delivery(self._connectivity, callbacks)
completion_queue = _low.CompletionQueue()
while True:
low_channel.watch_connectivity_state(low_connectivity,
time.time() + 0.2,
completion_queue, None)
event = completion_queue.next()
with self._lock:
if not self._callbacks_and_connectivities and not self._try_to_connect:
self._polling = False
self._connectivity = None
completion_queue.shutdown()
break
try_to_connect = self._try_to_connect
self._try_to_connect = False
if event.success or try_to_connect:
low_connectivity = low_channel.check_connectivity_state(
try_to_connect)
with self._lock:
self._connectivity = _LOW_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
low_connectivity]
if not self._delivering:
callbacks = self._deliveries(self._connectivity)
if callbacks:
self._spawn_delivery(self._connectivity, callbacks)
def subscribe(self, callback, try_to_connect):
with self._lock:
if not self._callbacks_and_connectivities and not self._polling:
polling_thread = threading.Thread(
target=self._poll_connectivity,
args=(self._low_channel, bool(try_to_connect)))
polling_thread.start()
self._polling = True
self._callbacks_and_connectivities.append([callback, None])
elif not self._delivering and self._connectivity is not None:
self._spawn_delivery(self._connectivity, (callback,))
self._try_to_connect |= bool(try_to_connect)
self._callbacks_and_connectivities.append(
[callback, self._connectivity])
else:
self._try_to_connect |= bool(try_to_connect)
self._callbacks_and_connectivities.append([callback, None])
def unsubscribe(self, callback):
with self._lock:
for index, (subscribed_callback, unused_connectivity
) in enumerate(self._callbacks_and_connectivities):
if callback == subscribed_callback:
self._callbacks_and_connectivities.pop(index)
break
def low_channel(self):
return self._low_channel
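The subscription contract, sketched: callbacks receive interfaces.ChannelConnectivity values on a delivery thread, polling begins lazily on the first subscribe, and unsubscribe removes the callback. Construction of the low-level channel is elided here:

from grpc.beta import interfaces

def on_connectivity(connectivity):
    # Invoked from the delivery thread spawned by _spawn_delivery().
    if connectivity is interfaces.ChannelConnectivity.READY:
        print('channel is ready')

# Assuming `connectivity_channel` is a ConnectivityChannel built around a
# low-level channel elsewhere:
# connectivity_channel.subscribe(on_connectivity, try_to_connect=True)
# connectivity_channel.unsubscribe(on_connectivity)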

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Translates gRPC's server-side API into gRPC's server-side Beta API."""
import collections
@@ -47,329 +46,352 @@ _DEFAULT_POOL_SIZE = 8
class _ServerProtocolContext(interfaces.GRPCServicerContext):
def __init__(self, servicer_context):
self._servicer_context = servicer_context
def peer(self):
return self._servicer_context.peer()
def disable_next_response_compression(self):
pass  # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
class _FaceServicerContext(face.ServicerContext):
def __init__(self, servicer_context):
self._servicer_context = servicer_context
def is_active(self):
return self._servicer_context.is_active()
def time_remaining(self):
return self._servicer_context.time_remaining()
def add_abortion_callback(self, abortion_callback):
raise NotImplementedError(
'add_abortion_callback no longer supported server-side!')
def cancel(self):
self._servicer_context.cancel()
def protocol_context(self):
return _ServerProtocolContext(self._servicer_context)
def invocation_metadata(self):
return _common.cygrpc_metadata(
self._servicer_context.invocation_metadata())
def initial_metadata(self, initial_metadata):
self._servicer_context.send_initial_metadata(initial_metadata)
def terminal_metadata(self, terminal_metadata):
self._servicer_context.set_terminal_metadata(terminal_metadata)
def code(self, code):
self._servicer_context.set_code(code)
def details(self, details):
self._servicer_context.set_details(details)
def _adapt_unary_request_inline(unary_request_inline):
def adaptation(request, servicer_context):
return unary_request_inline(request, _FaceServicerContext(servicer_context))
return adaptation
def adaptation(request, servicer_context):
return unary_request_inline(request,
_FaceServicerContext(servicer_context))
return adaptation
def _adapt_stream_request_inline(stream_request_inline):
def adaptation(request_iterator, servicer_context):
return stream_request_inline(
request_iterator, _FaceServicerContext(servicer_context))
return adaptation
def adaptation(request_iterator, servicer_context):
return stream_request_inline(request_iterator,
_FaceServicerContext(servicer_context))
return adaptation
class _Callback(stream.Consumer):
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._terminated = False
self._cancelled = False
def consume(self, value):
with self._condition:
self._values.append(value)
self._condition.notify_all()
def terminate(self):
with self._condition:
self._terminated = True
self._condition.notify_all()
def consume_and_terminate(self, value):
with self._condition:
self._values.append(value)
self._terminated = True
self._condition.notify_all()
def cancel(self):
with self._condition:
self._cancelled = True
self._condition.notify_all()
def draw_one_value(self):
with self._condition:
while True:
if self._cancelled:
raise abandonment.Abandoned()
elif self._values:
return self._values.pop(0)
elif self._terminated:
return None
else:
self._condition.wait()
def draw_all_values(self):
with self._condition:
while True:
if self._cancelled:
raise abandonment.Abandoned()
elif self._terminated:
all_values = tuple(self._values)
self._values = None
return all_values
else:
self._condition.wait()
def _run_request_pipe_thread(request_iterator, request_consumer,
servicer_context):
thread_joined = threading.Event()
def pipe_requests():
for request in request_iterator:
if not servicer_context.is_active() or thread_joined.is_set():
return
request_consumer.consume(request)
if not servicer_context.is_active() or thread_joined.is_set():
return
request_consumer.terminate()
def stop_request_pipe(timeout):
thread_joined.set()
request_pipe_thread = _common.CleanupThread(
stop_request_pipe, target=pipe_requests)
request_pipe_thread.start()
def _adapt_unary_unary_event(unary_unary_event):
def adaptation(request, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
unary_unary_event(
request, callback.consume_and_terminate,
_FaceServicerContext(servicer_context))
return callback.draw_all_values()[0]
return adaptation
def adaptation(request, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
unary_unary_event(request, callback.consume_and_terminate,
_FaceServicerContext(servicer_context))
return callback.draw_all_values()[0]
return adaptation
def _adapt_unary_stream_event(unary_stream_event):
def adaptation(request, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
unary_stream_event(
request, callback, _FaceServicerContext(servicer_context))
while True:
response = callback.draw_one_value()
if response is None:
return
else:
yield response
return adaptation
def adaptation(request, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
unary_stream_event(request, callback,
_FaceServicerContext(servicer_context))
while True:
response = callback.draw_one_value()
if response is None:
return
else:
yield response
return adaptation
def _adapt_stream_unary_event(stream_unary_event):
def adaptation(request_iterator, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
request_consumer = stream_unary_event(
callback.consume_and_terminate, _FaceServicerContext(servicer_context))
_run_request_pipe_thread(
request_iterator, request_consumer, servicer_context)
return callback.draw_all_values()[0]
return adaptation
def adaptation(request_iterator, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
request_consumer = stream_unary_event(
callback.consume_and_terminate,
_FaceServicerContext(servicer_context))
_run_request_pipe_thread(request_iterator, request_consumer,
servicer_context)
return callback.draw_all_values()[0]
return adaptation
def _adapt_stream_stream_event(stream_stream_event):
def adaptation(request_iterator, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
request_consumer = stream_stream_event(
callback, _FaceServicerContext(servicer_context))
_run_request_pipe_thread(
request_iterator, request_consumer, servicer_context)
while True:
response = callback.draw_one_value()
if response is None:
return
else:
yield response
return adaptation
def adaptation(request_iterator, servicer_context):
callback = _Callback()
if not servicer_context.add_callback(callback.cancel):
raise abandonment.Abandoned()
request_consumer = stream_stream_event(
callback, _FaceServicerContext(servicer_context))
_run_request_pipe_thread(request_iterator, request_consumer,
servicer_context)
while True:
response = callback.draw_one_value()
if response is None:
return
else:
yield response
return adaptation
class _SimpleMethodHandler(
collections.namedtuple(
'_MethodHandler',
('request_streaming', 'response_streaming', 'request_deserializer',
'response_serializer', 'unary_unary', 'unary_stream', 'stream_unary',
'stream_stream',)),
grpc.RpcMethodHandler):
pass
def _simple_method_handler(
implementation, request_deserializer, response_serializer):
if implementation.style is style.Service.INLINE:
if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False, False, request_deserializer, response_serializer,
_adapt_unary_request_inline(implementation.unary_unary_inline), None,
None, None)
elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False, True, request_deserializer, response_serializer, None,
_adapt_unary_request_inline(implementation.unary_stream_inline), None,
None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(
True, False, request_deserializer, response_serializer, None, None,
_adapt_stream_request_inline(implementation.stream_unary_inline),
None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
return _SimpleMethodHandler(
True, True, request_deserializer, response_serializer, None, None,
None,
_adapt_stream_request_inline(implementation.stream_stream_inline))
elif implementation.style is style.Service.EVENT:
if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False, False, request_deserializer, response_serializer,
_adapt_unary_unary_event(implementation.unary_unary_event), None,
None, None)
elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False, True, request_deserializer, response_serializer, None,
_adapt_unary_stream_event(implementation.unary_stream_event), None,
None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(
True, False, request_deserializer, response_serializer, None, None,
_adapt_stream_unary_event(implementation.stream_unary_event), None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
return _SimpleMethodHandler(
True, True, request_deserializer, response_serializer, None, None,
None, _adapt_stream_stream_event(implementation.stream_stream_event))
collections.namedtuple('_MethodHandler', (
'request_streaming',
'response_streaming',
'request_deserializer',
'response_serializer',
'unary_unary',
'unary_stream',
'stream_unary',
'stream_stream',)), grpc.RpcMethodHandler):
pass
def _simple_method_handler(implementation, request_deserializer,
response_serializer):
if implementation.style is style.Service.INLINE:
if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False, False, request_deserializer, response_serializer,
_adapt_unary_request_inline(implementation.unary_unary_inline),
None, None, None)
elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False, True, request_deserializer, response_serializer, None,
_adapt_unary_request_inline(implementation.unary_stream_inline),
None, None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(True, False, request_deserializer,
response_serializer, None, None,
_adapt_stream_request_inline(
implementation.stream_unary_inline),
None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
return _SimpleMethodHandler(
True, True, request_deserializer, response_serializer, None,
None, None,
_adapt_stream_request_inline(
implementation.stream_stream_inline))
elif implementation.style is style.Service.EVENT:
if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False, False, request_deserializer, response_serializer,
_adapt_unary_unary_event(implementation.unary_unary_event),
None, None, None)
elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False, True, request_deserializer, response_serializer, None,
_adapt_unary_stream_event(implementation.unary_stream_event),
None, None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(
True, False, request_deserializer, response_serializer, None,
None,
_adapt_stream_unary_event(implementation.stream_unary_event),
None)
elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
return _SimpleMethodHandler(
True, True, request_deserializer, response_serializer, None,
None, None,
_adapt_stream_stream_event(implementation.stream_stream_event))
def _flatten_method_pair_map(method_pair_map):
method_pair_map = method_pair_map or {}
flat_map = {}
for method_pair in method_pair_map:
method = _common.fully_qualified_method(method_pair[0], method_pair[1])
flat_map[method] = method_pair_map[method_pair]
return flat_map
class _GenericRpcHandler(grpc.GenericRpcHandler):
def __init__(
self, method_implementations, multi_method_implementation,
request_deserializers, response_serializers):
self._method_implementations = _flatten_method_pair_map(
method_implementations)
self._request_deserializers = _flatten_method_pair_map(
request_deserializers)
self._response_serializers = _flatten_method_pair_map(
response_serializers)
self._multi_method_implementation = multi_method_implementation
def service(self, handler_call_details):
method_implementation = self._method_implementations.get(
handler_call_details.method)
if method_implementation is not None:
return _simple_method_handler(
method_implementation,
self._request_deserializers.get(handler_call_details.method),
self._response_serializers.get(handler_call_details.method))
elif self._multi_method_implementation is None:
return None
else:
try:
return None #TODO(nathaniel): call the multimethod.
except face.NoSuchMethodError:
return None
def __init__(self, method_implementations, multi_method_implementation,
request_deserializers, response_serializers):
self._method_implementations = _flatten_method_pair_map(
method_implementations)
self._request_deserializers = _flatten_method_pair_map(
request_deserializers)
self._response_serializers = _flatten_method_pair_map(
response_serializers)
self._multi_method_implementation = multi_method_implementation
def service(self, handler_call_details):
method_implementation = self._method_implementations.get(
handler_call_details.method)
if method_implementation is not None:
return _simple_method_handler(
method_implementation,
self._request_deserializers.get(handler_call_details.method),
self._response_serializers.get(handler_call_details.method))
elif self._multi_method_implementation is None:
return None
else:
try:
return None #TODO(nathaniel): call the multimethod.
except face.NoSuchMethodError:
return None
class _Server(interfaces.Server):
def __init__(self, server):
self._server = server
def add_insecure_port(self, address):
return self._server.add_insecure_port(address)
def add_secure_port(self, address, server_credentials):
return self._server.add_secure_port(address, server_credentials)
def start(self):
self._server.start()
def stop(self, grace):
return self._server.stop(grace)
def __enter__(self):
self._server.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._server.stop(None)
return False
def server(
service_implementations, multi_method_implementation, request_deserializers,
response_serializers, thread_pool, thread_pool_size):
generic_rpc_handler = _GenericRpcHandler(
service_implementations, multi_method_implementation,
request_deserializers, response_serializers)
if thread_pool is None:
effective_thread_pool = logging_pool.pool(
_DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size)
else:
effective_thread_pool = thread_pool
return _Server(
grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,)))
def server(service_implementations, multi_method_implementation,
request_deserializers, response_serializers, thread_pool,
thread_pool_size):
generic_rpc_handler = _GenericRpcHandler(
service_implementations, multi_method_implementation,
request_deserializers, response_serializers)
if thread_pool is None:
effective_thread_pool = logging_pool.pool(_DEFAULT_POOL_SIZE
if thread_pool_size is None
else thread_pool_size)
else:
effective_thread_pool = thread_pool
return _Server(
grpc.server(
effective_thread_pool, handlers=(generic_rpc_handler,)))
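A sketch of driving this adaptation end to end, assuming the face utilities module exposes unary_unary_inline (the service and method names are placeholders):

from grpc.beta import _server_adaptations
from grpc.framework.interfaces.face import utilities as face_utilities

def echo(request, context):
    # Inline unary-unary behavior: return the request unchanged.
    return request

service_implementations = {
    ('example.Echo', 'UnaryEcho'): face_utilities.unary_unary_inline(echo),
}
beta_server = _server_adaptations.server(
    service_implementations, None, None, None, None, None)
beta_server.add_insecure_port('[::]:50051')
beta_server.start()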

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Entry points into the Beta API of gRPC Python."""
# threading is referenced from specification in this module.
@@ -43,7 +42,6 @@ from grpc.beta import interfaces
from grpc.framework.common import cardinality # pylint: disable=unused-import
from grpc.framework.interfaces.face import face # pylint: disable=unused-import
ChannelCredentials = grpc.ChannelCredentials
ssl_channel_credentials = grpc.ssl_channel_credentials
CallCredentials = grpc.CallCredentials
@@ -51,7 +49,7 @@ metadata_call_credentials = grpc.metadata_call_credentials
def google_call_credentials(credentials):
"""Construct CallCredentials from GoogleCredentials.
"""Construct CallCredentials from GoogleCredentials.
Args:
credentials: A GoogleCredentials object from the oauth2client library.
@@ -59,7 +57,8 @@ def google_call_credentials(credentials):
Returns:
A CallCredentials object for use in a GRPCCallOptions object.
"""
return metadata_call_credentials(_auth.GoogleCallCredentials(credentials))
access_token_call_credentials = grpc.access_token_call_credentials
composite_call_credentials = grpc.composite_call_credentials
@@ -67,18 +66,18 @@ composite_channel_credentials = grpc.composite_channel_credentials
class Channel(object):
"""A channel to a remote host through which RPCs may be conducted.
"""A channel to a remote host through which RPCs may be conducted.
Only the "subscribe" and "unsubscribe" methods are supported for application
use. This class' instance constructor and all other attributes are
unsupported.
"""
def __init__(self, channel):
self._channel = channel
def subscribe(self, callback, try_to_connect=None):
"""Subscribes to this Channel's connectivity.
Args:
callback: A callable to be invoked and passed an
@@ -90,20 +89,20 @@ class Channel(object):
attempt to connect if it is not already connected and ready to conduct
RPCs.
"""
self._channel.subscribe(callback, try_to_connect=try_to_connect)
def unsubscribe(self, callback):
"""Unsubscribes a callback from this Channel's connectivity.
Args:
callback: A callable previously registered with this Channel from having
been passed to its "subscribe" method.
"""
self._channel.unsubscribe(callback)
def insecure_channel(host, port):
"""Creates an insecure Channel to a remote host.
"""Creates an insecure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
@@ -113,13 +112,13 @@ def insecure_channel(host, port):
Returns:
A Channel to the remote host through which RPCs may be conducted.
"""
channel = grpc.insecure_channel(
host if port is None else '%s:%d' % (host, port))
return Channel(channel)
channel = grpc.insecure_channel(host
if port is None else '%s:%d' % (host, port))
return Channel(channel)
def secure_channel(host, port, channel_credentials):
"""Creates a secure Channel to a remote host.
"""Creates a secure Channel to a remote host.
Args:
host: The name of the remote host to which to connect.
@@ -130,37 +129,39 @@ def secure_channel(host, port, channel_credentials):
Returns:
A secure Channel to the remote host through which RPCs may be conducted.
"""
channel = grpc.secure_channel(
host if port is None else '%s:%d' % (host, port), channel_credentials)
return Channel(channel)
channel = grpc.secure_channel(host if port is None else
'%s:%d' % (host, port), channel_credentials)
return Channel(channel)
class StubOptions(object):
"""A value encapsulating the various options for creation of a Stub.
"""A value encapsulating the various options for creation of a Stub.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(
self, host, request_serializers, response_deserializers,
metadata_transformer, thread_pool, thread_pool_size):
self.host = host
self.request_serializers = request_serializers
self.response_deserializers = response_deserializers
self.metadata_transformer = metadata_transformer
self.thread_pool = thread_pool
self.thread_pool_size = thread_pool_size
def __init__(self, host, request_serializers, response_deserializers,
metadata_transformer, thread_pool, thread_pool_size):
self.host = host
self.request_serializers = request_serializers
self.response_deserializers = response_deserializers
self.metadata_transformer = metadata_transformer
self.thread_pool = thread_pool
self.thread_pool_size = thread_pool_size
_EMPTY_STUB_OPTIONS = StubOptions(
None, None, None, None, None, None)
_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None)
def stub_options(
host=None, request_serializers=None, response_deserializers=None,
metadata_transformer=None, thread_pool=None, thread_pool_size=None):
"""Creates a StubOptions value to be passed at stub creation.
def stub_options(host=None,
request_serializers=None,
response_deserializers=None,
metadata_transformer=None,
thread_pool=None,
thread_pool_size=None):
"""Creates a StubOptions value to be passed at stub creation.
All parameters are optional and should always be passed by keyword.
@@ -180,13 +181,12 @@ def stub_options(
Returns:
A StubOptions value created from the passed parameters.
"""
return StubOptions(
host, request_serializers, response_deserializers,
metadata_transformer, thread_pool, thread_pool_size)
return StubOptions(host, request_serializers, response_deserializers,
metadata_transformer, thread_pool, thread_pool_size)
def generic_stub(channel, options=None):
"""Creates a face.GenericStub on which RPCs can be made.
"""Creates a face.GenericStub on which RPCs can be made.
Args:
channel: A Channel for use by the created stub.
@@ -195,16 +195,17 @@ def generic_stub(channel, options=None):
Returns:
A face.GenericStub on which RPCs can be made.
"""
effective_options = _EMPTY_STUB_OPTIONS if options is None else options
return _client_adaptations.generic_stub(
channel._channel, # pylint: disable=protected-access
effective_options.host, effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers)
effective_options = _EMPTY_STUB_OPTIONS if options is None else options
return _client_adaptations.generic_stub(
channel._channel, # pylint: disable=protected-access
effective_options.host,
effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers)
def dynamic_stub(channel, service, cardinalities, options=None):
"""Creates a face.DynamicStub with which RPCs can be invoked.
"""Creates a face.DynamicStub with which RPCs can be invoked.
Args:
channel: A Channel for the returned face.DynamicStub to use.
@@ -217,13 +218,15 @@ def dynamic_stub(channel, service, cardinalities, options=None):
Returns:
A face.DynamicStub with which RPCs can be invoked.
"""
effective_options = StubOptions() if options is None else options
return _client_adaptations.dynamic_stub(
channel._channel, # pylint: disable=protected-access
service, cardinalities, effective_options.host,
effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers)
effective_options = StubOptions() if options is None else options
return _client_adaptations.dynamic_stub(
channel._channel, # pylint: disable=protected-access
service,
cardinalities,
effective_options.host,
effective_options.metadata_transformer,
effective_options.request_serializers,
effective_options.response_deserializers)
ServerCredentials = grpc.ServerCredentials
@@ -231,34 +234,36 @@ ssl_server_credentials = grpc.ssl_server_credentials
class ServerOptions(object):
"""A value encapsulating the various options for creation of a Server.
"""A value encapsulating the various options for creation of a Server.
This class and its instances have no supported interface - it exists to define
the type of its instances and its instances exist to be passed to other
functions.
"""
def __init__(self, multi_method_implementation, request_deserializers,
response_serializers, thread_pool, thread_pool_size,
default_timeout, maximum_timeout):
self.multi_method_implementation = multi_method_implementation
self.request_deserializers = request_deserializers
self.response_serializers = response_serializers
self.thread_pool = thread_pool
self.thread_pool_size = thread_pool_size
self.default_timeout = default_timeout
self.maximum_timeout = maximum_timeout
_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None)
def server_options(multi_method_implementation=None,
request_deserializers=None,
response_serializers=None,
thread_pool=None,
thread_pool_size=None,
default_timeout=None,
maximum_timeout=None):
"""Creates a ServerOptions value to be passed at server creation.
All parameters are optional and should always be passed by keyword.
@ -282,13 +287,13 @@ def server_options(
Returns:
A ServerOptions value created from the passed parameters.
"""
return ServerOptions(multi_method_implementation, request_deserializers,
response_serializers, thread_pool, thread_pool_size,
default_timeout, maximum_timeout)
def server(service_implementations, options=None):
"""Creates an interfaces.Server with which RPCs can be serviced.
"""Creates an interfaces.Server with which RPCs can be serviced.
Args:
service_implementations: A dictionary from service name-method name pair to
@ -299,9 +304,9 @@ def server(service_implementations, options=None):
Returns:
An interfaces.Server with which RPCs can be serviced.
"""
effective_options = _EMPTY_SERVER_OPTIONS if options is None else options
return _server_adaptations.server(
service_implementations, effective_options.multi_method_implementation,
effective_options.request_deserializers,
effective_options.response_serializers, effective_options.thread_pool,
effective_options.thread_pool_size)
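For orientation, a minimal service-side sketch under the same hypothetical 'foo.Bar' service; the handler and its build_response helper are placeholders:

    from grpc.beta import implementations
    from grpc.framework.interfaces.face import utilities as face_utilities

    def get(request, context):
        # Hypothetical unary-unary behavior; build_response is a placeholder.
        return build_response(request)

    method_implementations = {
        ('foo.Bar', 'Get'): face_utilities.unary_unary_inline(get),
    }
    beta_server = implementations.server(method_implementations)
    port = beta_server.add_insecure_port('[::]:0')  # port zero lets the runtime choose
    beta_server.start()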

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Constants and interfaces of the Beta API of gRPC Python."""
import abc
@ -43,21 +42,21 @@ StatusCode = grpc.StatusCode
class GRPCCallOptions(object):
"""A value encapsulating gRPC-specific options passed on RPC invocation.
"""A value encapsulating gRPC-specific options passed on RPC invocation.
This class and its instances have no supported interface - it exists to
define the type of its instances and its instances exist to be passed to
other functions.
"""
def __init__(self, disable_compression, subcall_of, credentials):
self.disable_compression = disable_compression
self.subcall_of = subcall_of
self.credentials = credentials
def grpc_call_options(disable_compression=False, credentials=None):
"""Creates a GRPCCallOptions value to be passed at RPC invocation.
"""Creates a GRPCCallOptions value to be passed at RPC invocation.
All parameters are optional and should always be passed by keyword.
@ -67,7 +66,8 @@ def grpc_call_options(disable_compression=False, credentials=None):
request-unary RPCs.
credentials: A CallCredentials object to use for the invoked RPC.
"""
return GRPCCallOptions(disable_compression, None, credentials)
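A sketch of passing these options at invocation time, assuming a stub obtained from implementations.dynamic_stub and a hypothetical 'Get' method:

    from grpc.beta import interfaces as beta_interfaces

    call_options = beta_interfaces.grpc_call_options(disable_compression=True)
    # Beta stub invocations accept the value via the protocol_options parameter.
    response = stub.Get(request, 10, protocol_options=call_options)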
GRPCAuthMetadataContext = grpc.AuthMetadataContext
GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
@ -75,38 +75,38 @@ GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin
class GRPCServicerContext(six.with_metaclass(abc.ABCMeta)):
"""Exposes gRPC-specific options and behaviors to code servicing RPCs."""
"""Exposes gRPC-specific options and behaviors to code servicing RPCs."""
@abc.abstractmethod
def peer(self):
"""Identifies the peer that invoked the RPC being serviced.
Returns:
A string identifying the peer that invoked the RPC being serviced.
"""
raise NotImplementedError()
@abc.abstractmethod
def disable_next_response_compression(self):
"""Disables compression of the next response passed by the application."""
raise NotImplementedError()
class GRPCInvocationContext(six.with_metaclass(abc.ABCMeta)):
"""Exposes gRPC-specific options and behaviors to code invoking RPCs."""
"""Exposes gRPC-specific options and behaviors to code invoking RPCs."""
@abc.abstractmethod
def disable_next_request_compression(self):
"""Disables compression of the next request passed by the application."""
raise NotImplementedError()
class Server(six.with_metaclass(abc.ABCMeta)):
"""Services RPCs."""
"""Services RPCs."""
@abc.abstractmethod
def add_insecure_port(self, address):
"""Reserves a port for insecure RPC service once this Server becomes active.
This method may only be called before this Server's start method is called.
@ -120,11 +120,11 @@ class Server(six.with_metaclass(abc.ABCMeta)):
in the passed address, but will likely be different if the port number
contained in the passed address was zero.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_secure_port(self, address, server_credentials):
"""Reserves a port for secure RPC service after this Server becomes active.
This method may only be called before this Server's start method is called.
@ -139,20 +139,20 @@ class Server(six.with_metaclass(abc.ABCMeta)):
in the passed address, but will likely be different if the port number
contained in the passed address was zero.
"""
raise NotImplementedError()
@abc.abstractmethod
def start(self):
"""Starts this Server's service of RPCs.
This method may only be called while the server is not serving RPCs (i.e. it
is not idempotent).
"""
raise NotImplementedError()
@abc.abstractmethod
def stop(self, grace):
"""Stops this Server's service of RPCs.
All calls to this method immediately stop service of new RPCs. When existing
RPCs are aborted is controlled by the grace period parameter passed to this
@ -177,4 +177,4 @@ class Server(six.with_metaclass(abc.ABCMeta)):
at the time it was stopped or if all RPCs that it had underway completed
very early in the grace period).
"""
raise NotImplementedError()

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for the gRPC Python Beta API."""
import threading
@ -44,107 +43,107 @@ _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
class _ChannelReadyFuture(future.Future):
def __init__(self, channel):
self._condition = threading.Condition()
self._channel = channel
self._matured = False
self._cancelled = False
self._done_callbacks = []
def _block(self, timeout):
until = None if timeout is None else time.time() + timeout
with self._condition:
while True:
if self._cancelled:
raise future.CancelledError()
elif self._matured:
return
else:
if until is None:
self._condition.wait()
else:
remaining = until - time.time()
if remaining < 0:
raise future.TimeoutError()
else:
self._condition.wait(timeout=remaining)
def _update(self, connectivity):
with self._condition:
if (not self._cancelled and
connectivity is interfaces.ChannelConnectivity.READY):
self._matured = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
def cancel(self):
with self._condition:
if not self._matured:
self._cancelled = True
self._channel.unsubscribe(self._update)
self._condition.notify_all()
done_callbacks = tuple(self._done_callbacks)
self._done_callbacks = None
else:
return False
for done_callback in done_callbacks:
callable_util.call_logging_exceptions(
done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
def cancelled(self):
with self._condition:
return self._cancelled
def running(self):
with self._condition:
return not self._cancelled and not self._matured
def done(self):
with self._condition:
return self._cancelled or self._matured
def result(self, timeout=None):
self._block(timeout)
return None
def exception(self, timeout=None):
self._block(timeout)
return None
def traceback(self, timeout=None):
self._block(timeout)
return None
def add_done_callback(self, fn):
with self._condition:
if not self._cancelled and not self._matured:
self._done_callbacks.append(fn)
return
fn(self)
def start(self):
with self._condition:
self._channel.subscribe(self._update, try_to_connect=True)
def __del__(self):
with self._condition:
if not self._cancelled and not self._matured:
self._channel.unsubscribe(self._update)
def channel_ready_future(channel):
"""Creates a future.Future tracking when an implementations.Channel is ready.
"""Creates a future.Future tracking when an implementations.Channel is ready.
Cancelling the returned future.Future does not tell the given
implementations.Channel to abandon attempts it may have been making to
@ -158,7 +157,6 @@ def channel_ready_future(channel):
A future.Future that matures when the given Channel has connectivity
interfaces.ChannelConnectivity.READY.
"""
ready_future = _ChannelReadyFuture(channel)
ready_future.start()
return ready_future
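A typical use, assuming 'channel' was created via grpc.beta.implementations; blocking on the returned future bounds how long the caller waits for connectivity:

    from grpc.beta import utilities as beta_utilities
    from grpc.framework.foundation import future

    ready_future = beta_utilities.channel_ready_future(channel)
    try:
        ready_future.result(timeout=10)  # returns None once connectivity is READY
    except future.TimeoutError:
        ready_future.cancel()  # give up and unsubscribe from connectivity updates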

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines an enum for classifying RPC methods by streaming semantics."""
import enum
@ -34,9 +33,9 @@ import enum
@enum.unique
class Cardinality(enum.Enum):
"""Describes the streaming semantics of an RPC method."""
"""Describes the streaming semantics of an RPC method."""
UNARY_UNARY = 'request-unary/response-unary'
UNARY_STREAM = 'request-unary/response-streaming'
STREAM_UNARY = 'request-streaming/response-unary'
STREAM_STREAM = 'request-streaming/response-streaming'
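A small illustrative helper (not part of this module) showing how code typically branches on the enum:

    from grpc.framework.common import cardinality

    def response_is_streaming(method_cardinality):
        # True for the two response-streaming kinds.
        return method_cardinality in (cardinality.Cardinality.UNARY_STREAM,
                                      cardinality.Cardinality.STREAM_STREAM)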

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines an enum for classifying RPC methods by control flow semantics."""
import enum
@ -34,7 +33,7 @@ import enum
@enum.unique
class Service(enum.Enum):
"""Describes the control flow style of RPC method implementation."""
"""Describes the control flow style of RPC method implementation."""
INLINE = 'inline'
EVENT = 'event'

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,12 +26,11 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for indicating abandonment of computation."""
class Abandoned(Exception):
"""Indicates that some computation is being abandoned.
"""Indicates that some computation is being abandoned.
Abandoning a computation is different from returning a value or raising
an exception indicating some operational or programming defect.

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for working with callables."""
import abc
@ -39,7 +38,7 @@ import six
class Outcome(six.with_metaclass(abc.ABCMeta)):
"""A sum type describing the outcome of some call.
"""A sum type describing the outcome of some call.
Attributes:
kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the
@ -50,31 +49,31 @@ class Outcome(six.with_metaclass(abc.ABCMeta)):
Kind.RAISED.
"""
@enum.unique
class Kind(enum.Enum):
"""Identifies the general kind of the outcome of some call."""
RETURNED = object()
RAISED = object()
collections.namedtuple('_EasyOutcome',
['kind', 'return_value', 'exception']), Outcome):
"""A trivial implementation of Outcome."""
def _call_logging_exceptions(behavior, message, *args, **kwargs):
try:
return _EasyOutcome(Outcome.Kind.RETURNED,
behavior(*args, **kwargs), None)
except Exception as e: # pylint: disable=broad-except
logging.exception(message)
return _EasyOutcome(Outcome.Kind.RAISED, None, e)
def with_exceptions_logged(behavior, message):
"""Wraps a callable in a try-except that logs any exceptions it raises.
"""Wraps a callable in a try-except that logs any exceptions it raises.
Args:
behavior: Any callable.
@ -86,14 +85,16 @@ def with_exceptions_logged(behavior, message):
future.Outcome describing whether the given behavior returned a value or
raised an exception.
"""
@functools.wraps(behavior)
def wrapped_behavior(*args, **kwargs):
return _call_logging_exceptions(behavior, message, *args, **kwargs)
return wrapped_behavior
def call_logging_exceptions(behavior, message, *args, **kwargs):
"""Calls a behavior in a try-except that logs any exceptions it raises.
"""Calls a behavior in a try-except that logs any exceptions it raises.
Args:
behavior: Any callable.
@ -105,4 +106,4 @@ def call_logging_exceptions(behavior, message, *args, **kwargs):
An Outcome describing whether the given behavior returned a value or raised
an exception.
"""
return _call_logging_exceptions(behavior, message, *args, **kwargs)
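A short sketch of the distinction between the two entry points: call_logging_exceptions invokes immediately and returns an Outcome, while with_exceptions_logged returns a wrapped callable for later use:

    from grpc.framework.foundation import callable_util

    def risky(divisor):
        return 1.0 / divisor

    outcome = callable_util.call_logging_exceptions(risky, 'risky failed!', 0)
    if outcome.kind is callable_util.Outcome.Kind.RAISED:
        print(outcome.exception)  # the ZeroDivisionError, already logged

    safe_risky = callable_util.with_exceptions_logged(risky, 'risky failed!')
    outcome = safe_risky(2)  # Outcome with kind RETURNED and return_value 0.5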

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A Future interface.
Python doesn't have a Future interface in its standard library. In the absence
@ -53,33 +52,33 @@ import six
class TimeoutError(Exception):
"""Indicates that a particular call timed out."""
"""Indicates that a particular call timed out."""
class CancelledError(Exception):
"""Indicates that the computation underlying a Future was cancelled."""
"""Indicates that the computation underlying a Future was cancelled."""
class Future(six.with_metaclass(abc.ABCMeta)):
"""A representation of a computation in another control flow.
"""A representation of a computation in another control flow.
Computations represented by a Future may be yet to be begun, may be ongoing,
or may have already completed.
"""
# NOTE(nathaniel): This isn't the return type that I would want to have if it
# were up to me. Were this interface being written from scratch, the return
# type of this method would probably be a sum type like:
#
# NOT_COMMENCED
# COMMENCED_AND_NOT_COMPLETED
# PARTIAL_RESULT<Partial_Result_Type>
# COMPLETED<Result_Type>
# UNCANCELLABLE
# NOT_IMMEDIATELY_DETERMINABLE
@abc.abstractmethod
def cancel(self):
"""Attempts to cancel the computation.
This method does not block.
@ -92,25 +91,25 @@ class Future(six.with_metaclass(abc.ABCMeta)):
remote system for which a determination of whether or not it commenced
before being cancelled cannot be made without blocking.
"""
raise NotImplementedError()
# NOTE(nathaniel): Here too this isn't the return type that I'd want this
# method to have if it were up to me. I think I'd go with another sum type
# like:
#
# NOT_CANCELLED (this object's cancel method hasn't been called)
# NOT_COMMENCED
# COMMENCED_AND_NOT_COMPLETED
# PARTIAL_RESULT<Partial_Result_Type>
# COMPLETED<Result_Type>
# UNCANCELLABLE
# NOT_IMMEDIATELY_DETERMINABLE
#
# Notice how giving the cancel method the right semantics obviates most
# reasons for this method to exist.
@abc.abstractmethod
def cancelled(self):
"""Describes whether the computation was cancelled.
This method does not block.
@ -120,11 +119,11 @@ class Future(six.with_metaclass(abc.ABCMeta)):
not limited to this object's cancel method not having been called and
the computation's result having become immediately available.
"""
raise NotImplementedError()
@abc.abstractmethod
def running(self):
"""Describes whether the computation is taking place.
This method does not block.
@ -133,15 +132,15 @@ class Future(six.with_metaclass(abc.ABCMeta)):
taking place now, or False if the computation took place in the past or
was cancelled.
"""
raise NotImplementedError()
# NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
# would rather this only returned True in cases in which the underlying
# computation completed successfully. A computation's having been cancelled
# conflicts with considering that computation "done".
@abc.abstractmethod
def done(self):
"""Describes whether the computation has taken place.
This method does not block.
@ -150,11 +149,11 @@ class Future(six.with_metaclass(abc.ABCMeta)):
unscheduled or interrupted. False if the computation may possibly be
executing or scheduled to execute later.
"""
raise NotImplementedError()
@abc.abstractmethod
def result(self, timeout=None):
"""Accesses the outcome of the computation or raises its exception.
This method may return immediately or may block.
@ -173,11 +172,11 @@ class Future(six.with_metaclass(abc.ABCMeta)):
Exception: If the computation raised an exception, this call will raise
the same exception.
"""
raise NotImplementedError()
@abc.abstractmethod
def exception(self, timeout=None):
"""Return the exception raised by the computation.
This method may return immediately or may block.
@ -196,11 +195,11 @@ class Future(six.with_metaclass(abc.ABCMeta)):
terminate within the allotted time.
CancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def traceback(self, timeout=None):
"""Access the traceback of the exception raised by the computation.
This method may return immediately or may block.
@ -219,11 +218,11 @@ class Future(six.with_metaclass(abc.ABCMeta)):
terminate within the allotted time.
CancelledError: If the computation was cancelled.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_done_callback(self, fn):
"""Adds a function to be called at completion of the computation.
The callback will be passed this Future object describing the outcome of
the computation.
@ -234,4 +233,4 @@ class Future(six.with_metaclass(abc.ABCMeta)):
Args:
fn: A callable taking this Future object as its single parameter.
"""
raise NotImplementedError()
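Since any Future implementation supplies these methods, a done callback can be written against the interface alone; a sketch where 'some_future' is any Future-conforming value:

    def log_outcome(completed_future):
        # Safe to call without blocking: the future is already done here.
        if completed_future.cancelled():
            print('cancelled')
        elif completed_future.exception() is not None:
            print('failed: %s' % completed_future.exception())
        else:
            print('result: %s' % completed_future.result())

    some_future.add_done_callback(log_outcome)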

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A thread pool that logs exceptions raised by tasks executed within it."""
import logging
@ -35,42 +34,46 @@ from concurrent import futures
def _wrap(behavior):
"""Wraps an arbitrary callable behavior in exception-logging."""
def _wrapping(*args, **kwargs):
try:
return behavior(*args, **kwargs)
except Exception as e:
logging.exception(
'Unexpected exception from %s executed in logging pool!', behavior)
raise
return _wrapping
"""Wraps an arbitrary callable behavior in exception-logging."""
def _wrapping(*args, **kwargs):
try:
return behavior(*args, **kwargs)
except Exception as e:
logging.exception(
'Unexpected exception from %s executed in logging pool!',
behavior)
raise
return _wrapping
class _LoggingPool(object):
"""An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""
"""An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""
def __init__(self, backing_pool):
self._backing_pool = backing_pool
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._backing_pool.shutdown(wait=True)
def submit(self, fn, *args, **kwargs):
return self._backing_pool.submit(_wrap(fn), *args, **kwargs)
def map(self, func, *iterables, **kwargs):
return self._backing_pool.map(_wrap(func),
*iterables,
timeout=kwargs.get('timeout', None))
def shutdown(self, wait=True):
self._backing_pool.shutdown(wait=wait)
def pool(max_workers):
"""Creates a thread pool that logs exceptions raised by the tasks within it.
"""Creates a thread pool that logs exceptions raised by the tasks within it.
Args:
max_workers: The maximum number of worker threads to allow the pool.
@ -79,4 +82,4 @@ def pool(max_workers):
A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions
raised by the tasks executed within it.
"""
return _LoggingPool(futures.ThreadPoolExecutor(max_workers))
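A minimal usage sketch; the pool behaves like a futures.ThreadPoolExecutor except that task exceptions are logged before propagating:

    from grpc.framework.foundation import logging_pool

    with logging_pool.pool(4) as calculation_pool:
        length_future = calculation_pool.submit(len, 'abc')
        assert length_future.result() == 3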

@ -26,35 +26,35 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Interfaces related to streams of values or objects."""
import abc
import six
class Consumer(six.with_metaclass(abc.ABCMeta)):
"""Interface for consumers of finite streams of values or objects."""
"""Interface for consumers of finite streams of values or objects."""
@abc.abstractmethod
def consume(self, value):
"""Accepts a value.
Args:
value: Any value accepted by this Consumer.
"""
raise NotImplementedError()
@abc.abstractmethod
def terminate(self):
"""Indicates to this Consumer that no more values will be supplied."""
raise NotImplementedError()
@abc.abstractmethod
def consume_and_terminate(self, value):
"""Supplies a value and signals that no more values will be supplied.
Args:
value: Any value accepted by this Consumer.
"""
raise NotImplementedError()
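A toy implementation of the interface (illustrative only) that writes each value to stdout:

    from grpc.framework.foundation import stream

    class PrintingConsumer(stream.Consumer):

        def consume(self, value):
            print(value)

        def terminate(self):
            print('<end of stream>')

        def consume_and_terminate(self, value):
            self.consume(value)
            self.terminate()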

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Helpful utilities related to the stream module."""
import logging
@ -38,126 +37,126 @@ _NO_VALUE = object()
class TransformingConsumer(stream.Consumer):
"""A stream.Consumer that passes a transformation of its input to another."""
"""A stream.Consumer that passes a transformation of its input to another."""
def __init__(self, transformation, downstream):
self._transformation = transformation
self._downstream = downstream
def consume(self, value):
self._downstream.consume(self._transformation(value))
def terminate(self):
self._downstream.terminate()
def consume_and_terminate(self, value):
self._downstream.consume_and_terminate(self._transformation(value))
class IterableConsumer(stream.Consumer):
"""A Consumer that when iterated over emits the values it has consumed."""
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._active = True
def consume(self, stock_reply):
with self._condition:
if self._active:
self._values.append(stock_reply)
self._condition.notify()
def terminate(self):
with self._condition:
self._active = False
self._condition.notify()
def consume_and_terminate(self, stock_reply):
with self._condition:
if self._active:
self._values.append(stock_reply)
self._active = False
self._condition.notify()
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while self._active and not self._values:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
"""A Consumer that when iterated over emits the values it has consumed."""
def __init__(self):
self._condition = threading.Condition()
self._values = []
self._active = True
def consume(self, stock_reply):
with self._condition:
if self._active:
self._values.append(stock_reply)
self._condition.notify()
def terminate(self):
with self._condition:
self._active = False
self._condition.notify()
def consume_and_terminate(self, stock_reply):
with self._condition:
if self._active:
self._values.append(stock_reply)
self._active = False
self._condition.notify()
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while self._active and not self._values:
self._condition.wait()
if self._values:
return self._values.pop(0)
else:
raise StopIteration()
class ThreadSwitchingConsumer(stream.Consumer):
"""A Consumer decorator that affords serialization and asynchrony."""
def __init__(self, sink, pool):
self._lock = threading.Lock()
self._sink = sink
self._pool = pool
# True if self._spin has been submitted to the pool to be called once and
# that call has not yet returned, False otherwise.
self._spinning = False
self._values = []
self._active = True
def _spin(self, sink, value, terminate):
while True:
try:
if value is _NO_VALUE:
sink.terminate()
elif terminate:
sink.consume_and_terminate(value)
else:
sink.consume(value)
except Exception as e: # pylint:disable=broad-except
logging.exception(e)
with self._lock:
if terminate:
self._spinning = False
return
elif self._values:
value = self._values.pop(0)
terminate = not self._values and not self._active
elif not self._active:
value = _NO_VALUE
terminate = True
else:
self._spinning = False
return
def consume(self, value):
with self._lock:
if self._active:
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, False)
self._spinning = True
def terminate(self):
with self._lock:
if self._active:
self._active = False
if not self._spinning:
self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
self._spinning = True
def consume_and_terminate(self, value):
with self._lock:
if self._active:
self._active = False
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, True)
self._spinning = True
"""A Consumer decorator that affords serialization and asynchrony."""
def __init__(self, sink, pool):
self._lock = threading.Lock()
self._sink = sink
self._pool = pool
# True if self._spin has been submitted to the pool to be called once and
# that call has not yet returned, False otherwise.
self._spinning = False
self._values = []
self._active = True
def _spin(self, sink, value, terminate):
while True:
try:
if value is _NO_VALUE:
sink.terminate()
elif terminate:
sink.consume_and_terminate(value)
else:
sink.consume(value)
except Exception as e: # pylint:disable=broad-except
logging.exception(e)
with self._lock:
if terminate:
self._spinning = False
return
elif self._values:
value = self._values.pop(0)
terminate = not self._values and not self._active
elif not self._active:
value = _NO_VALUE
terminate = True
else:
self._spinning = False
return
def consume(self, value):
with self._lock:
if self._active:
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, False)
self._spinning = True
def terminate(self):
with self._lock:
if self._active:
self._active = False
if not self._spinning:
self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
self._spinning = True
def consume_and_terminate(self, value):
with self._lock:
if self._active:
self._active = False
if self._spinning:
self._values.append(value)
else:
self._pool.submit(self._spin, self._sink, value, True)
self._spinning = True
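A quick sketch of IterableConsumer bridging the push-style Consumer interface to pull-style iteration:

    from grpc.framework.foundation import stream_util

    consumer = stream_util.IterableConsumer()
    consumer.consume('a')
    consumer.consume_and_terminate('b')
    assert list(consumer) == ['a', 'b']  # iteration ends once terminated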

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The base interface of RPC Framework.
Implementations of this interface support the conduct of "operations":
@ -49,7 +48,7 @@ from grpc.framework.foundation import abandonment # pylint: disable=unused-impo
class NoSuchMethodError(Exception):
"""Indicates that an unrecognized operation has been called.
"""Indicates that an unrecognized operation has been called.
Attributes:
code: A code value to communicate to the other side of the operation along
@ -58,8 +57,8 @@ class NoSuchMethodError(Exception):
along with indication of operation termination. May be None.
"""
def __init__(self, code, details):
"""Constructor.
Args:
code: A code value to communicate to the other side of the operation
@ -67,12 +66,12 @@ class NoSuchMethodError(Exception):
details: A details value to communicate to the other side of the
operation along with indication of operation termination. May be None.
"""
self.code = code
self.details = details
class Outcome(object):
"""The outcome of an operation.
"""The outcome of an operation.
Attributes:
kind: A Kind value coarsely identifying how the operation terminated.
@ -82,23 +81,23 @@ class Outcome(object):
provided.
"""
@enum.unique
class Kind(enum.Enum):
"""Ways in which an operation can terminate."""
COMPLETED = 'completed'
CANCELLED = 'cancelled'
EXPIRED = 'expired'
LOCAL_SHUTDOWN = 'local shutdown'
REMOTE_SHUTDOWN = 'remote shutdown'
RECEPTION_FAILURE = 'reception failure'
TRANSMISSION_FAILURE = 'transmission failure'
LOCAL_FAILURE = 'local failure'
REMOTE_FAILURE = 'remote failure'
class Completion(six.with_metaclass(abc.ABCMeta)):
"""An aggregate of the values exchanged upon operation completion.
"""An aggregate of the values exchanged upon operation completion.
Attributes:
terminal_metadata: A terminal metadata value for the operation.
@ -108,21 +107,21 @@ class Completion(six.with_metaclass(abc.ABCMeta)):
class OperationContext(six.with_metaclass(abc.ABCMeta)):
"""Provides operation-related information and action."""
"""Provides operation-related information and action."""
@abc.abstractmethod
def outcome(self):
"""Indicates the operation's outcome (or that the operation is ongoing).
Returns:
None if the operation is still active or the Outcome value for the
operation if it has terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_termination_callback(self, callback):
"""Adds a function to be called upon operation termination.
Args:
callback: A callable to be passed an Outcome value on operation
@ -134,42 +133,44 @@ class OperationContext(six.with_metaclass(abc.ABCMeta)):
terminated an Outcome value describing the operation termination and the
passed callback will not be called as a result of this method call.
"""
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
"""Describes the length of allowed time remaining for the operation.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the operation to complete before it is considered to have
timed out. Zero is returned if the operation has terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the operation if the operation has not yet terminated."""
raise NotImplementedError()
@abc.abstractmethod
def fail(self, exception):
"""Indicates that the operation has failed.
Args:
exception: An exception germane to the operation failure. May be None.
"""
raise NotImplementedError()
class Operator(six.with_metaclass(abc.ABCMeta)):
"""An interface through which to participate in an operation."""
"""An interface through which to participate in an operation."""
@abc.abstractmethod
def advance(self,
initial_metadata=None,
payload=None,
completion=None,
allowance=None):
"""Progresses the operation.
Args:
initial_metadata: An initial metadata value. Only one may ever be
@ -181,23 +182,24 @@ class Operator(six.with_metaclass(abc.ABCMeta)):
allowance: A positive integer communicating the number of additional
payloads allowed to be passed by the remote side of the operation.
"""
raise NotImplementedError()
class ProtocolReceiver(six.with_metaclass(abc.ABCMeta)):
"""A means of receiving protocol values during an operation."""
"""A means of receiving protocol values during an operation."""
@abc.abstractmethod
def context(self, protocol_context):
"""Accepts the protocol context object for the operation.
Args:
protocol_context: The protocol context object for the operation.
"""
raise NotImplementedError()
class Subscription(six.with_metaclass(abc.ABCMeta)):
"""Describes customer code's interest in values from the other side.
"""Describes customer code's interest in values from the other side.
Attributes:
kind: A Kind value describing the overall kind of this value.
@ -215,20 +217,20 @@ class Subscription(six.with_metaclass(abc.ABCMeta)):
Kind.FULL.
"""
@enum.unique
class Kind(enum.Enum):
NONE = 'none'
TERMINATION_ONLY = 'termination only'
FULL = 'full'
class Servicer(six.with_metaclass(abc.ABCMeta)):
"""Interface for service implementations."""
"""Interface for service implementations."""
@abc.abstractmethod
def service(self, group, method, context, output_operator):
"""Services an operation.
Args:
group: The group identifier of the operation to be serviced.
@ -248,20 +250,20 @@ class Servicer(six.with_metaclass(abc.ABCMeta)):
abandonment.Abandoned: If the operation has been aborted and there is no longer any reason to service the operation.
"""
raise NotImplementedError()
class End(six.with_metaclass(abc.ABCMeta)):
"""Common type for entry-point objects on both sides of an operation."""
"""Common type for entry-point objects on both sides of an operation."""
@abc.abstractmethod
def start(self):
"""Starts this object's service of operations."""
raise NotImplementedError()
@abc.abstractmethod
def stop(self, grace):
"""Stops this object's service of operations.
This object will refuse service of new operations as soon as this method is
called but operations under way at the time of the call may be given a
@ -281,13 +283,19 @@ class End(six.with_metaclass(abc.ABCMeta)):
much sooner (if for example this End had no operations in progress at
the time its stop method was called).
"""
raise NotImplementedError()
@abc.abstractmethod
def operate(self,
group,
method,
subscription,
timeout,
initial_metadata=None,
payload=None,
completion=None,
protocol_options=None):
"""Commences an operation.
Args:
group: The group identifier of the invoked operation.
@ -312,23 +320,23 @@ class End(six.with_metaclass(abc.ABCMeta)):
returned pair is an Operator to which operation values not passed in
this call should later be passed.
"""
raise NotImplementedError()
@abc.abstractmethod
def operation_stats(self):
"""Reports the number of terminated operations broken down by outcome.
Returns:
A dictionary from Outcome.Kind value to an integer identifying the number
of operations that terminated with that outcome kind.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_idle_action(self, action):
"""Adds an action to be called when this End has no ongoing operations.
Args:
action: A callable that accepts no arguments.
"""
raise NotImplementedError()

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for use with the base interface of RPC Framework."""
import collections
@ -34,27 +33,30 @@ import collections
from grpc.framework.interfaces.base import base
class _Completion(base.Completion,
collections.namedtuple('_Completion', (
'terminal_metadata',
'code',
'message',))):
"""A trivial implementation of base.Completion."""
class _Subscription(base.Subscription,
collections.namedtuple('_Subscription', (
'kind',
'termination_callback',
'allowance',
'operator',
'protocol_receiver',))):
"""A trivial implementation of base.Subscription."""
_NONE_SUBSCRIPTION = _Subscription(base.Subscription.Kind.NONE, None, None,
None, None)
def completion(terminal_metadata, code, message):
"""Creates a base.Completion aggregating the given operation values.
"""Creates a base.Completion aggregating the given operation values.
Args:
terminal_metadata: A terminal metadata value for an operation.
@ -64,11 +66,11 @@ def completion(terminal_metadata, code, message):
Returns:
A base.Completion aggregating the given operation values.
"""
return _Completion(terminal_metadata, code, message)
def full_subscription(operator, protocol_receiver):
"""Creates a "full" base.Subscription for the given base.Operator.
"""Creates a "full" base.Subscription for the given base.Operator.
Args:
operator: A base.Operator to be used in an operation.
@ -78,5 +80,5 @@ def full_subscription(operator, protocol_receiver):
A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given
base.Operator and base.ProtocolReceiver.
"""
return _Subscription(base.Subscription.Kind.FULL, None, None, operator,
protocol_receiver)
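A sketch of both helpers; the operator and protocol receiver here are hypothetical objects implementing base.Operator and base.ProtocolReceiver:

    from grpc.framework.interfaces.base import utilities

    # The code and message values are illustrative, not defined by this module.
    operation_completion = utilities.completion(None, 0, 'OK')
    subscription = utilities.full_subscription(my_operator, my_protocol_receiver)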

@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Interfaces defining the Face layer of RPC Framework."""
import abc
@ -45,33 +44,38 @@ from grpc.framework.foundation import stream # pylint: disable=unused-import
class NoSuchMethodError(Exception):
"""Raised by customer code to indicate an unrecognized method.
"""Raised by customer code to indicate an unrecognized method.
Attributes:
group: The group of the unrecognized method.
method: The name of the unrecognized method.
"""
def __init__(self, group, method):
"""Constructor.
Args:
group: The group identifier of the unrecognized RPC name.
method: The method identifier of the unrecognized RPC name.
"""
super(NoSuchMethodError, self).__init__()
self.group = group
self.method = method
def __repr__(self):
return 'face.NoSuchMethodError(%s, %s)' % (
self.group,
self.method,)
class Abortion(
collections.namedtuple('Abortion', (
'kind',
'initial_metadata',
'terminal_metadata',
'code',
'details',))):
"""A value describing RPC abortion.
Attributes:
kind: A Kind value identifying how the RPC failed.
@ -85,21 +89,21 @@ class Abortion(
details value was received.
"""
@enum.unique
class Kind(enum.Enum):
"""Types of RPC abortion."""
CANCELLED = 'cancelled'
EXPIRED = 'expired'
LOCAL_SHUTDOWN = 'local shutdown'
REMOTE_SHUTDOWN = 'remote shutdown'
NETWORK_FAILURE = 'network failure'
LOCAL_FAILURE = 'local failure'
REMOTE_FAILURE = 'remote failure'
class AbortionError(six.with_metaclass(abc.ABCMeta, Exception)):
"""Common super type for exceptions indicating RPC abortion.
"""Common super type for exceptions indicating RPC abortion.
initial_metadata: The initial metadata from the other side of the RPC or
None if no initial metadata value was received.
@ -111,100 +115,100 @@ class AbortionError(six.with_metaclass(abc.ABCMeta, Exception)):
details value was received.
"""
def __init__(self, initial_metadata, terminal_metadata, code, details):
super(AbortionError, self).__init__()
self.initial_metadata = initial_metadata
self.terminal_metadata = terminal_metadata
self.code = code
self.details = details
def __str__(self):
return '%s(code=%s, details="%s")' % (self.__class__.__name__,
self.code, self.details)
class CancellationError(AbortionError):
"""Indicates that an RPC has been cancelled."""
"""Indicates that an RPC has been cancelled."""
class ExpirationError(AbortionError):
"""Indicates that an RPC has expired ("timed out")."""
"""Indicates that an RPC has expired ("timed out")."""
class LocalShutdownError(AbortionError):
"""Indicates that an RPC has terminated due to local shutdown of RPCs."""
"""Indicates that an RPC has terminated due to local shutdown of RPCs."""
class RemoteShutdownError(AbortionError):
"""Indicates that an RPC has terminated due to remote shutdown of RPCs."""
"""Indicates that an RPC has terminated due to remote shutdown of RPCs."""
class NetworkError(AbortionError):
"""Indicates that some error occurred on the network."""
"""Indicates that some error occurred on the network."""
class LocalError(AbortionError):
"""Indicates that an RPC has terminated due to a local defect."""
"""Indicates that an RPC has terminated due to a local defect."""
class RemoteError(AbortionError):
"""Indicates that an RPC has terminated due to a remote defect."""
"""Indicates that an RPC has terminated due to a remote defect."""
class RpcContext(six.with_metaclass(abc.ABCMeta)):
"""Provides RPC-related information and action."""
"""Provides RPC-related information and action."""
@abc.abstractmethod
def is_active(self):
"""Describes whether the RPC is active or has terminated."""
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
"""Describes the length of allowed time remaining for the RPC.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have timed
out.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_abortion_callback(self, abortion_callback):
"""Registers a callback to be called if the RPC is aborted.
Args:
abortion_callback: A callable to be called and passed an Abortion value
in the event of RPC abortion.
"""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Cancels the RPC.
Idempotent and has no effect if the RPC has already terminated.
"""
raise NotImplementedError()
@abc.abstractmethod
def protocol_context(self):
"""Accesses a custom object specified by an implementation provider.
Returns:
A value specified by the provider of a Face interface implementation
affording custom state and behavior.
"""
raise NotImplementedError()
class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
"""Invocation-side utility object for an RPC."""
"""Invocation-side utility object for an RPC."""
@abc.abstractmethod
def initial_metadata(self):
"""Accesses the initial metadata from the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
@ -213,11 +217,11 @@ class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
The initial metadata object emitted by the service-side of the RPC, or
None if there was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def terminal_metadata(self):
"""Accesses the terminal metadata from the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
@ -226,11 +230,11 @@ class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
The terminal metadata object emitted by the service-side of the RPC, or
None if there was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def code(self):
"""Accesses the code emitted by the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
    Returns:
The code object emitted by the service-side of the RPC, or None if there
was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def details(self):
"""Accesses the details value emitted by the service-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the service-side of the RPC.
    Returns:
The details value emitted by the service-side of the RPC, or None if there
was no such value.
"""
raise NotImplementedError()
class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
"""A context object passed to method implementations."""
"""A context object passed to method implementations."""
@abc.abstractmethod
def invocation_metadata(self):
"""Accesses the metadata from the invocation-side of the RPC.
This method blocks until the value is available or is known not to have been
emitted from the invocation-side of the RPC.
    Returns:
The metadata object emitted by the invocation-side of the RPC, or None if
there was no such value.
"""
raise NotImplementedError()
@abc.abstractmethod
def initial_metadata(self, initial_metadata):
"""Accepts the service-side initial metadata value of the RPC.
This method need not be called by method implementations if they have no
service-side initial metadata to transmit.
    Args:
initial_metadata: The service-side initial metadata value of the RPC to
be transmitted to the invocation side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def terminal_metadata(self, terminal_metadata):
"""Accepts the service-side terminal metadata value of the RPC.
This method need not be called by method implementations if they have no
service-side terminal metadata to transmit.
    Args:
terminal_metadata: The service-side terminal metadata value of the RPC to
be transmitted to the invocation side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def code(self, code):
"""Accepts the service-side code of the RPC.
This method need not be called by method implementations if they have no
code to transmit.
    Args:
code: The code of the RPC to be transmitted to the invocation side of the
RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def details(self, details):
"""Accepts the service-side details of the RPC.
This method need not be called by method implementations if they have no
service-side details to transmit.
    Args:
details: The service-side details value of the RPC to be transmitted to
the invocation side of the RPC.
"""
raise NotImplementedError()
class ResponseReceiver(six.with_metaclass(abc.ABCMeta)):
"""Invocation-side object used to accept the output of an RPC."""
"""Invocation-side object used to accept the output of an RPC."""
@abc.abstractmethod
def initial_metadata(self, initial_metadata):
"""Receives the initial metadata from the service-side of the RPC.
Args:
initial_metadata: The initial metadata object emitted from the
service-side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def response(self, response):
"""Receives a response from the service-side of the RPC.
Args:
response: A response object emitted from the service-side of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def complete(self, terminal_metadata, code, details):
"""Receives the completion values emitted from the service-side of the RPC.
Args:
terminal_metadata: The terminal metadata object emitted from the
        service-side of the RPC.
code: The code object emitted from the service-side of the RPC.
details: The details object emitted from the service-side of the RPC.
"""
raise NotImplementedError()
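
A ResponseReceiver implementation can be as small as an object that records
what it is handed. A sketch (the class name is illustrative) that buffers
everything for later inspection:

from grpc.framework.interfaces.face import face

class RecordingReceiver(face.ResponseReceiver):
    """Buffers metadata, responses, and completion values as they arrive."""

    def __init__(self):
        self.initial_metadata_value = None
        self.responses = []
        self.completion = None

    def initial_metadata(self, initial_metadata):
        self.initial_metadata_value = initial_metadata

    def response(self, response):
        self.responses.append(response)

    def complete(self, terminal_metadata, code, details):
        self.completion = (terminal_metadata, code, details)
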
class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a unary-unary RPC in any call style."""
"""Affords invoking a unary-unary RPC in any call style."""
@abc.abstractmethod
def __call__(
self, request, timeout, metadata=None, with_call=False,
protocol_options=None):
"""Synchronously invokes the underlying RPC.
@abc.abstractmethod
def __call__(self,
request,
timeout,
metadata=None,
with_call=False,
protocol_options=None):
"""Synchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
@ -385,11 +392,11 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(self, request, timeout, metadata=None, protocol_options=None):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
@ -405,13 +412,17 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(self,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
@ -427,15 +438,15 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
Returns:
A Call for the RPC.
"""
raise NotImplementedError()
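
The three abstract methods above correspond to the three invocation styles. A
hedged sketch, assuming `multi_callable` implements UnaryUnaryMultiCallable
and `receiver` implements ResponseReceiver:

def demonstrate_call_styles(multi_callable, request, receiver):
    # Blocking style: returns the response value or raises AbortionError.
    response = multi_callable(request, 5.0)
    # Future style: returns immediately; the Future resolves to the response.
    response_future = multi_callable.future(request, 5.0)
    # Event style: responses flow to the receiver, abortions to the callback.
    call = multi_callable.event(request, receiver,
                                lambda abortion: None, 5.0)
    return response, response_future, call
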
class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a unary-stream RPC in any call style."""
"""Affords invoking a unary-stream RPC in any call style."""
@abc.abstractmethod
def __call__(self, request, timeout, metadata=None, protocol_options=None):
"""Invokes the underlying RPC.
Args:
request: The request value for the RPC.
@ -450,13 +461,17 @@ class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(self,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
@ -472,17 +487,20 @@ class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
Returns:
A Call object for the RPC.
"""
raise NotImplementedError()
class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a stream-unary RPC in any call style."""
"""Affords invoking a stream-unary RPC in any call style."""
@abc.abstractmethod
def __call__(
self, request_iterator, timeout, metadata=None,
with_call=False, protocol_options=None):
"""Synchronously invokes the underlying RPC.
@abc.abstractmethod
def __call__(self,
request_iterator,
timeout,
metadata=None,
with_call=False,
protocol_options=None):
"""Synchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
@ -501,12 +519,15 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future(self,
request_iterator,
timeout,
metadata=None,
protocol_options=None):
"""Asynchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
@ -522,13 +543,16 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(self,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Asynchronously invokes the underlying RPC.
Args:
receiver: A ResponseReceiver to be passed the response data of the RPC.
@ -544,16 +568,19 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
A single object that is both a Call object for the RPC and a
stream.Consumer to which the request values of the RPC should be passed.
"""
raise NotImplementedError()
class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a stream-stream RPC in any call style."""
"""Affords invoking a stream-stream RPC in any call style."""
@abc.abstractmethod
def __call__(
self, request_iterator, timeout, metadata=None, protocol_options=None):
"""Invokes the underlying RPC.
@abc.abstractmethod
def __call__(self,
request_iterator,
timeout,
metadata=None,
protocol_options=None):
"""Invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
@ -568,13 +595,16 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event(self,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Asynchronously invokes the underlying RPC.
Args:
receiver: A ResponseReceiver to be passed the response data of the RPC.
@ -590,11 +620,11 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
A single object that is both a Call object for the RPC and a
stream.Consumer to which the request values of the RPC should be passed.
"""
raise NotImplementedError()
class MethodImplementation(six.with_metaclass(abc.ABCMeta)):
"""A sum type that describes a method implementation.
"""A sum type that describes a method implementation.
Attributes:
cardinality: A cardinality.Cardinality value.
@ -639,11 +669,11 @@ class MethodImplementation(six.with_metaclass(abc.ABCMeta)):
class MultiMethodImplementation(six.with_metaclass(abc.ABCMeta)):
"""A general type able to service many methods."""
"""A general type able to service many methods."""
@abc.abstractmethod
def service(self, group, method, response_consumer, context):
"""Services an RPC.
Args:
group: The group identifier of the RPC.
@ -666,17 +696,22 @@ class MultiMethodImplementation(six.with_metaclass(abc.ABCMeta)):
NoSuchMethodError: If this MultiMethod does not recognize the given group
and name for the RPC and is not able to service the RPC.
"""
raise NotImplementedError()
class GenericStub(six.with_metaclass(abc.ABCMeta)):
"""Affords RPC invocation via generic methods."""
@abc.abstractmethod
def blocking_unary_unary(self,
group,
method,
request,
timeout,
metadata=None,
with_call=False,
protocol_options=None):
"""Invokes a unary-request-unary-response method.
This method blocks until either returning the response value of the RPC
(in the event of RPC completion) or raising an exception (in the event of
@ -700,13 +735,17 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future_unary_unary(self,
group,
method,
request,
timeout,
metadata=None,
protocol_options=None):
"""Invokes a unary-request-unary-response method.
Args:
group: The group identifier of the RPC.
@ -723,13 +762,17 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def inline_unary_stream(self,
group,
method,
request,
timeout,
metadata=None,
protocol_options=None):
"""Invokes a unary-request-stream-response method.
Args:
group: The group identifier of the RPC.
@ -745,13 +788,18 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def blocking_stream_unary(self,
group,
method,
request_iterator,
timeout,
metadata=None,
with_call=False,
protocol_options=None):
"""Invokes a stream-request-unary-response method.
This method blocks until either returning the response value of the RPC
(in the event of RPC completion) or raising an exception (in the event of
@ -775,13 +823,17 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
Raises:
AbortionError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def future_stream_unary(self,
group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None):
"""Invokes a stream-request-unary-response method.
Args:
group: The group identifier of the RPC.
@ -798,13 +850,17 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
response value of the RPC. In the event of RPC abortion, the returned
Future's exception value will be an AbortionError.
"""
raise NotImplementedError()
@abc.abstractmethod
def inline_stream_stream(self,
group,
method,
request_iterator,
timeout,
metadata=None,
protocol_options=None):
"""Invokes a stream-request-stream-response method.
Args:
group: The group identifier of the RPC.
@ -820,13 +876,19 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
values. Drawing response values from the returned iterator may raise
AbortionError indicating abortion of the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_unary_unary(self,
group,
method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Event-driven invocation of a unary-request-unary-response method.
Args:
group: The group identifier of the RPC.
@ -843,13 +905,19 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
Returns:
A Call for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_unary_stream(self,
group,
method,
request,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Event-driven invocation of a unary-request-stream-response method.
Args:
group: The group identifier of the RPC.
@ -866,13 +934,18 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
Returns:
A Call for the RPC.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_stream_unary(self,
group,
method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Event-driven invocation of a unary-request-unary-response method.
Args:
group: The group identifier of the RPC.
@ -889,13 +962,18 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
A pair of a Call object for the RPC and a stream.Consumer to which the
request values of the RPC should be passed.
"""
raise NotImplementedError()
@abc.abstractmethod
def event_stream_stream(self,
group,
method,
receiver,
abortion_callback,
timeout,
metadata=None,
protocol_options=None):
"""Event-driven invocation of a unary-request-stream-response method.
Args:
group: The group identifier of the RPC.
@ -912,11 +990,11 @@ class GenericStub(six.with_metaclass(abc.ABCMeta)):
A pair of a Call object for the RPC and a stream.Consumer to which the
request values of the RPC should be passed.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_unary(self, group, method):
"""Creates a UnaryUnaryMultiCallable for a unary-unary method.
Args:
group: The group identifier of the RPC.
      method: The method identifier of the RPC.
Returns:
A UnaryUnaryMultiCallable value for the named unary-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def unary_stream(self, group, method):
"""Creates a UnaryStreamMultiCallable for a unary-stream method.
Args:
group: The group identifier of the RPC.
      method: The method identifier of the RPC.
Returns:
      A UnaryStreamMultiCallable value for the named unary-stream method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_unary(self, group, method):
"""Creates a StreamUnaryMultiCallable for a stream-unary method.
Args:
group: The group identifier of the RPC.
      method: The method identifier of the RPC.
Returns:
A StreamUnaryMultiCallable value for the named stream-unary method.
"""
raise NotImplementedError()
@abc.abstractmethod
def stream_stream(self, group, method):
"""Creates a StreamStreamMultiCallable for a stream-stream method.
Args:
group: The group identifier of the RPC.
      method: The method identifier of the RPC.
Returns:
A StreamStreamMultiCallable value for the named stream-stream method.
"""
raise NotImplementedError()
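
A GenericStub is typically used by first binding a multi-callable for a
fully-qualified method and then invoking it. A sketch with hypothetical group
and method names:

def call_via_generic_stub(stub, request):
    # 'my.package.Service' and 'MyMethod' are placeholders for a real RPC.
    multi_callable = stub.unary_unary('my.package.Service', 'MyMethod')
    return multi_callable(request, 10.0)
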
class DynamicStub(six.with_metaclass(abc.ABCMeta)):
"""Affords RPC invocation via attributes corresponding to afforded methods.
"""Affords RPC invocation via attributes corresponding to afforded methods.
Instances of this type may be scoped to a single group so that attribute
access is unambiguous.

"""Utilities for RPC Framework's Face interface."""
import collections
@ -38,18 +37,24 @@ from grpc.framework.foundation import stream # pylint: disable=unused-import
from grpc.framework.interfaces.face import face
class _MethodImplementation(face.MethodImplementation,
collections.namedtuple('_MethodImplementation', [
'cardinality',
'style',
'unary_unary_inline',
'unary_stream_inline',
'stream_unary_inline',
'stream_stream_inline',
'unary_unary_event',
'unary_stream_event',
'stream_unary_event',
'stream_stream_event',
])):
pass
def unary_unary_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-unary RPC method as a callable value
@ -59,13 +64,13 @@ def unary_unary_inline(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
style.Service.INLINE, behavior, None, None,
None, None, None, None, None)
def unary_stream_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-stream RPC method as a callable
@ -75,13 +80,13 @@ def unary_stream_inline(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
style.Service.INLINE, None, behavior, None,
None, None, None, None, None)
def stream_unary_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-unary RPC method as a callable
@ -91,13 +96,13 @@ def stream_unary_inline(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
style.Service.INLINE, None, None, behavior,
None, None, None, None, None)
def stream_stream_inline(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-stream RPC method as a callable
@ -107,13 +112,13 @@ def stream_stream_inline(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
style.Service.INLINE, None, None, None,
behavior, None, None, None, None)
def unary_unary_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-unary RPC method as a callable
@ -123,13 +128,13 @@ def unary_unary_event(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
style.Service.EVENT, None, None, None, None,
behavior, None, None, None)
def unary_stream_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-stream RPC method as a callable
@ -139,13 +144,13 @@ def unary_stream_event(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
style.Service.EVENT, None, None, None, None,
None, behavior, None, None)
def stream_unary_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-unary RPC method as a callable
@ -156,13 +161,13 @@ def stream_unary_event(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
style.Service.EVENT, None, None, None, None,
None, None, behavior, None)
def stream_stream_event(behavior):
"""Creates an face.MethodImplementation for the given behavior.
"""Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a stream-stream RPC method as a callable
@ -173,6 +178,6 @@ def stream_stream_event(behavior):
Returns:
      A face.MethodImplementation derived from the given behavior.
    """
return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
style.Service.EVENT, None, None, None, None,
None, None, None, behavior)
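
These constructors are usually combined into a mapping from (group, method)
pairs to MethodImplementations that server-side machinery built on the Face
interface can consume. A sketch with a hypothetical echo behavior:

from grpc.framework.interfaces.face import utilities

def _echo(request, context):
    # Inline unary-unary behavior: return the request unchanged.
    return request

method_implementations = {
    ('my.package.Echo', 'UnaryEcho'): utilities.unary_unary_inline(_echo),
}
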

import os
import os.path
import shutil
@ -38,7 +37,6 @@ from distutils import errors
import commands
C_PYTHON_DEV = """
#include <Python.h>
int main(int argc, char **argv) { return 0; }
@ -55,69 +53,70 @@ Could not find <Python.h>. This could mean the following:
(check your environment variables or try re-installing?)
"""
C_CHECKS = {C_PYTHON_DEV: C_PYTHON_DEV_ERROR_MESSAGE,}
def _compile(compiler, source_string):
tempdir = tempfile.mkdtemp()
cpath = os.path.join(tempdir, 'a.c')
with open(cpath, 'w') as cfile:
cfile.write(source_string)
try:
compiler.compile([cpath])
except errors.CompileError as error:
return error
finally:
shutil.rmtree(tempdir)
def _expect_compile(compiler, source_string, error_message):
if _compile(compiler, source_string) is not None:
sys.stderr.write(error_message)
raise commands.CommandError(
"Diagnostics found a compilation environment issue:\n{}"
.format(error_message))
def diagnose_compile_error(build_ext, error):
"""Attempt to diagnose an error during compilation."""
for c_check, message in C_CHECKS.items():
_expect_compile(build_ext.compiler, c_check, message)
python_sources = [
source for source in build_ext.get_source_files()
if source.startswith('./src/python') and source.endswith('c')
]
for source in python_sources:
if not os.path.isfile(source):
raise commands.CommandError(
("Diagnostics found a missing Python extension source file:\n{}\n\n"
"This is usually because the Cython sources haven't been transpiled "
"into C yet and you're building from source.\n"
"Try setting the environment variable "
"`GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking `setup.py` or "
"when using `pip`, e.g.:\n\n"
"pip install -rrequirements.txt\n"
"GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .")
.format(source)
)
"""Attempt to diagnose an error during compilation."""
for c_check, message in C_CHECKS.items():
_expect_compile(build_ext.compiler, c_check, message)
python_sources = [
source for source in build_ext.get_source_files()
if source.startswith('./src/python') and source.endswith('c')
]
for source in python_sources:
if not os.path.isfile(source):
raise commands.CommandError((
"Diagnostics found a missing Python extension source file:\n{}\n\n"
"This is usually because the Cython sources haven't been transpiled "
"into C yet and you're building from source.\n"
"Try setting the environment variable "
"`GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking `setup.py` or "
"when using `pip`, e.g.:\n\n"
"pip install -rrequirements.txt\n"
"GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .").format(source))
def diagnose_attribute_error(build_ext, error):
if any('_needs_stub' in arg for arg in error.args):
raise commands.CommandError(
"We expect a missing `_needs_stub` attribute from older versions of "
"setuptools. Consider upgrading setuptools.")
_ERROR_DIAGNOSES = {
errors.CompileError: diagnose_compile_error,
AttributeError: diagnose_attribute_error
}
def diagnose_build_ext_error(build_ext, error, formatted):
diagnostic = _ERROR_DIAGNOSES.get(type(error))
if diagnostic is None:
raise commands.CommandError(
"\n\nWe could not diagnose your build failure. Please file an issue at "
"http://www.github.com/grpc/grpc with `[Python install]` in the title."
"\n\n{}".format(formatted))
else:
diagnostic(build_ext, error)
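
A sketch of how a build wrapper might route failures into
diagnose_build_ext_error; the wrapper itself is hypothetical, but the call
matches the function above:

import traceback

def run_with_diagnostics(build_ext_command):
    try:
        build_ext_command.run()
    except Exception as error:
        # Hand the formatted traceback to the diagnostics defined above;
        # re-raise the original error if no diagnosis applied.
        diagnose_build_ext_error(build_ext_command, error,
                                 traceback.format_exc())
        raise
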

"""Reference implementation for health checking in gRPC Python."""
import threading
@ -37,23 +36,23 @@ from grpc_health.v1 import health_pb2
class HealthServicer(health_pb2.HealthServicer):
"""Servicer handling RPCs for service statuses."""
"""Servicer handling RPCs for service statuses."""
def __init__(self):
self._server_status_lock = threading.Lock()
self._server_status = {}
def Check(self, request, context):
with self._server_status_lock:
status = self._server_status.get(request.service)
if status is None:
context.set_code(grpc.StatusCode.NOT_FOUND)
return health_pb2.HealthCheckResponse()
else:
return health_pb2.HealthCheckResponse(status=status)
def set(self, service, status):
"""Sets the status of a service.
Args:
service: string, the name of the service.
@ -61,5 +60,5 @@ class HealthServicer(health_pb2.HealthServicer):
status: HealthCheckResponse.status enum value indicating
the status of the service
"""
with self._server_status_lock:
self._server_status[service] = status
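
Usage sketch, assuming the module layout shown here: mark a hypothetical
service as SERVING before answering health checks.

from grpc_health.v1 import health, health_pb2

servicer = health.HealthServicer()
servicer.set('my.package.MyService',
             health_pb2.HealthCheckResponse.SERVING)
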

"""Provides distutils command classes for the GRPC Python setup process."""
import os
@ -39,40 +38,40 @@ HEALTH_PROTO = os.path.join(ROOT_DIR, '../../proto/grpc/health/v1/health.proto')
class CopyProtoModules(setuptools.Command):
"""Command to copy proto modules from grpc/src/proto."""
"""Command to copy proto modules from grpc/src/proto."""
description = ''
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if os.path.isfile(HEALTH_PROTO):
shutil.copyfile(
HEALTH_PROTO,
os.path.join(ROOT_DIR, 'grpc_health/v1/health.proto'))
class BuildPackageProtos(setuptools.Command):
"""Command to generate project *_pb2.py modules from proto files."""
"""Command to generate project *_pb2.py modules from proto files."""
description = 'build grpc protobuf modules'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# due to limitations of the proto generator, we require that only *one*
# directory is provided as an 'include' directory. We assume it's the '' key
# to `self.distribution.package_dir` (and get a key error if it's not
# there).
from grpc_tools import command
command.build_package_protos(self.distribution.package_dir[''])

@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Setup module for the GRPC Python package's optional health checking."""
import os
@ -41,18 +40,14 @@ os.chdir(os.path.dirname(os.path.abspath(__file__)))
import health_commands
import grpc_version
PACKAGE_DIRECTORIES = {'': '.',}
SETUP_REQUIRES = (
'grpcio-tools>={version}'.format(version=grpc_version.VERSION),)
INSTALL_REQUIRES = (
'protobuf>=3.0.0',
'grpcio>={version}'.format(version=grpc_version.VERSION),)
COMMAND_CLASS = {
# Run preprocess from the repository *before* doing any packaging!
@ -68,5 +63,4 @@ setuptools.setup(
packages=setuptools.find_packages('.'),
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS)

"""Reference implementation for reflection in gRPC Python."""
import threading
@ -39,105 +38,96 @@ from grpc_reflection.v1alpha import reflection_pb2
_POOL = descriptor_pool.Default()
def _not_found_error():
return reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.NOT_FOUND.value[0],
error_message=grpc.StatusCode.NOT_FOUND.value[1].encode(),))
def _file_descriptor_response(descriptor):
proto = descriptor_pb2.FileDescriptorProto()
descriptor.CopyToProto(proto)
serialized_proto = proto.SerializeToString()
return reflection_pb2.ServerReflectionResponse(
file_descriptor_response=reflection_pb2.FileDescriptorResponse(
file_descriptor_proto=(serialized_proto,)),)
class ReflectionServicer(reflection_pb2.ServerReflectionServicer):
"""Servicer handling RPCs for service statuses."""
"""Servicer handling RPCs for service statuses."""
def __init__(self, service_names, pool=None):
"""Constructor.
Args:
service_names: Iterable of fully-qualified service names available.
"""
self._service_names = list(service_names)
self._pool = _POOL if pool is None else pool
def _file_by_filename(self, filename):
try:
descriptor = self._pool.FindFileByName(filename)
except KeyError:
return _not_found_error()
else:
return _file_descriptor_response(descriptor)
def _file_containing_symbol(self, fully_qualified_name):
try:
descriptor = self._pool.FindFileContainingSymbol(
fully_qualified_name)
except KeyError:
return _not_found_error()
else:
return _file_descriptor_response(descriptor)
    def _file_containing_extension(self, containing_type, extension_number):
# TODO(atash) Python protobuf currently doesn't support querying extensions.
# https://github.com/google/protobuf/issues/2248
return reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.UNIMPLEMENTED.value[0],
                error_message=grpc.StatusCode.UNIMPLEMENTED.value[1].encode(),))
    def _extension_numbers_of_type(self, fully_qualified_name):
# TODO(atash) We're allowed to leave this unsupported according to the
# protocol, but we should still eventually implement it. Hits the same issue
# as `_file_containing_extension`, however.
# https://github.com/google/protobuf/issues/2248
return reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.UNIMPLEMENTED.value[0],
                error_message=grpc.StatusCode.UNIMPLEMENTED.value[1].encode(),))
def _list_services(self):
return reflection_pb2.ServerReflectionResponse(
list_services_response=reflection_pb2.ListServiceResponse(service=[
reflection_pb2.ServiceResponse(name=service_name)
for service_name in self._service_names
]))
def ServerReflectionInfo(self, request_iterator, context):
for request in request_iterator:
if request.HasField('file_by_filename'):
yield self._file_by_filename(request.file_by_filename)
elif request.HasField('file_containing_symbol'):
yield self._file_containing_symbol(
request.file_containing_symbol)
elif request.HasField('file_containing_extension'):
yield self._file_containing_extension(
request.file_containing_extension.containing_type,
request.file_containing_extension.extension_number)
elif request.HasField('all_extension_numbers_of_type'):
                yield self._extension_numbers_of_type(
                    request.all_extension_numbers_of_type)
elif request.HasField('list_services'):
yield self._list_services()
else:
yield reflection_pb2.ServerReflectionResponse(
error_response=reflection_pb2.ErrorResponse(
error_code=grpc.StatusCode.INVALID_ARGUMENT.value[0],
error_message=grpc.StatusCode.INVALID_ARGUMENT.value[1]
.encode(),))
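
Usage sketch, assuming the module layout shown here; the advertised names
include the reflection service itself so that it too can be discovered:

from grpc_reflection.v1alpha import reflection

SERVICE_NAMES = (
    'my.package.MyService',  # hypothetical application service
    'grpc.reflection.v1alpha.ServerReflection',
)
servicer = reflection.ReflectionServicer(SERVICE_NAMES)
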

"""Provides distutils command classes for the GRPC Python setup process."""
import os
@ -35,44 +34,46 @@ import shutil
import setuptools
ROOT_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
HEALTH_PROTO = os.path.join(
ROOT_DIR, '../../proto/grpc/reflection/v1alpha/reflection.proto')
class CopyProtoModules(setuptools.Command):
"""Command to copy proto modules from grpc/src/proto."""
"""Command to copy proto modules from grpc/src/proto."""
description = ''
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
if os.path.isfile(HEALTH_PROTO):
shutil.copyfile(
HEALTH_PROTO,
os.path.join(ROOT_DIR,
'grpc_reflection/v1alpha/reflection.proto'))
class BuildPackageProtos(setuptools.Command):
"""Command to generate project *_pb2.py modules from proto files."""
"""Command to generate project *_pb2.py modules from proto files."""
description = 'build grpc protobuf modules'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# due to limitations of the proto generator, we require that only *one*
# directory is provided as an 'include' directory. We assume it's the '' key
# to `self.distribution.package_dir` (and get a key error if it's not
# there).
from grpc_tools import command
command.build_package_protos(self.distribution.package_dir[''])

"""Setup module for the GRPC Python package's optional reflection."""
import os
@ -41,18 +40,14 @@ os.chdir(os.path.dirname(os.path.abspath(__file__)))
import reflection_commands
import grpc_version
PACKAGE_DIRECTORIES = {'': '.',}
SETUP_REQUIRES = (
'grpcio-tools>={version}'.format(version=grpc_version.VERSION),)
INSTALL_REQUIRES = (
'protobuf>=3.0.0',
'grpcio>={version}'.format(version=grpc_version.VERSION),)
COMMAND_CLASS = {
# Run preprocess from the repository *before* doing any packaging!
@ -68,5 +63,4 @@ setuptools.setup(
packages=setuptools.find_packages('.'),
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS)

"""Provides distutils command classes for the gRPC Python setup process."""
import distutils
@ -55,163 +54,162 @@ PYTHON_PROTO_TOP_LEVEL = os.path.join(PYTHON_STEM, 'src')
class CommandError(Exception):
    pass
class GatherProto(setuptools.Command):
description = 'gather proto dependencies'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
# TODO(atash) ensure that we're running from the repository directory when
# this command is used
try:
shutil.rmtree(PROTO_STEM)
except Exception as error:
# We don't care if this command fails
pass
shutil.copytree(GRPC_PROTO_STEM, PROTO_STEM)
for root, _, _ in os.walk(PYTHON_PROTO_TOP_LEVEL):
path = os.path.join(root, '__init__.py')
open(path, 'a').close()
class BuildProtoModules(setuptools.Command):
"""Command to generate project *_pb2.py modules from proto files."""
description = 'build protobuf modules'
user_options = [
('include=', None, 'path patterns to include in protobuf generation'),
('exclude=', None, 'path patterns to exclude from protobuf generation')
]
def initialize_options(self):
self.exclude = None
self.include = r'.*\.proto$'
def finalize_options(self):
pass
def run(self):
import grpc_tools.protoc as protoc
include_regex = re.compile(self.include)
exclude_regex = re.compile(self.exclude) if self.exclude else None
paths = []
for walk_root, directories, filenames in os.walk(PROTO_STEM):
for filename in filenames:
path = os.path.join(walk_root, filename)
if include_regex.match(path) and not (
exclude_regex and exclude_regex.match(path)):
paths.append(path)
# TODO(kpayson): It would be nice to do this in a batch command,
# but we currently have name conflicts in src/proto
for path in paths:
command = [
'grpc_tools.protoc',
'-I {}'.format(PROTO_STEM),
'--python_out={}'.format(PROTO_STEM),
'--grpc_python_out={}'.format(PROTO_STEM),
] + [path]
if protoc.main(command) != 0:
sys.stderr.write(
'warning: Command:\n{}\nFailed'.format(
command))
# Generated proto directories dont include __init__.py, but
# these are needed for python package resolution
for walk_root, _, _ in os.walk(PROTO_STEM):
path = os.path.join(walk_root, '__init__.py')
open(path, 'a').close()
"""Command to generate project *_pb2.py modules from proto files."""
description = 'build protobuf modules'
user_options = [
('include=', None, 'path patterns to include in protobuf generation'),
('exclude=', None, 'path patterns to exclude from protobuf generation')
]
def initialize_options(self):
self.exclude = None
self.include = r'.*\.proto$'
def finalize_options(self):
pass
def run(self):
import grpc_tools.protoc as protoc
include_regex = re.compile(self.include)
exclude_regex = re.compile(self.exclude) if self.exclude else None
paths = []
for walk_root, directories, filenames in os.walk(PROTO_STEM):
for filename in filenames:
path = os.path.join(walk_root, filename)
if include_regex.match(path) and not (
exclude_regex and exclude_regex.match(path)):
paths.append(path)
# TODO(kpayson): It would be nice to do this in a batch command,
# but we currently have name conflicts in src/proto
for path in paths:
command = [
'grpc_tools.protoc',
'-I {}'.format(PROTO_STEM),
'--python_out={}'.format(PROTO_STEM),
'--grpc_python_out={}'.format(PROTO_STEM),
] + [path]
if protoc.main(command) != 0:
sys.stderr.write('warning: Command:\n{}\nFailed'.format(
command))
        # Generated proto directories don't include __init__.py, but
# these are needed for python package resolution
for walk_root, _, _ in os.walk(PROTO_STEM):
path = os.path.join(walk_root, '__init__.py')
open(path, 'a').close()
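
The per-file protoc invocation performed by the loop above can be reproduced
standalone. A sketch assuming grpcio-tools is installed and `proto_root`
points at the tree of .proto files:

import grpc_tools.protoc as protoc

def compile_proto(proto_root, proto_path):
    # Mirrors the command list built in BuildProtoModules.run().
    return protoc.main([
        'grpc_tools.protoc',
        '-I {}'.format(proto_root),
        '--python_out={}'.format(proto_root),
        '--grpc_python_out={}'.format(proto_root),
        proto_path,
    ])
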
class BuildPy(build_py.build_py):
"""Custom project build command."""
"""Custom project build command."""
def run(self):
try:
self.run_command('build_package_protos')
except CommandError as error:
sys.stderr.write('warning: %s\n' % error.message)
build_py.build_py.run(self)
class TestLite(setuptools.Command):
"""Command to run tests without fetching or building anything."""
"""Command to run tests without fetching or building anything."""
description = 'run tests without fetching or building anything.'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
# distutils requires this override.
pass
def run(self):
self._add_eggs_to_path()
import tests
loader = tests.Loader()
loader.loadTestsFromNames(['tests'])
runner = tests.Runner()
result = runner.run(loader.suite)
if not result.wasSuccessful():
sys.exit('Test failure')
def _add_eggs_to_path(self):
"""Fetch install and test requirements"""
self.distribution.fetch_build_eggs(self.distribution.install_requires)
self.distribution.fetch_build_eggs(self.distribution.tests_require)
class RunInterop(test.test):
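# Usage sketch for the command above, assuming it is registered under the
# name 'run_interop' in the package's cmdclass (as the setup() call below
# does); the flag values are illustrative only.
#
#   python setup.py run_interop --server --args='--port=8080'
#   python setup.py run_interop --client --args='--server_port=8080'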

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A setup module for the gRPC Python package."""
import os
@@ -48,9 +47,7 @@ import grpc_version
LICENSE = '3-clause BSD'
PACKAGE_DIRECTORIES = {'': '.',}
INSTALL_REQUIRES = (
'coverage>=4.0',
@@ -61,13 +58,11 @@ INSTALL_REQUIRES = (
'grpcio-health-checking>={version}'.format(version=grpc_version.VERSION),
'oauth2client>=1.4.7',
'protobuf>=3.0.0',
'six>=1.10',)
COMMAND_CLASS = {
# Run `preprocess` *before* doing any packaging!
'preprocess': commands.GatherProto,
'build_package_protos': grpc_tools.command.BuildPackageProtos,
'build_py': commands.BuildPy,
'run_interop': commands.RunInterop,
@@ -80,9 +75,7 @@ PACKAGE_DATA = {
'credentials/server1.key',
'credentials/server1.pem',
],
'tests.protoc_plugin.protos.invocation_testing': ['same.proto',],
'tests.protoc_plugin.protos.invocation_testing.split_messages': [
'messages.proto',
],
@@ -94,9 +87,7 @@ PACKAGE_DATA = {
'credentials/server1.key',
'credentials/server1.pem',
],
'tests': ['tests.json'],
}
TEST_SUITE = 'tests'
@@ -107,16 +98,15 @@ TESTS_REQUIRE = INSTALL_REQUIRES
PACKAGES = setuptools.find_packages('.')
setuptools.setup(
name='grpcio-tests',
version=grpc_version.VERSION,
license=LICENSE,
packages=list(PACKAGES),
package_dir=PACKAGE_DIRECTORIES,
package_data=PACKAGE_DATA,
install_requires=INSTALL_REQUIRES,
cmdclass=COMMAND_CLASS,
tests_require=TESTS_REQUIRE,
test_suite=TEST_SUITE,
test_loader=TEST_LOADER,
test_runner=TEST_RUNNER,)
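# As a minimal illustration of the cmdclass mechanism relied on above
# (the command name and class here are hypothetical, not part of this
# package): setuptools resolves `python setup.py <name>` through the
# mapping and drives initialize_options/finalize_options/run in order.
import setuptools as _setuptools_sketch


class _HelloCommand(_setuptools_sketch.Command):
    """Hypothetical command that prints a greeting."""
    description = 'print a greeting'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        print('hello from a custom setuptools command')

# _setuptools_sketch.setup(name='demo', version='0.0.0',
#                          cmdclass={'hello': _HelloCommand})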

@@ -40,7 +40,7 @@ TEST_MODULE_REGEX = r'^.*_test$'
class Loader(object):
"""Test loader for setuptools test suite support.
"""Test loader for setuptools test suite support.
Attributes:
suite (unittest.TestSuite): All tests collected by the loader.
@@ -51,57 +51,57 @@ class Loader(object):
contributes to the test suite.
"""
def __init__(self):
self.suite = unittest.TestSuite()
self.loader = unittest.TestLoader()
self.module_matcher = re.compile(TEST_MODULE_REGEX)
def loadTestsFromNames(self, names, module=None):
"""Function mirroring TestLoader::loadTestsFromNames, as expected by
setuptools.setup argument `test_loader`."""
# ensure that we capture decorators and definitions (else our coverage
# measure unnecessarily suffers)
coverage_context = coverage.Coverage(data_suffix=True)
coverage_context.start()
modules = [importlib.import_module(name) for name in names]
for module in modules:
self.visit_module(module)
for module in modules:
try:
package_paths = module.__path__
except:
continue
self.walk_packages(package_paths)
coverage_context.stop()
coverage_context.save()
return self.suite
def walk_packages(self, package_paths):
"""Walks over the packages, dispatching `visit_module` calls.
Args:
package_paths (list): A list of paths over which to walk through modules
along.
"""
for importer, module_name, is_package in (
pkgutil.walk_packages(package_paths)):
module = importer.find_module(module_name).load_module(module_name)
self.visit_module(module)
def visit_module(self, module):
"""Visits the module, adding discovered tests to the test suite.
Args:
module (module): Module to match against self.module_matcher; if matched
it has its tests loaded via self.loader into self.suite.
"""
if self.module_matcher.match(module.__name__):
module_suite = self.loader.loadTestsFromModule(module)
self.suite.addTest(module_suite)
def iterate_suite_cases(suite):
"""Generator over all unittest.TestCases in a unittest.TestSuite.
"""Generator over all unittest.TestCases in a unittest.TestSuite.
Args:
suite (unittest.TestSuite): Suite to iterate over in the generator.
@@ -109,11 +109,12 @@ def iterate_suite_cases(suite):
Returns:
generator: A generator over all unittest.TestCases in `suite`.
"""
for item in suite:
if isinstance(item, unittest.TestSuite):
for child_item in iterate_suite_cases(item):
yield child_item
elif isinstance(item, unittest.TestCase):
yield item
else:
raise ValueError('unexpected suite item of type {}'.format(
type(item)))
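# A small usage sketch for the loader and generator above; it assumes an
# importable 'tests' package and is not part of the module itself.
if __name__ == '__main__':
    _loader = Loader()
    _suite = _loader.loadTestsFromNames(['tests'])
    for _case in iterate_suite_cases(_suite):
        print(_case.id())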

@@ -41,9 +41,11 @@ from six import moves
from tests import _loader
class CaseResult(
collections.namedtuple('CaseResult', [
'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback'
])):
"""A serializable result of a single test case.
Attributes:
id (object): Any serializable object used to denote the identity of this
@@ -59,62 +61,78 @@ class CaseResult(collections.namedtuple('CaseResult', [
None.
"""
class Kind:
UNTESTED = 'untested'
RUNNING = 'running'
ERROR = 'error'
FAILURE = 'failure'
SUCCESS = 'success'
SKIP = 'skip'
EXPECTED_FAILURE = 'expected failure'
UNEXPECTED_SUCCESS = 'unexpected success'
def __new__(cls,
id=None,
name=None,
kind=None,
stdout=None,
stderr=None,
skip_reason=None,
traceback=None):
"""Helper keyword constructor for the namedtuple.
See this class' attributes for information on the arguments."""
assert id is not None
assert name is None or isinstance(name, str)
if kind is CaseResult.Kind.UNTESTED:
pass
elif kind is CaseResult.Kind.RUNNING:
pass
elif kind is CaseResult.Kind.ERROR:
assert traceback is not None
elif kind is CaseResult.Kind.FAILURE:
assert traceback is not None
elif kind is CaseResult.Kind.SUCCESS:
pass
elif kind is CaseResult.Kind.SKIP:
assert skip_reason is not None
elif kind is CaseResult.Kind.EXPECTED_FAILURE:
assert traceback is not None
elif kind is CaseResult.Kind.UNEXPECTED_SUCCESS:
pass
else:
assert False
return super(cls, CaseResult).__new__(cls, id, name, kind, stdout,
stderr, skip_reason, traceback)
def updated(self,
name=None,
kind=None,
stdout=None,
stderr=None,
skip_reason=None,
traceback=None):
"""Get a new validated CaseResult with the fields updated.
See this class' attributes for information on the arguments."""
name = self.name if name is None else name
kind = self.kind if kind is None else kind
stdout = self.stdout if stdout is None else stdout
stderr = self.stderr if stderr is None else stderr
skip_reason = self.skip_reason if skip_reason is None else skip_reason
traceback = self.traceback if traceback is None else traceback
return CaseResult(
id=self.id,
name=name,
kind=kind,
stdout=stdout,
stderr=stderr,
skip_reason=skip_reason,
traceback=traceback)
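# A brief sketch of the intended lifecycle: CaseResult is an immutable
# namedtuple, so state transitions replace the record rather than mutate
# it (the identifiers below are illustrative).
#
#   case = CaseResult(id='case-0', name='tests.DemoTest.test_ok',
#                     kind=CaseResult.Kind.RUNNING)
#   case = case.updated(kind=CaseResult.Kind.SUCCESS)
#   assert case.kind is CaseResult.Kind.SUCCESS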
class AugmentedResult(unittest.TestResult):
"""unittest.Result that keeps track of additional information.
"""unittest.Result that keeps track of additional information.
Uses CaseResult objects to store test-case results, providing additional
information beyond that of the standard Python unittest library, such as
@@ -127,228 +145,215 @@ class AugmentedResult(unittest.TestResult):
to CaseResult objects corresponding to those IDs.
"""
def __init__(self, id_map):
"""Initialize the object with an identifier mapping.
Arguments:
id_map (callable): Corresponds to the attribute `id_map`."""
super(AugmentedResult, self).__init__()
self.id_map = id_map
self.cases = None
def startTestRun(self):
"""See unittest.TestResult.startTestRun."""
super(AugmentedResult, self).startTestRun()
self.cases = dict()
def stopTestRun(self):
"""See unittest.TestResult.stopTestRun."""
super(AugmentedResult, self).stopTestRun()
def startTest(self, test):
"""See unittest.TestResult.startTest."""
super(AugmentedResult, self).startTest(test)
case_id = self.id_map(test)
self.cases[case_id] = CaseResult(
id=case_id, name=test.id(), kind=CaseResult.Kind.RUNNING)
def addError(self, test, error):
"""See unittest.TestResult.addError."""
super(AugmentedResult, self).addError(test, error)
case_id = self.id_map(test)
self.cases[case_id] = self.cases[case_id].updated(
kind=CaseResult.Kind.ERROR, traceback=error)
def addFailure(self, test, error):
"""See unittest.TestResult.addFailure."""
super(AugmentedResult, self).addFailure(test, error)
case_id = self.id_map(test)
self.cases[case_id] = self.cases[case_id].updated(
kind=CaseResult.Kind.FAILURE, traceback=error)
def addSuccess(self, test):
"""See unittest.TestResult.addSuccess."""
super(AugmentedResult, self).addSuccess(test)
case_id = self.id_map(test)
self.cases[case_id] = self.cases[case_id].updated(
kind=CaseResult.Kind.SUCCESS)
def addSkip(self, test, reason):
"""See unittest.TestResult.addSkip."""
super(AugmentedResult, self).addSkip(test, reason)
case_id = self.id_map(test)
self.cases[case_id] = self.cases[case_id].updated(
kind=CaseResult.Kind.SKIP, skip_reason=reason)
def addExpectedFailure(self, test, error):
"""See unittest.TestResult.addExpectedFailure."""
super(AugmentedResult, self).addExpectedFailure(test, error)
case_id = self.id_map(test)
self.cases[case_id] = self.cases[case_id].updated(
kind=CaseResult.Kind.EXPECTED_FAILURE, traceback=error)
def addUnexpectedSuccess(self, test):
"""See unittest.TestResult.addUnexpectedSuccess."""
super(AugmentedResult, self).addUnexpectedSuccess(test)
case_id = self.id_map(test)
self.cases[case_id] = self.cases[case_id].updated(
kind=CaseResult.Kind.UNEXPECTED_SUCCESS)
def set_output(self, test, stdout, stderr):
"""Set the output attributes for the CaseResult corresponding to a test.
Args:
test (unittest.TestCase): The TestCase to set the outputs of.
stdout (str): Output from stdout to assign to self.id_map(test).
stderr (str): Output from stderr to assign to self.id_map(test).
"""
case_id = self.id_map(test)
self.cases[case_id] = self.cases[case_id].updated(
stdout=stdout.decode(), stderr=stderr.decode())
def augmented_results(self, filter):
"""Convenience method to retrieve filtered case results.
Args:
filter (callable): A unary predicate to filter over CaseResult objects.
"""
return (self.cases[case_id] for case_id in self.cases
if filter(self.cases[case_id]))
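# For example, given a populated AugmentedResult named `result`
# (hypothetical here), the names of all failed cases can be collected as:
#
#   failed_names = [case.name
#                   for case in result.augmented_results(
#                       lambda case: case.kind is CaseResult.Kind.FAILURE)]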
class CoverageResult(AugmentedResult):
"""Extension to AugmentedResult adding coverage.py support per test.\
"""Extension to AugmentedResult adding coverage.py support per test.\
Attributes:
coverage_context (coverage.Coverage): coverage.py management object.
"""
def __init__(self, id_map):
"""See AugmentedResult.__init__."""
super(CoverageResult, self).__init__(id_map=id_map)
self.coverage_context = None
def startTest(self, test):
"""See unittest.TestResult.startTest.
Additionally initializes and begins code coverage tracking."""
super(CoverageResult, self).startTest(test)
self.coverage_context = coverage.Coverage(data_suffix=True)
self.coverage_context.start()
def stopTest(self, test):
"""See unittest.TestResult.stopTest.
Additionally stops and deinitializes code coverage tracking."""
super(CoverageResult, self).stopTest(test)
self.coverage_context.stop()
self.coverage_context.save()
self.coverage_context = None
def stopTestRun(self):
"""See unittest.TestResult.stopTestRun."""
super(CoverageResult, self).stopTestRun()
# TODO(atash): Dig deeper into why the following line fails to properly
# combine coverage data from the Cython plugin.
#coverage.Coverage().combine()
class _Colors:
"""Namespaced constants for terminal color magic numbers."""
HEADER = '\033[95m'
INFO = '\033[94m'
OK = '\033[92m'
WARN = '\033[93m'
FAIL = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
"""Namespaced constants for terminal color magic numbers."""
HEADER = '\033[95m'
INFO = '\033[94m'
OK = '\033[92m'
WARN = '\033[93m'
FAIL = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
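# A two-line sketch of how these ANSI escape codes compose on a
# color-capable terminal:
#
#   sys.stdout.write(_Colors.OK + 'SUCCESS' + _Colors.END + ' demo\n')
#   sys.stdout.write(_Colors.BOLD + _Colors.FAIL + 'FAIL' + _Colors.END + '\n')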
class TerminalResult(CoverageResult):
"""Extension to CoverageResult adding basic terminal reporting."""
"""Extension to CoverageResult adding basic terminal reporting."""
def __init__(self, out, id_map):
"""Initialize the result object.
Args:
out (file-like): Output file to which terminal-colored live results will
be written.
id_map (callable): See AugmentedResult.__init__.
"""
super(TerminalResult, self).__init__(id_map=id_map)
self.out = out
def startTestRun(self):
"""See unittest.TestResult.startTestRun."""
super(TerminalResult, self).startTestRun()
self.out.write(_Colors.HEADER + 'Testing gRPC Python...\n' +
_Colors.END)
def stopTestRun(self):
"""See unittest.TestResult.stopTestRun."""
super(TerminalResult, self).stopTestRun()
self.out.write(summary(self))
self.out.flush()
def addError(self, test, error):
"""See unittest.TestResult.addError."""
super(TerminalResult, self).addError(test, error)
self.out.write(_Colors.FAIL + 'ERROR {}\n'.format(test.id()) +
_Colors.END)
self.out.flush()
def addFailure(self, test, error):
"""See unittest.TestResult.addFailure."""
super(TerminalResult, self).addFailure(test, error)
self.out.write(_Colors.FAIL + 'FAILURE {}\n'.format(test.id()) +
_Colors.END)
self.out.flush()
def addSuccess(self, test):
"""See unittest.TestResult.addSuccess."""
super(TerminalResult, self).addSuccess(test)
self.out.write(_Colors.OK + 'SUCCESS {}\n'.format(test.id()) +
_Colors.END)
self.out.flush()
def addSkip(self, test, reason):
"""See unittest.TestResult.addSkip."""
super(TerminalResult, self).addSkip(test, reason)
self.out.write(_Colors.INFO + 'SKIP {}\n'.format(test.id()) +
_Colors.END)
self.out.flush()
def addExpectedFailure(self, test, error):
"""See unittest.TestResult.addExpectedFailure."""
super(TerminalResult, self).addExpectedFailure(test, error)
self.out.write(_Colors.INFO + 'FAILURE_OK {}\n'.format(test.id()) +
_Colors.END)
self.out.flush()
def addUnexpectedSuccess(self, test):
"""See unittest.TestResult.addUnexpectedSuccess."""
super(TerminalResult, self).addUnexpectedSuccess(test)
self.out.write(_Colors.INFO + 'UNEXPECTED_OK {}\n'.format(test.id()) +
_Colors.END)
self.out.flush()
def _traceback_string(type, value, trace):
"""Generate a descriptive string of a Python exception traceback.
"""Generate a descriptive string of a Python exception traceback.
Args:
type (class): The type of the exception.
@@ -358,12 +363,13 @@ def _traceback_string(type, value, trace):
Returns:
str: Formatted exception descriptive string.
"""
buffer = moves.cStringIO()
traceback.print_exception(type, value, trace, file=buffer)
return buffer.getvalue()
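# Illustrative call with a live exception triple from sys.exc_info():
#
#   try:
#       raise ValueError('demo')
#   except ValueError:
#       print(_traceback_string(*sys.exc_info()))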
def summary(result):
"""A summary string of a result object.
"""A summary string of a result object.
Args:
result (AugmentedResult): The result object to get the summary of.
@@ -371,62 +377,68 @@ def summary(result):
Returns:
str: The summary string.
"""
assert isinstance(result, AugmentedResult)
untested = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED))
running = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.RUNNING))
failures = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.FAILURE))
errors = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.ERROR))
successes = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS))
skips = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.SKIP))
expected_failures = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.EXPECTED_FAILURE
))
unexpected_successes = list(
result.augmented_results(
lambda case_result: case_result.kind is CaseResult.Kind.UNEXPECTED_SUCCESS
))
running_names = [case.name for case in running]
finished_count = (len(failures) + len(errors) + len(successes) +
len(expected_failures) + len(unexpected_successes))
statistics = ('{finished} tests finished:\n'
'\t{successful} successful\n'
'\t{unsuccessful} unsuccessful\n'
'\t{skipped} skipped\n'
'\t{expected_fail} expected failures\n'
'\t{unexpected_successful} unexpected successes\n'
'Interrupted Tests:\n'
'\t{interrupted}\n'.format(
finished=finished_count,
successful=len(successes),
unsuccessful=(len(failures) + len(errors)),
skipped=len(skips),
expected_fail=len(expected_failures),
unexpected_successful=len(unexpected_successes),
interrupted=str(running_names)))
tracebacks = '\n\n'.join(
[(_Colors.FAIL + '{test_name}' + _Colors.END + '\n' + _Colors.BOLD +
'traceback:' + _Colors.END + '\n' + '{traceback}\n' + _Colors.BOLD +
'stdout:' + _Colors.END + '\n' + '{stdout}\n' + _Colors.BOLD +
'stderr:' + _Colors.END + '\n' + '{stderr}\n').format(
test_name=result.name,
traceback=_traceback_string(*result.traceback),
stdout=result.stdout,
stderr=result.stderr)
for result in itertools.chain(failures, errors)])
notes = 'Unexpected successes: {}\n'.format(
[result.name for result in unexpected_successes])
return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes
def jenkins_junit_xml(result):
"""An XML tree object that when written is recognizable by Jenkins.
"""An XML tree object that when written is recognizable by Jenkins.
Args:
result (AugmentedResult): The result object to get the junit xml output of.
@@ -434,20 +446,18 @@ def jenkins_junit_xml(result):
Returns:
ElementTree.ElementTree: The XML tree.
"""
assert isinstance(result, AugmentedResult)
root = ElementTree.Element('testsuites')
suite = ElementTree.SubElement(root, 'testsuite', {
'name': 'Python gRPC tests',
})
for case in result.cases.values():
if case.kind is CaseResult.Kind.SUCCESS:
ElementTree.SubElement(suite, 'testcase', {'name': case.name,})
elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE):
case_xml = ElementTree.SubElement(suite, 'testcase', {
'name': case.name,
})
error_xml = ElementTree.SubElement(case_xml, 'error', {})
# include the captured stderr and traceback in the error element
error_xml.text = '{}\n{}'.format(case.stderr, case.traceback)
return ElementTree.ElementTree(element=root)

@@ -49,7 +49,7 @@ from tests import _result
class CaptureFile(object):
"""A context-managed file to redirect output to a byte array.
"""A context-managed file to redirect output to a byte array.
Use by invoking `start` (`__enter__`) and at some point invoking `stop`
(`__exit__`). At any point after the initial call to `start` call `output` to
@@ -66,57 +66,56 @@ class CaptureFile(object):
Only non-None when self is started.
"""
def __init__(self, fd):
self._redirected_fd = fd
self._saved_fd = os.dup(self._redirected_fd)
self._into_file = None
def output(self):
"""Get all output from the redirected-to file if it exists."""
if self._into_file:
self._into_file.seek(0)
return bytes(self._into_file.read())
else:
return bytes()
def start(self):
"""Start redirection of writes to the file descriptor."""
self._into_file = tempfile.TemporaryFile()
os.dup2(self._into_file.fileno(), self._redirected_fd)
def stop(self):
"""Stop redirection of writes to the file descriptor."""
# n.b. this dup2 call auto-closes self._redirected_fd
os.dup2(self._saved_fd, self._redirected_fd)
def write_bypass(self, value):
"""Bypass the redirection and write directly to the original file.
Arguments:
value (str): What to write to the original file.
"""
if six.PY3 and not isinstance(value, six.binary_type):
value = bytes(value, 'ascii')
if self._saved_fd is None:
os.write(self._redirected_fd, value)
else:
os.write(self._saved_fd, value)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
def close(self):
"""Close any resources used by self not closed by stop()."""
os.close(self._saved_fd)
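# Usage sketch (values illustrative): capture raw writes to stdout's file
# descriptor, restore it on exit, then report what was seen through the
# saved descriptor before releasing it.
#
#   capture = CaptureFile(sys.stdout.fileno())
#   with capture:  # start(): fd 1 now points at a temporary file
#       os.write(sys.stdout.fileno(), b'captured bytes\n')
#   capture.write_bypass('captured: {!r}\n'.format(capture.output()))
#   capture.close()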
class AugmentedCase(collections.namedtuple('AugmentedCase', ['case', 'id'])):
"""A test case with a guaranteed unique externally specified identifier.
Attributes:
case (unittest.TestCase): TestCase we're decorating with an additional
@@ -125,105 +124,107 @@ class AugmentedCase(collections.namedtuple('AugmentedCase', [
purposes.
"""
def __new__(cls, case, id=None):
if id is None:
id = uuid.uuid4()
return super(cls, AugmentedCase).__new__(cls, case, id)
class Runner(object):
def run(self, suite):
"""See setuptools' test_runner setup argument for information."""
# only run test cases with id starting with given prefix
testcase_filter = os.getenv('GRPC_PYTHON_TESTRUNNER_FILTER')
filtered_cases = []
for case in _loader.iterate_suite_cases(suite):
if not testcase_filter or case.id().startswith(testcase_filter):
filtered_cases.append(case)
# Ensure that every test case has no collision with any other test case in
# the augmented results.
augmented_cases = [
AugmentedCase(case, uuid.uuid4()) for case in filtered_cases
]
case_id_by_case = dict((augmented_case.case, augmented_case.id)
for augmented_case in augmented_cases)
result_out = moves.cStringIO()
result = _result.TerminalResult(
result_out, id_map=lambda case: case_id_by_case[case])
stdout_pipe = CaptureFile(sys.stdout.fileno())
stderr_pipe = CaptureFile(sys.stderr.fileno())
kill_flag = [False]
def sigint_handler(signal_number, frame):
if signal_number == signal.SIGINT:
kill_flag[0] = True # Python 2.7 not having 'local'... :-(
signal.signal(signal_number, signal.SIG_DFL)
def fault_handler(signal_number, frame):
stdout_pipe.write_bypass(
'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n'.format(
signal_number, stdout_pipe.output(), stderr_pipe.output()))
os._exit(1)
def check_kill_self():
if kill_flag[0]:
stdout_pipe.write_bypass('Stopping tests short...')
result.stopTestRun()
stdout_pipe.write_bypass(result_out.getvalue())
stdout_pipe.write_bypass('\ninterrupted stdout:\n{}\n'.format(
stdout_pipe.output().decode()))
stderr_pipe.write_bypass('\ninterrupted stderr:\n{}\n'.format(
stderr_pipe.output().decode()))
os._exit(1)
def try_set_handler(name, handler):
try:
signal.signal(getattr(signal, name), handler)
except AttributeError:
pass
try_set_handler('SIGINT', sigint_handler)
try_set_handler('SIGSEGV', fault_handler)
try_set_handler('SIGBUS', fault_handler)
try_set_handler('SIGABRT', fault_handler)
try_set_handler('SIGFPE', fault_handler)
try_set_handler('SIGILL', fault_handler)
# Sometimes output will lag after a test has successfully finished; we
# ignore such writes to our pipes.
try_set_handler('SIGPIPE', signal.SIG_IGN)
# Run the tests
result.startTestRun()
for augmented_case in augmented_cases:
sys.stdout.write('Running {}\n'.format(augmented_case.case.id(
)))
sys.stdout.flush()
case_thread = threading.Thread(
target=augmented_case.case.run, args=(result,))
try:
with stdout_pipe, stderr_pipe:
case_thread.start()
while case_thread.is_alive():
check_kill_self()
time.sleep(0)
case_thread.join()
except:
# re-raise the exception after forcing the with-block to end
raise
result.set_output(augmented_case.case,
stdout_pipe.output(), stderr_pipe.output())
sys.stdout.write(result_out.getvalue())
sys.stdout.flush()
result_out.truncate(0)
check_kill_self()
time.sleep(0)
case_thread.join()
except:
# re-raise the exception after forcing the with-block to end
raise
result.set_output(
augmented_case.case, stdout_pipe.output(), stderr_pipe.output())
sys.stdout.write(result_out.getvalue())
sys.stdout.flush()
result_out.truncate(0)
check_kill_self()
result.stopTestRun()
stdout_pipe.close()
stderr_pipe.close()
# Report results
sys.stdout.write(result_out.getvalue())
sys.stdout.flush()
signal.signal(signal.SIGINT, signal.SIG_DFL)
with open('report.xml', 'wb') as report_xml_file:
_result.jenkins_junit_xml(result).write(report_xml_file)
return result
result.stopTestRun()
stdout_pipe.close()
stderr_pipe.close()
# Report results
sys.stdout.write(result_out.getvalue())
sys.stdout.flush()
signal.signal(signal.SIGINT, signal.SIG_DFL)
with open('report.xml', 'wb') as report_xml_file:
_result.jenkins_junit_xml(result).write(report_xml_file)
return result
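# Normally setuptools drives this runner through the test_runner and
# test_loader arguments to setup(); direct use would look roughly like the
# following (the discovery pattern is an approximation of the `_test`
# module convention used by the loader):
#
#   import unittest
#   suite = unittest.TestLoader().discover('tests', pattern='*_test.py')
#   Runner().run(suite)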

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests of grpc_health.v1.health."""
import unittest
@@ -41,55 +40,55 @@ from tests.unit.framework.common import test_constants
class HealthServicerTest(unittest.TestCase):
def setUp(self):
servicer = health.HealthServicer()
servicer.set('', health_pb2.HealthCheckResponse.SERVING)
servicer.set('grpc.test.TestServiceServing',
health_pb2.HealthCheckResponse.SERVING)
servicer.set('grpc.test.TestServiceUnknown',
health_pb2.HealthCheckResponse.UNKNOWN)
servicer.set('grpc.test.TestServiceNotServing',
health_pb2.HealthCheckResponse.NOT_SERVING)
server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
self._server = grpc.server(server_pool)
port = self._server.add_insecure_port('[::]:0')
health_pb2.add_HealthServicer_to_server(servicer, self._server)
self._server.start()
channel = grpc.insecure_channel('localhost:%d' % port)
self._stub = health_pb2.HealthStub(channel)
def test_empty_service(self):
request = health_pb2.HealthCheckRequest()
resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
def test_serving_service(self):
request = health_pb2.HealthCheckRequest(
service='grpc.test.TestServiceServing')
resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.SERVING, resp.status)
def test_unknown_service(self):
request = health_pb2.HealthCheckRequest(
service='grpc.test.TestServiceUnknown')
resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.UNKNOWN, resp.status)
def test_not_serving_service(self):
request = health_pb2.HealthCheckRequest(
service='grpc.test.TestServiceNotServing')
resp = self._stub.Check(request)
self.assertEqual(health_pb2.HealthCheckResponse.NOT_SERVING, resp.status)
def test_not_found_service(self):
request = health_pb2.HealthCheckRequest(service='not-found')
with self.assertRaises(grpc.RpcError) as context:
resp = self._stub.Check(request)
self.assertEqual(grpc.StatusCode.NOT_FOUND, context.exception.code())
if __name__ == '__main__':
unittest.main(verbosity=2)

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Python client used to test negative http2 conditions."""
import argparse
@@ -35,29 +34,32 @@ import grpc
from src.proto.grpc.testing import test_pb2
from src.proto.grpc.testing import messages_pb2
def _validate_payload_type_and_length(response, expected_type, expected_length):
if response.payload.type is not expected_type:
raise ValueError('expected payload type %s, got %s' %
(expected_type, type(response.payload.type)))
elif len(response.payload.body) != expected_length:
raise ValueError('expected payload body size %d, got %d' %
(expected_length, len(response.payload.body)))
def _expect_status_code(call, expected_code):
if call.code() != expected_code:
raise ValueError('expected code %s, got %s' %
(expected_code, call.code()))
def _expect_status_details(call, expected_details):
if call.details() != expected_details:
raise ValueError('expected message %s, got %s' %
(expected_details, call.details()))
def _validate_status_code_and_details(call, expected_code, expected_details):
_expect_status_code(call, expected_code)
_expect_status_details(call, expected_details)
# common requests
_REQUEST_SIZE = 314159
@@ -68,86 +70,103 @@ _SIMPLE_REQUEST = messages_pb2.SimpleRequest(
response_size=_RESPONSE_SIZE,
payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE))
def _goaway(stub):
first_response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(first_response, messages_pb2.COMPRESSABLE,
_RESPONSE_SIZE)
second_response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(second_response,
messages_pb2.COMPRESSABLE, _RESPONSE_SIZE)
def _rst_after_header(stub):
resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
_validate_status_code_and_details(resp_future, grpc.StatusCode.UNAVAILABLE,
"")
def _rst_during_data(stub):
resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
_validate_status_code_and_details(resp_future, grpc.StatusCode.UNKNOWN, "")
def _rst_after_data(stub):
resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST)
_validate_payload_type_and_length(
next(resp_future), messages_pb2.COMPRESSABLE, _RESPONSE_SIZE)
_validate_status_code_and_details(resp_future, grpc.StatusCode.UNKNOWN, "")
def _ping(stub):
response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
_RESPONSE_SIZE)
def _max_streams(stub):
# send one req to ensure server sets MAX_STREAMS
response = stub.UnaryCall(_SIMPLE_REQUEST)
_validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
_RESPONSE_SIZE)
# give the streams a workout
futures = []
for _ in range(15):
futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST))
for future in futures:
_validate_payload_type_and_length(
future.result(), messages_pb2.COMPRESSABLE, _RESPONSE_SIZE)
def _run_test_case(test_case, stub):
if test_case == 'goaway':
_goaway(stub)
elif test_case == 'rst_after_header':
_rst_after_header(stub)
elif test_case == 'rst_during_data':
_rst_during_data(stub)
elif test_case == 'rst_after_data':
_rst_after_data(stub)
elif test_case == 'ping':
_ping(stub)
elif test_case == 'max_streams':
_max_streams(stub)
else:
raise ValueError("Invalid test case: %s" % test_case)
def _args():
parser = argparse.ArgumentParser()
parser.add_argument(
'--server_host',
help='the host to which to connect',
type=str,
default="127.0.0.1")
parser.add_argument(
'--server_port',
help='the port to which to connect',
type=int,
default="8080")
parser.add_argument(
'--test_case',
help='the test case to execute',
type=str,
default="goaway")
return parser.parse_args()
def _stub(server_host, server_port):
target = '{}:{}'.format(server_host, server_port)
channel = grpc.insecure_channel(target)
return test_pb2.TestServiceStub(channel)
def main():
args = _args()
stub = _stub(args.server_host, args.server_port)
_run_test_case(args.test_case, stub)
if __name__ == '__main__':
main()
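# Example invocation (the module filename is hypothetical; the flags must
# point at a running negative-HTTP/2 test server):
#
#   python negative_http2_client.py --server_host=127.0.0.1 \
#       --server_port=8080 --test_case=goaway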

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Insecure client-server interoperability as a unit test."""
from concurrent import futures
@@ -40,19 +39,18 @@ from tests.interop import methods
from tests.interop import server
class InsecureIntraopTest(_intraop_test_case.IntraopTestCase,
unittest.TestCase):
def setUp(self):
self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
test_pb2.add_TestServiceServicer_to_server(methods.TestService(),
self.server)
port = self.server.add_insecure_port('[::]:0')
self.server.start()
self.stub = test_pb2.TestServiceStub(
grpc.insecure_channel('localhost:{}'.format(port)))
if __name__ == '__main__':
unittest.main(verbosity=2)

@@ -26,39 +26,41 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Common code for unit tests of the interoperability test code."""
from tests.interop import methods
class IntraopTestCase(object):
"""Unit test methods.
"""Unit test methods.
This class must be mixed in with unittest.TestCase and a class that defines
setUp and tearDown methods that manage a stub attribute.
"""
def testEmptyUnary(self):
methods.TestCase.EMPTY_UNARY.test_interoperability(self.stub, None)
def testLargeUnary(self):
methods.TestCase.LARGE_UNARY.test_interoperability(self.stub, None)
def testServerStreaming(self):
methods.TestCase.SERVER_STREAMING.test_interoperability(self.stub, None)
def testClientStreaming(self):
methods.TestCase.CLIENT_STREAMING.test_interoperability(self.stub, None)
def testPingPong(self):
methods.TestCase.PING_PONG.test_interoperability(self.stub, None)
def testCancelAfterBegin(self):
methods.TestCase.CANCEL_AFTER_BEGIN.test_interoperability(self.stub,
None)
def testCancelAfterFirstResponse(self):
methods.TestCase.CANCEL_AFTER_FIRST_RESPONSE.test_interoperability(
self.stub, None)
def testTimeoutOnSleepingServer(self):
methods.TestCase.TIMEOUT_ON_SLEEPING_SERVER.test_interoperability(
self.stub, None)
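# Note the design choice: IntraopTestCase deliberately does not inherit
# unittest.TestCase, so test discovery only runs these methods once a
# concrete subclass mixes both in and installs a stub, e.g. (schematic):
#
#   class MyIntraopTest(IntraopTestCase, unittest.TestCase):
#       def setUp(self):
#           self.stub = ...  # TestService stub against a live server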

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Secure client-server interoperability as a unit test."""
from concurrent import futures
@@ -42,24 +41,24 @@ from tests.interop import resources
_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
class SecureIntraopTest(_intraop_test_case.IntraopTestCase, unittest.TestCase):
def setUp(self):
self.server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
test_pb2.add_TestServiceServicer_to_server(methods.TestService(),
self.server)
port = self.server.add_secure_port(
'[::]:0',
grpc.ssl_server_credentials(
[(resources.private_key(), resources.certificate_chain())]))
self.server.start()
self.stub = test_pb2.TestServiceStub(
grpc.secure_channel('localhost:{}'.format(port),
grpc.ssl_channel_credentials(
resources.test_root_certificates()), ((
'grpc.ssl_target_name_override',
_SERVER_HOST_OVERRIDE,),)))
if __name__ == '__main__':
    unittest.main(verbosity=2)

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Python implementation of the GRPC interoperability test client."""
import argparse
@@ -41,93 +40,107 @@ from tests.interop import resources
def _args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--server_host',
        help='the host to which to connect',
        type=str,
        default="127.0.0.1")
    parser.add_argument(
        '--server_port', help='the port to which to connect', type=int)
    parser.add_argument(
        '--test_case',
        help='the test case to execute',
        type=str,
        default="large_unary")
    parser.add_argument(
        '--use_tls',
        help='require a secure connection',
        default=False,
        type=resources.parse_bool)
    parser.add_argument(
        '--use_test_ca',
        help='replace platform root CAs with ca.pem',
        default=False,
        type=resources.parse_bool)
    parser.add_argument(
        '--server_host_override',
        default="foo.test.google.fr",
        help='the server host to which to claim to connect',
        type=str)
    parser.add_argument(
        '--oauth_scope', help='scope for OAuth tokens', type=str)
    parser.add_argument(
        '--default_service_account',
        help='email address of the default service account',
        type=str)
    return parser.parse_args()
def _application_default_credentials():
    return oauth2client_client.GoogleCredentials.get_application_default()
def _stub(args):
    target = '{}:{}'.format(args.server_host, args.server_port)
    if args.test_case == 'oauth2_auth_token':
        google_credentials = _application_default_credentials()
        scoped_credentials = google_credentials.create_scoped(
            [args.oauth_scope])
        access_token = scoped_credentials.get_access_token().access_token
        call_credentials = grpc.access_token_call_credentials(access_token)
    elif args.test_case == 'compute_engine_creds':
        google_credentials = _application_default_credentials()
        scoped_credentials = google_credentials.create_scoped(
            [args.oauth_scope])
        # TODO(https://github.com/grpc/grpc/issues/6799): Eliminate this last
        # remaining use of the Beta API.
        call_credentials = implementations.google_call_credentials(
            scoped_credentials)
    elif args.test_case == 'jwt_token_creds':
        google_credentials = _application_default_credentials()
        # TODO(https://github.com/grpc/grpc/issues/6799): Eliminate this last
        # remaining use of the Beta API.
        call_credentials = implementations.google_call_credentials(
            google_credentials)
    else:
        call_credentials = None
    if args.use_tls:
        if args.use_test_ca:
            root_certificates = resources.test_root_certificates()
        else:
            root_certificates = None  # will load default roots.
        channel_credentials = grpc.ssl_channel_credentials(root_certificates)
        if call_credentials is not None:
            channel_credentials = grpc.composite_channel_credentials(
                channel_credentials, call_credentials)
        channel = grpc.secure_channel(target, channel_credentials, ((
            'grpc.ssl_target_name_override',
            args.server_host_override,),))
    else:
        channel = grpc.insecure_channel(target)
    if args.test_case == "unimplemented_service":
        return test_pb2.UnimplementedServiceStub(channel)
    else:
        return test_pb2.TestServiceStub(channel)
def _test_case_from_arg(test_case_arg):
    for test_case in methods.TestCase:
        if test_case_arg == test_case.value:
            return test_case
    else:
        raise ValueError('No test case "%s"!' % test_case_arg)
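Since each TestCase value is the flag string itself, the loop above is equivalent to Enum's by-value constructor, which already raises ValueError for unknown values; a sketch of that alternative:

# Sketch: equivalent lookup via Enum's by-value constructor.  The only
# behavioral difference is the wording of the ValueError message.
def _test_case_from_arg(test_case_arg):
    return methods.TestCase(test_case_arg)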
def test_interoperability():
    args = _args()
    stub = _stub(args)
    test_case = _test_case_from_arg(args.test_case)
    test_case.test_interoperability(stub, args)


if __name__ == '__main__':
    test_interoperability()

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Implementations of interoperability test methods."""
import enum
@@ -46,463 +45,483 @@ from src.proto.grpc.testing import test_pb2
_INITIAL_METADATA_KEY = "x-grpc-test-echo-initial"
_TRAILING_METADATA_KEY = "x-grpc-test-echo-trailing-bin"
def _maybe_echo_metadata(servicer_context):
    """Copies metadata from request to response if it is present."""
    invocation_metadata = dict(servicer_context.invocation_metadata())
    if _INITIAL_METADATA_KEY in invocation_metadata:
        initial_metadatum = (_INITIAL_METADATA_KEY,
                             invocation_metadata[_INITIAL_METADATA_KEY])
        servicer_context.send_initial_metadata((initial_metadatum,))
    if _TRAILING_METADATA_KEY in invocation_metadata:
        trailing_metadatum = (_TRAILING_METADATA_KEY,
                              invocation_metadata[_TRAILING_METADATA_KEY])
        servicer_context.set_trailing_metadata((trailing_metadatum,))
def _maybe_echo_status_and_message(request, servicer_context):
    """Sets the response context code and details if the request asks for them."""
    if request.HasField('response_status'):
        servicer_context.set_code(request.response_status.code)
        servicer_context.set_details(request.response_status.message)
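For reference, a request that exercises this path carries an EchoStatus; the _status_code_and_message helper below builds exactly this shape (values here mirror that helper):

# Sketch: a request asking the server to echo a non-OK status back.
request = messages_pb2.SimpleRequest(
    response_type=messages_pb2.COMPRESSABLE,
    response_size=1,
    payload=messages_pb2.Payload(body=b'\x00'),
    response_status=messages_pb2.EchoStatus(
        code=2,  # surfaces as grpc.StatusCode.UNKNOWN on the client
        message='test status message'))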
class TestService(test_pb2.TestServiceServicer):

    def EmptyCall(self, request, context):
        _maybe_echo_metadata(context)
        return empty_pb2.Empty()

    def UnaryCall(self, request, context):
        _maybe_echo_metadata(context)
        _maybe_echo_status_and_message(request, context)
        return messages_pb2.SimpleResponse(payload=messages_pb2.Payload(
            type=messages_pb2.COMPRESSABLE,
            body=b'\x00' * request.response_size))

    def StreamingOutputCall(self, request, context):
        _maybe_echo_status_and_message(request, context)
        for response_parameters in request.response_parameters:
            yield messages_pb2.StreamingOutputCallResponse(
                payload=messages_pb2.Payload(
                    type=request.response_type,
                    body=b'\x00' * response_parameters.size))

    def StreamingInputCall(self, request_iterator, context):
        aggregate_size = 0
        for request in request_iterator:
            if request.payload is not None and request.payload.body:
                aggregate_size += len(request.payload.body)
        return messages_pb2.StreamingInputCallResponse(
            aggregated_payload_size=aggregate_size)

    def FullDuplexCall(self, request_iterator, context):
        _maybe_echo_metadata(context)
        for request in request_iterator:
            _maybe_echo_status_and_message(request, context)
            for response_parameters in request.response_parameters:
                yield messages_pb2.StreamingOutputCallResponse(
                    payload=messages_pb2.Payload(
                        type=request.payload.type,
                        body=b'\x00' * response_parameters.size))

    # NOTE(nathaniel): Apparently this is the same as the full-duplex call?
    # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)...
    def HalfDuplexCall(self, request_iterator, context):
        return self.FullDuplexCall(request_iterator, context)
def _expect_status_code(call, expected_code):
    if call.code() != expected_code:
        raise ValueError('expected code %s, got %s' %
                         (expected_code, call.code()))
def _expect_status_details(call, expected_details):
    if call.details() != expected_details:
        raise ValueError('expected message %s, got %s' %
                         (expected_details, call.details()))
def _validate_status_code_and_details(call, expected_code, expected_details):
    _expect_status_code(call, expected_code)
    _expect_status_details(call, expected_details)
def _validate_payload_type_and_length(response, expected_type, expected_length):
    if response.payload.type is not expected_type:
        raise ValueError('expected payload type %s, got %s' %
                         (expected_type, type(response.payload.type)))
    elif len(response.payload.body) != expected_length:
        raise ValueError('expected payload body size %d, got %d' %
                         (expected_length, len(response.payload.body)))


def _large_unary_common_behavior(stub, fill_username, fill_oauth_scope,
                                 call_credentials):
    size = 314159
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=size,
        payload=messages_pb2.Payload(body=b'\x00' * 271828),
        fill_username=fill_username,
        fill_oauth_scope=fill_oauth_scope)
    response_future = stub.UnaryCall.future(
        request, credentials=call_credentials)
    response = response_future.result()
    _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, size)
    return response
def _empty_unary(stub):
    response = stub.EmptyCall(empty_pb2.Empty())
    if not isinstance(response, empty_pb2.Empty):
        raise TypeError('response is of type "%s", not empty_pb2.Empty!' %
                        type(response))
def _large_unary(stub):
    _large_unary_common_behavior(stub, False, False, None)
def _client_streaming(stub):
    payload_body_sizes = (
        27182,
        8,
        1828,
        45904,)
    payloads = (messages_pb2.Payload(body=b'\x00' * size)
                for size in payload_body_sizes)
    requests = (messages_pb2.StreamingInputCallRequest(payload=payload)
                for payload in payloads)
    response = stub.StreamingInputCall(requests)
    if response.aggregated_payload_size != 74922:
        raise ValueError('incorrect size %d!' %
                         response.aggregated_payload_size)
def _server_streaming(stub):
    sizes = (
        31415,
        9,
        2653,
        58979,)
    request = messages_pb2.StreamingOutputCallRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_parameters=(
            messages_pb2.ResponseParameters(size=sizes[0]),
            messages_pb2.ResponseParameters(size=sizes[1]),
            messages_pb2.ResponseParameters(size=sizes[2]),
            messages_pb2.ResponseParameters(size=sizes[3]),))
    response_iterator = stub.StreamingOutputCall(request)
    for index, response in enumerate(response_iterator):
        _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE,
                                          sizes[index])
class _Pipe(object):

    def __init__(self):
        self._condition = threading.Condition()
        self._values = []
        self._open = True

    def __iter__(self):
        return self

    def __next__(self):
        return self.next()

    def next(self):
        with self._condition:
            while not self._values and self._open:
                self._condition.wait()
            if self._values:
                return self._values.pop(0)
            else:
                raise StopIteration()

    def add(self, value):
        with self._condition:
            self._values.append(value)
            self._condition.notify()

    def close(self):
        with self._condition:
            self._open = False
            self._condition.notify()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
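The class above is the request-iterator half of every bidirectional test below: the pipe is handed to the stub as the request stream, requests are pushed in, and responses are pulled from the call. A minimal sketch of the pattern, assuming a ready stub and a prepared request:

# Sketch of the _Pipe pattern (names `stub` and `request` are assumed).
with _Pipe() as pipe:
    response_iterator = stub.FullDuplexCall(pipe)  # pipe feeds requests
    pipe.add(request)                   # unblocks the pipe's next()
    response = next(response_iterator)  # pull the matching response
# Exiting the block calls close(), ending the request stream cleanly.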
def _ping_pong(stub):
    request_response_sizes = (
        31415,
        9,
        2653,
        58979,)
    request_payload_sizes = (
        27182,
        8,
        1828,
        45904,)
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        for response_size, payload_size in zip(request_response_sizes,
                                               request_payload_sizes):
            request = messages_pb2.StreamingOutputCallRequest(
                response_type=messages_pb2.COMPRESSABLE,
                response_parameters=(
                    messages_pb2.ResponseParameters(size=response_size),),
                payload=messages_pb2.Payload(body=b'\x00' * payload_size))
            pipe.add(request)
            response = next(response_iterator)
            _validate_payload_type_and_length(
                response, messages_pb2.COMPRESSABLE, response_size)
def _cancel_after_begin(stub):
    with _Pipe() as pipe:
        response_future = stub.StreamingInputCall.future(pipe)
        response_future.cancel()
        if not response_future.cancelled():
            raise ValueError('expected cancelled method to return True')
        if response_future.code() is not grpc.StatusCode.CANCELLED:
            raise ValueError('expected status code CANCELLED')
def _cancel_after_first_response(stub):
    request_response_sizes = (
        31415,
        9,
        2653,
        58979,)
    request_payload_sizes = (
        27182,
        8,
        1828,
        45904,)
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        response_size = request_response_sizes[0]
        payload_size = request_payload_sizes[0]
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(
                messages_pb2.ResponseParameters(size=response_size),),
            payload=messages_pb2.Payload(body=b'\x00' * payload_size))
        pipe.add(request)
        response = next(response_iterator)
        # We test the contents of `response` in the Ping Pong test - don't check
        # them here.
        response_iterator.cancel()
        try:
            next(response_iterator)
        except grpc.RpcError as rpc_error:
            if rpc_error.code() is not grpc.StatusCode.CANCELLED:
                raise
        else:
            raise ValueError('expected call to be cancelled')
def _timeout_on_sleeping_server(stub):
    request_payload_size = 27182
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe, timeout=0.001)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            payload=messages_pb2.Payload(body=b'\x00' * request_payload_size))
        pipe.add(request)
        try:
            next(response_iterator)
        except grpc.RpcError as rpc_error:
            if rpc_error.code() is not grpc.StatusCode.DEADLINE_EXCEEDED:
                raise
        else:
            raise ValueError('expected call to exceed deadline')
def _empty_stream(stub):
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        pipe.close()
        try:
            next(response_iterator)
            raise ValueError('expected exactly 0 responses')
        except StopIteration:
            pass
def _status_code_and_message(stub):
    details = 'test status message'
    code = 2
    status = grpc.StatusCode.UNKNOWN  # code = 2

    # Test with a UnaryCall
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b'\x00'),
        response_status=messages_pb2.EchoStatus(
            code=code, message=details))
    response_future = stub.UnaryCall.future(request)
    _validate_status_code_and_details(response_future, status, details)

    # Test with a FullDuplexCall
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),),
            payload=messages_pb2.Payload(body=b'\x00'),
            response_status=messages_pb2.EchoStatus(
                code=code, message=details))
        pipe.add(request)  # sends the initial request.
    # Dropping out of with block closes the pipe
    _validate_status_code_and_details(response_iterator, status, details)
def _unimplemented_method(test_service_stub):
    response_future = (
        test_service_stub.UnimplementedCall.future(empty_pb2.Empty()))
    _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED)
def _unimplemented_service(unimplemented_service_stub):
    response_future = (
        unimplemented_service_stub.UnimplementedCall.future(empty_pb2.Empty()))
    _expect_status_code(response_future, grpc.StatusCode.UNIMPLEMENTED)
def _custom_metadata(stub):
    initial_metadata_value = "test_initial_metadata_value"
    trailing_metadata_value = "\x0a\x0b\x0a\x0b\x0a\x0b"
    metadata = ((_INITIAL_METADATA_KEY, initial_metadata_value),
                (_TRAILING_METADATA_KEY, trailing_metadata_value))

    def _validate_metadata(response):
        initial_metadata = dict(response.initial_metadata())
        if initial_metadata[_INITIAL_METADATA_KEY] != initial_metadata_value:
            raise ValueError('expected initial metadata %s, got %s' %
                             (initial_metadata_value,
                              initial_metadata[_INITIAL_METADATA_KEY]))
        trailing_metadata = dict(response.trailing_metadata())
        if trailing_metadata[_TRAILING_METADATA_KEY] != trailing_metadata_value:
            raise ValueError('expected trailing metadata %s, got %s' %
                             (trailing_metadata_value,
                              trailing_metadata[_TRAILING_METADATA_KEY]))

    # Testing with UnaryCall
    request = messages_pb2.SimpleRequest(
        response_type=messages_pb2.COMPRESSABLE,
        response_size=1,
        payload=messages_pb2.Payload(body=b'\x00'))
    response_future = stub.UnaryCall.future(request, metadata=metadata)
    _validate_metadata(response_future)

    # Testing with FullDuplexCall
    with _Pipe() as pipe:
        response_iterator = stub.FullDuplexCall(pipe, metadata=metadata)
        request = messages_pb2.StreamingOutputCallRequest(
            response_type=messages_pb2.COMPRESSABLE,
            response_parameters=(messages_pb2.ResponseParameters(size=1),))
        pipe.add(request)  # Sends the request
        next(response_iterator)  # Causes server to send trailing metadata
    # Dropping out of the with block closes the pipe
    _validate_metadata(response_iterator)
def _compute_engine_creds(stub, args):
    response = _large_unary_common_behavior(stub, True, True, None)
    if args.default_service_account != response.username:
        raise ValueError('expected username %s, got %s' %
                         (args.default_service_account, response.username))
def _oauth2_auth_token(stub, args):
    json_key_filename = os.environ[
        oauth2client_client.GOOGLE_APPLICATION_CREDENTIALS]
    wanted_email = json.load(open(json_key_filename, 'rb'))['client_email']
    response = _large_unary_common_behavior(stub, True, True, None)
    if wanted_email != response.username:
        raise ValueError('expected username %s, got %s' %
                         (wanted_email, response.username))
    if args.oauth_scope.find(response.oauth_scope) == -1:
        raise ValueError('expected to find oauth scope "{}" in received "{}"'.
                         format(response.oauth_scope, args.oauth_scope))
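The username and scope validated here are attached on the client side; the interop client's _stub mints the token roughly as sketched below (mirroring its oauth2_auth_token branch; the oauth_scope name is assumed):

# Sketch: how the matching client obtains the per-call credentials
# that this test validates.
google_credentials = (
    oauth2client_client.GoogleCredentials.get_application_default())
scoped_credentials = google_credentials.create_scoped([oauth_scope])
access_token = scoped_credentials.get_access_token().access_token
call_credentials = grpc.access_token_call_credentials(access_token)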
def _jwt_token_creds(stub, args):
    json_key_filename = os.environ[
        oauth2client_client.GOOGLE_APPLICATION_CREDENTIALS]
    wanted_email = json.load(open(json_key_filename, 'rb'))['client_email']
    response = _large_unary_common_behavior(stub, True, False, None)
    if wanted_email != response.username:
        raise ValueError('expected username %s, got %s' %
                         (wanted_email, response.username))
def _per_rpc_creds(stub, args):
    json_key_filename = os.environ[
        oauth2client_client.GOOGLE_APPLICATION_CREDENTIALS]
    wanted_email = json.load(open(json_key_filename, 'rb'))['client_email']
    credentials = (
        oauth2client_client.GoogleCredentials.get_application_default())
    scoped_credentials = credentials.create_scoped([args.oauth_scope])
    # TODO(https://github.com/grpc/grpc/issues/6799): Eliminate this last
    # remaining use of the Beta API.
    call_credentials = implementations.google_call_credentials(
        scoped_credentials)
    response = _large_unary_common_behavior(stub, True, False, call_credentials)
    if wanted_email != response.username:
        raise ValueError('expected username %s, got %s' %
                         (wanted_email, response.username))
@enum.unique
class TestCase(enum.Enum):
    EMPTY_UNARY = 'empty_unary'
    LARGE_UNARY = 'large_unary'
    SERVER_STREAMING = 'server_streaming'
    CLIENT_STREAMING = 'client_streaming'
    PING_PONG = 'ping_pong'
    CANCEL_AFTER_BEGIN = 'cancel_after_begin'
    CANCEL_AFTER_FIRST_RESPONSE = 'cancel_after_first_response'
    EMPTY_STREAM = 'empty_stream'
    STATUS_CODE_AND_MESSAGE = 'status_code_and_message'
    UNIMPLEMENTED_METHOD = 'unimplemented_method'
    UNIMPLEMENTED_SERVICE = 'unimplemented_service'
    CUSTOM_METADATA = "custom_metadata"
    COMPUTE_ENGINE_CREDS = 'compute_engine_creds'
    OAUTH2_AUTH_TOKEN = 'oauth2_auth_token'
    JWT_TOKEN_CREDS = 'jwt_token_creds'
    PER_RPC_CREDS = 'per_rpc_creds'
    TIMEOUT_ON_SLEEPING_SERVER = 'timeout_on_sleeping_server'

    def test_interoperability(self, stub, args):
        if self is TestCase.EMPTY_UNARY:
            _empty_unary(stub)
        elif self is TestCase.LARGE_UNARY:
            _large_unary(stub)
        elif self is TestCase.SERVER_STREAMING:
            _server_streaming(stub)
        elif self is TestCase.CLIENT_STREAMING:
            _client_streaming(stub)
        elif self is TestCase.PING_PONG:
            _ping_pong(stub)
        elif self is TestCase.CANCEL_AFTER_BEGIN:
            _cancel_after_begin(stub)
        elif self is TestCase.CANCEL_AFTER_FIRST_RESPONSE:
            _cancel_after_first_response(stub)
        elif self is TestCase.TIMEOUT_ON_SLEEPING_SERVER:
            _timeout_on_sleeping_server(stub)
        elif self is TestCase.EMPTY_STREAM:
            _empty_stream(stub)
        elif self is TestCase.STATUS_CODE_AND_MESSAGE:
            _status_code_and_message(stub)
        elif self is TestCase.UNIMPLEMENTED_METHOD:
            _unimplemented_method(stub)
        elif self is TestCase.UNIMPLEMENTED_SERVICE:
            _unimplemented_service(stub)
        elif self is TestCase.CUSTOM_METADATA:
            _custom_metadata(stub)
        elif self is TestCase.COMPUTE_ENGINE_CREDS:
            _compute_engine_creds(stub, args)
        elif self is TestCase.OAUTH2_AUTH_TOKEN:
            _oauth2_auth_token(stub, args)
        elif self is TestCase.JWT_TOKEN_CREDS:
            _jwt_token_creds(stub, args)
        elif self is TestCase.PER_RPC_CREDS:
            _per_rpc_creds(stub, args)
        else:
            raise NotImplementedError('Test case "%s" not implemented!' %
                                      self.name)
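Driving a single case by hand from a consumer module is a two-liner; a sketch, assuming a connected TestServiceStub named stub:

# Sketch: run one interop case directly.  `args` is only consulted by
# the credential cases (compute_engine_creds, oauth2_auth_token, ...),
# so None suffices for the plain RPC cases.
test_case = methods.TestCase('ping_pong')  # or methods.TestCase.PING_PONG
test_case.test_interoperability(stub, None)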

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Constants and functions for data used in interoperability testing."""
import argparse
@@ -40,22 +39,22 @@ _CERTIFICATE_CHAIN_RESOURCE_PATH = 'credentials/server1.pem'
def test_root_certificates():
    return pkg_resources.resource_string(__name__,
                                         _ROOT_CERTIFICATES_RESOURCE_PATH)
def private_key():
    return pkg_resources.resource_string(__name__, _PRIVATE_KEY_RESOURCE_PATH)
def certificate_chain():
    return pkg_resources.resource_string(__name__,
                                         _CERTIFICATE_CHAIN_RESOURCE_PATH)
def parse_bool(value):
    if value == 'true':
        return True
    if value == 'false':
        return False
    raise argparse.ArgumentTypeError('Only true/false allowed')
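parse_bool is written as an argparse type converter, which is how the interop client and server wire it up; a compact sketch:

# Sketch: parse_bool as an argparse `type=` converter, mirroring the
# --use_tls flags defined in the interop client and server.
import argparse
from tests.interop import resources

parser = argparse.ArgumentParser()
parser.add_argument('--use_tls', default=False, type=resources.parse_bool)
print(parser.parse_args(['--use_tls', 'true']).use_tls)  # True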

@@ -26,7 +26,6 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""The Python implementation of the GRPC interoperability test server."""
import argparse
@@ -44,34 +43,36 @@ _ONE_DAY_IN_SECONDS = 60 * 60 * 24
def serve():
    parser = argparse.ArgumentParser()
    parser.add_argument('--port', help='the port on which to serve', type=int)
    parser.add_argument(
        '--use_tls',
        help='require a secure connection',
        default=False,
        type=resources.parse_bool)
    args = parser.parse_args()

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    test_pb2.add_TestServiceServicer_to_server(methods.TestService(), server)
    if args.use_tls:
        private_key = resources.private_key()
        certificate_chain = resources.certificate_chain()
        credentials = grpc.ssl_server_credentials(
            ((private_key, certificate_chain),))
        server.add_secure_port('[::]:{}'.format(args.port), credentials)
    else:
        server.add_insecure_port('[::]:{}'.format(args.port))

    server.start()
    logging.info('Server serving.')
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except BaseException as e:
        logging.info('Caught exception "%s"; stopping server...', e)
        server.stop(None)
        logging.info('Server stopped; exiting.')


if __name__ == '__main__':
    serve()
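serve() tears down with server.stop(None), which cancels in-flight RPCs immediately; passing a grace period instead returns a threading.Event that is set once shutdown completes. A sketch of the graceful variant, assuming the same server object:

# Sketch: graceful shutdown.  stop() returns a threading.Event;
# wait() blocks until the server has fully stopped.
stop_event = server.stop(30)  # allow up to 30s for in-flight RPCs
stop_event.wait()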

@@ -26,5 +26,3 @@
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -58,436 +58,440 @@ ADD_SERVICER_TO_SERVER_IDENTIFIER = 'add_TestServiceServicer_to_server'
class _ServicerMethods(object):

    def __init__(self):
        self._condition = threading.Condition()
        self._paused = False
        self._fail = False

    @contextlib.contextmanager
    def pause(self):  # pylint: disable=invalid-name
        with self._condition:
            self._paused = True
        yield
        with self._condition:
            self._paused = False
            self._condition.notify_all()

    @contextlib.contextmanager
    def fail(self):  # pylint: disable=invalid-name
        with self._condition:
            self._fail = True
        yield
        with self._condition:
            self._fail = False

    def _control(self):  # pylint: disable=invalid-name
        with self._condition:
            if self._fail:
                raise ValueError()
            while self._paused:
                self._condition.wait()

    def UnaryCall(self, request, unused_rpc_context):
        response = response_pb2.SimpleResponse()
        response.payload.payload_type = payload_pb2.COMPRESSABLE
        response.payload.payload_compressable = 'a' * request.response_size
        self._control()
        return response

    def StreamingOutputCall(self, request, unused_rpc_context):
        for parameter in request.response_parameters:
            response = response_pb2.StreamingOutputCallResponse()
            response.payload.payload_type = payload_pb2.COMPRESSABLE
            response.payload.payload_compressable = 'a' * parameter.size
            self._control()
            yield response

    def StreamingInputCall(self, request_iter, unused_rpc_context):
        response = response_pb2.StreamingInputCallResponse()
        aggregated_payload_size = 0
        for request in request_iter:
            aggregated_payload_size += len(request.payload.payload_compressable)
        response.aggregated_payload_size = aggregated_payload_size
        self._control()
        return response

    def FullDuplexCall(self, request_iter, unused_rpc_context):
        for request in request_iter:
            for parameter in request.response_parameters:
                response = response_pb2.StreamingOutputCallResponse()
                response.payload.payload_type = payload_pb2.COMPRESSABLE
                response.payload.payload_compressable = 'a' * parameter.size
                self._control()
                yield response

    def HalfDuplexCall(self, request_iter, unused_rpc_context):
        responses = []
        for request in request_iter:
            for parameter in request.response_parameters:
                response = response_pb2.StreamingOutputCallResponse()
                response.payload.payload_type = payload_pb2.COMPRESSABLE
                response.payload.payload_compressable = 'a' * parameter.size
                self._control()
                responses.append(response)
        for response in responses:
            yield response
class _Service(
        collections.namedtuple('_Service', (
            'servicer_methods',
            'server',
            'stub',))):
    """A live and running service.

    Attributes:
      servicer_methods: The _ServicerMethods servicing RPCs.
      server: The grpc.Server servicing RPCs.
      stub: A stub on which to invoke RPCs.
    """
def _CreateService():
    """Provides a servicer backend and a stub.

    Returns:
      A _Service with which to test RPCs.
    """
    servicer_methods = _ServicerMethods()

    class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):

        def UnaryCall(self, request, context):
            return servicer_methods.UnaryCall(request, context)

        def StreamingOutputCall(self, request, context):
            return servicer_methods.StreamingOutputCall(request, context)

        def StreamingInputCall(self, request_iter, context):
            return servicer_methods.StreamingInputCall(request_iter, context)

        def FullDuplexCall(self, request_iter, context):
            return servicer_methods.FullDuplexCall(request_iter, context)

        def HalfDuplexCall(self, request_iter, context):
            return servicer_methods.HalfDuplexCall(request_iter, context)

    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=test_constants.POOL_SIZE))
    getattr(service_pb2, ADD_SERVICER_TO_SERVER_IDENTIFIER)(Servicer(), server)
    port = server.add_insecure_port('[::]:0')
    server.start()
    channel = grpc.insecure_channel('localhost:{}'.format(port))
    stub = getattr(service_pb2, STUB_IDENTIFIER)(channel)
    return _Service(servicer_methods, server, stub)
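The pause() and fail() context managers on _ServicerMethods are the levers the tests below pull to simulate a slow or failing backend. A condensed sketch of the pattern, with the service and stub names as created by _CreateService above:

# Sketch: drive a paused server with the futures API.  While paused,
# _control() blocks in the servicer, so the future cannot complete.
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)

with service.servicer_methods.pause():
    response_future = service.stub.UnaryCall.future(request)
response = response_future.result()  # completes once pause() exits

# Sketch: a failing server turns the RPC into an errored future.
with service.servicer_methods.fail():
    failed_future = service.stub.UnaryCall.future(request)
    assert failed_future.exception() is not None
    assert failed_future.code() is grpc.StatusCode.UNKNOWN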
def _CreateIncompleteService():
    """Provides a servicer backend that fails to implement methods and its stub.

    Returns:
      A _Service with which to test RPCs. The returned _Service's
      servicer_methods implements none of the methods required of it.
    """

    class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):
        pass

    server = grpc.server(
        futures.ThreadPoolExecutor(max_workers=test_constants.POOL_SIZE))
    getattr(service_pb2, ADD_SERVICER_TO_SERVER_IDENTIFIER)(Servicer(), server)
    port = server.add_insecure_port('[::]:0')
    server.start()
    channel = grpc.insecure_channel('localhost:{}'.format(port))
    stub = getattr(service_pb2, STUB_IDENTIFIER)(channel)
    return _Service(None, server, stub)
def _streaming_input_request_iterator():
    for _ in range(3):
        request = request_pb2.StreamingInputCallRequest()
        request.payload.payload_type = payload_pb2.COMPRESSABLE
        request.payload.payload_compressable = 'a'
        yield request
def _streaming_output_request():
    request = request_pb2.StreamingOutputCallRequest()
    sizes = [1, 2, 3]
    request.response_parameters.add(size=sizes[0], interval_us=0)
    request.response_parameters.add(size=sizes[1], interval_us=0)
    request.response_parameters.add(size=sizes[2], interval_us=0)
    return request
def _full_duplex_request_iterator():
    request = request_pb2.StreamingOutputCallRequest()
    request.response_parameters.add(size=1, interval_us=0)
    yield request
    request = request_pb2.StreamingOutputCallRequest()
    request.response_parameters.add(size=2, interval_us=0)
    request.response_parameters.add(size=3, interval_us=0)
    yield request
class PythonPluginTest(unittest.TestCase):
    """Test case for the gRPC Python protoc-plugin.

    While reading these tests, remember that the futures API
    (`stub.method.future()`) only gives futures for the *response-unary*
    methods and does not exist for response-streaming methods.
    """
def testImportAttributes(self):
# check that we can access the generated module and its members.
self.assertIsNotNone(
getattr(service_pb2, STUB_IDENTIFIER, None))
self.assertIsNotNone(
getattr(service_pb2, SERVICER_IDENTIFIER, None))
self.assertIsNotNone(
getattr(service_pb2, ADD_SERVICER_TO_SERVER_IDENTIFIER, None))
def testUpDown(self):
service = _CreateService()
self.assertIsNotNone(service.servicer_methods)
self.assertIsNotNone(service.server)
self.assertIsNotNone(service.stub)
def testIncompleteServicer(self):
service = _CreateIncompleteService()
request = request_pb2.SimpleRequest(response_size=13)
with self.assertRaises(grpc.RpcError) as exception_context:
service.stub.UnaryCall(request)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.UNIMPLEMENTED)
def testUnaryCall(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
response = service.stub.UnaryCall(request)
expected_response = service.servicer_methods.UnaryCall(
request, 'not a real context!')
self.assertEqual(expected_response, response)
def testUnaryCallFuture(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
# Check that the call does not block waiting for the server to respond.
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(request)
response = response_future.result()
expected_response = service.servicer_methods.UnaryCall(
request, 'not a real RpcContext!')
self.assertEqual(expected_response, response)
def testUnaryCallFutureExpired(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(
request, timeout=test_constants.SHORT_TIMEOUT)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.DEADLINE_EXCEEDED)
self.assertIs(response_future.code(), grpc.StatusCode.DEADLINE_EXCEEDED)
def testUnaryCallFutureCancelled(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(request)
response_future.cancel()
self.assertTrue(response_future.cancelled())
self.assertIs(response_future.code(), grpc.StatusCode.CANCELLED)
def testUnaryCallFutureFailed(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.fail():
response_future = service.stub.UnaryCall.future(request)
self.assertIsNotNone(response_future.exception())
self.assertIs(response_future.code(), grpc.StatusCode.UNKNOWN)
def testStreamingOutputCall(self):
service = _CreateService()
request = _streaming_output_request()
responses = service.stub.StreamingOutputCall(request)
expected_responses = service.servicer_methods.StreamingOutputCall(
request, 'not a real RpcContext!')
for expected_response, response in moves.zip_longest(
expected_responses, responses):
self.assertEqual(expected_response, response)
def testStreamingOutputCallExpired(self):
service = _CreateService()
request = _streaming_output_request()
with service.servicer_methods.pause():
responses = service.stub.StreamingOutputCall(
request, timeout=test_constants.SHORT_TIMEOUT)
with self.assertRaises(grpc.RpcError) as exception_context:
list(responses)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.DEADLINE_EXCEEDED)
def testStreamingOutputCallCancelled(self):
service = _CreateService()
request = _streaming_output_request()
responses = service.stub.StreamingOutputCall(request)
next(responses)
responses.cancel()
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(responses.code(), grpc.StatusCode.CANCELLED)
def testStreamingOutputCallFailed(self):
service = _CreateService()
request = _streaming_output_request()
with service.servicer_methods.fail():
responses = service.stub.StreamingOutputCall(request)
self.assertIsNotNone(responses)
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(exception_context.exception.code(), grpc.StatusCode.UNKNOWN)
def testStreamingInputCall(self):
service = _CreateService()
response = service.stub.StreamingInputCall(
_streaming_input_request_iterator())
expected_response = service.servicer_methods.StreamingInputCall(
_streaming_input_request_iterator(),
'not a real RpcContext!')
self.assertEqual(expected_response, response)
def testStreamingInputCallFuture(self):
service = _CreateService()
with service.servicer_methods.pause():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator())
response = response_future.result()
expected_response = service.servicer_methods.StreamingInputCall(
_streaming_input_request_iterator(),
'not a real RpcContext!')
self.assertEqual(expected_response, response)
def testStreamingInputCallFutureExpired(self):
service = _CreateService()
with service.servicer_methods.pause():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator(),
timeout=test_constants.SHORT_TIMEOUT)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIsInstance(response_future.exception(), grpc.RpcError)
self.assertIs(
response_future.exception().code(), grpc.StatusCode.DEADLINE_EXCEEDED)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.DEADLINE_EXCEEDED)
def testStreamingInputCallFutureCancelled(self):
service = _CreateService()
with service.servicer_methods.pause():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator())
response_future.cancel()
self.assertTrue(response_future.cancelled())
with self.assertRaises(grpc.FutureCancelledError):
response_future.result()
def testStreamingInputCallFutureFailed(self):
service = _CreateService()
with service.servicer_methods.fail():
response_future = service.stub.StreamingInputCall.future(
_streaming_input_request_iterator())
self.assertIsNotNone(response_future.exception())
self.assertIs(response_future.code(), grpc.StatusCode.UNKNOWN)
def testFullDuplexCall(self):
service = _CreateService()
responses = service.stub.FullDuplexCall(
_full_duplex_request_iterator())
expected_responses = service.servicer_methods.FullDuplexCall(
_full_duplex_request_iterator(),
'not a real RpcContext!')
for expected_response, response in moves.zip_longest(
expected_responses, responses):
self.assertEqual(expected_response, response)
def testFullDuplexCallExpired(self):
request_iterator = _full_duplex_request_iterator()
service = _CreateService()
with service.servicer_methods.pause():
responses = service.stub.FullDuplexCall(
request_iterator, timeout=test_constants.SHORT_TIMEOUT)
with self.assertRaises(grpc.RpcError) as exception_context:
list(responses)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.DEADLINE_EXCEEDED)
def testFullDuplexCallCancelled(self):
service = _CreateService()
request_iterator = _full_duplex_request_iterator()
responses = service.stub.FullDuplexCall(request_iterator)
next(responses)
responses.cancel()
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(
exception_context.exception.code(), grpc.StatusCode.CANCELLED)
def testFullDuplexCallFailed(self):
request_iterator = _full_duplex_request_iterator()
service = _CreateService()
with service.servicer_methods.fail():
responses = service.stub.FullDuplexCall(request_iterator)
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(exception_context.exception.code(),
grpc.StatusCode.UNKNOWN)
def testImportAttributes(self):
# check that we can access the generated module and its members.
self.assertIsNotNone(getattr(service_pb2, STUB_IDENTIFIER, None))
self.assertIsNotNone(getattr(service_pb2, SERVICER_IDENTIFIER, None))
self.assertIsNotNone(
getattr(service_pb2, ADD_SERVICER_TO_SERVER_IDENTIFIER, None))
def testUpDown(self):
service = _CreateService()
self.assertIsNotNone(service.servicer_methods)
self.assertIsNotNone(service.server)
self.assertIsNotNone(service.stub)
def testIncompleteServicer(self):
service = _CreateIncompleteService()
request = request_pb2.SimpleRequest(response_size=13)
with self.assertRaises(grpc.RpcError) as exception_context:
service.stub.UnaryCall(request)
self.assertIs(exception_context.exception.code(),
grpc.StatusCode.UNIMPLEMENTED)
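# The assertion above works because the generated servicer base class
# answers any RPC its subclass does not override with
# grpc.StatusCode.UNIMPLEMENTED; _CreateIncompleteService registers such a
# subclass with no overrides at all.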
def testUnaryCall(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
response = service.stub.UnaryCall(request)
expected_response = service.servicer_methods.UnaryCall(
request, 'not a real context!')
self.assertEqual(expected_response, response)
def testUnaryCallFuture(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
# Check that the call does not block waiting for the server to respond.
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(request)
response = response_future.result()
expected_response = service.servicer_methods.UnaryCall(
request, 'not a real RpcContext!')
self.assertEqual(expected_response, response)
def testUnaryCallFutureExpired(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(
request, timeout=test_constants.SHORT_TIMEOUT)
with self.assertRaises(grpc.RpcError) as exception_context:
response_future.result()
self.assertIs(exception_context.exception.code(),
grpc.StatusCode.DEADLINE_EXCEEDED)
self.assertIs(response_future.code(), grpc.StatusCode.DEADLINE_EXCEEDED)
def testUnaryCallFutureCancelled(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.pause():
response_future = service.stub.UnaryCall.future(request)
response_future.cancel()
self.assertTrue(response_future.cancelled())
self.assertIs(response_future.code(), grpc.StatusCode.CANCELLED)
def testUnaryCallFutureFailed(self):
service = _CreateService()
request = request_pb2.SimpleRequest(response_size=13)
with service.servicer_methods.fail():
response_future = service.stub.UnaryCall.future(request)
self.assertIsNotNone(response_future.exception())
self.assertIs(response_future.code(), grpc.StatusCode.UNKNOWN)
def testHalfDuplexCall(self):
service = _CreateService()
def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
responses = service.stub.HalfDuplexCall(half_duplex_request_iterator())
expected_responses = service.servicer_methods.HalfDuplexCall(
half_duplex_request_iterator(), 'not a real RpcContext!')
for expected_response, response in moves.zip_longest(
expected_responses, responses):
self.assertEqual(expected_response, response)
def testHalfDuplexCallWedged(self):
condition = threading.Condition()
wait_cell = [False]
@contextlib.contextmanager
def wait(): # pylint: disable=invalid-name
# Where's Python 3's 'nonlocal' statement when you need it?
with condition:
wait_cell[0] = True
yield
with condition:
wait_cell[0] = False
condition.notify_all()
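# The one-element wait_cell list is the Python 2 stand-in for `nonlocal`:
# the nested functions cannot rebind a name in the enclosing scope, but
# they can mutate the list's contents. A minimal sketch of the idiom:
#
#     def make_toggle():
#         cell = [False]                 # mutable cell shared with closure
#         def flip():
#             cell[0] = not cell[0]      # mutates, never rebinds
#             return cell[0]
#         return flip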
def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
with condition:
while wait_cell[0]:
condition.wait()
service = _CreateService()
with wait():
responses = service.stub.HalfDuplexCall(
half_duplex_request_iterator(), timeout=test_constants.SHORT_TIMEOUT)
# half-duplex waits for the client to send all info
with self.assertRaises(grpc.RpcError) as exception_context:
next(responses)
self.assertIs(exception_context.exception.code(),
grpc.StatusCode.DEADLINE_EXCEEDED)
if __name__ == '__main__':
unittest.main(verbosity=2)

@@ -49,256 +49,264 @@ from tests.unit.framework.common import test_constants
_MESSAGES_IMPORT = b'import "messages.proto";'
@contextlib.contextmanager
def _system_path(path):
old_system_path = sys.path[:]
sys.path = sys.path[0:1] + path + sys.path[1:]
yield
sys.path = old_system_path
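# Usage sketch for _system_path (the directory name here is hypothetical):
# expose a generated-code directory for the duration of an import, then
# restore sys.path.
#
#     with _system_path(['gen/python_out']):
#         pb2 = importlib.import_module('same_separate_pb2')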
class DummySplitServicer(object):
def __init__(self, request_class, response_class):
self.request_class = request_class
self.response_class = response_class
def Call(self, request, context):
return self.response_class()
class SeparateTestMixin(object):
def testImportAttributes(self):
with _system_path([self.python_out_directory]):
pb2 = importlib.import_module(self.pb2_import)
pb2.Request
pb2.Response
if self.should_find_services_in_pb2:
pb2.TestServiceServicer
else:
with self.assertRaises(AttributeError):
pb2.TestServiceServicer
with _system_path([self.grpc_python_out_directory]):
pb2_grpc = importlib.import_module(self.pb2_grpc_import)
pb2_grpc.TestServiceServicer
with self.assertRaises(AttributeError):
pb2_grpc.Request
with self.assertRaises(AttributeError):
pb2_grpc.Response
def testCall(self):
with _system_path([self.python_out_directory]):
pb2 = importlib.import_module(self.pb2_import)
with _system_path([self.grpc_python_out_directory]):
pb2_grpc = importlib.import_module(self.pb2_grpc_import)
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=test_constants.POOL_SIZE))
pb2_grpc.add_TestServiceServicer_to_server(
DummySplitServicer(
pb2.Request, pb2.Response), server)
port = server.add_insecure_port('[::]:0')
server.start()
channel = grpc.insecure_channel('localhost:{}'.format(port))
stub = pb2_grpc.TestServiceStub(channel)
request = pb2.Request()
expected_response = pb2.Response()
response = stub.Call(request)
self.assertEqual(expected_response, response)
class CommonTestMixin(object):
def testImportAttributes(self):
with _system_path([self.python_out_directory]):
pb2 = importlib.import_module(self.pb2_import)
pb2.Request
pb2.Response
if self.should_find_services_in_pb2:
pb2.TestServiceServicer
else:
with self.assertRaises(AttributeError):
pb2.TestServiceServicer
with _system_path([self.grpc_python_out_directory]):
pb2_grpc = importlib.import_module(self.pb2_grpc_import)
pb2_grpc.TestServiceServicer
with self.assertRaises(AttributeError):
pb2_grpc.Request
with self.assertRaises(AttributeError):
pb2_grpc.Response
def testCall(self):
with _system_path([self.python_out_directory]):
pb2 = importlib.import_module(self.pb2_import)
with _system_path([self.grpc_python_out_directory]):
pb2_grpc = importlib.import_module(self.pb2_grpc_import)
server = grpc.server(
futures.ThreadPoolExecutor(max_workers=test_constants.POOL_SIZE))
pb2_grpc.add_TestServiceServicer_to_server(
DummySplitServicer(
pb2.Request, pb2.Response), server)
port = server.add_insecure_port('[::]:0')
server.start()
channel = grpc.insecure_channel('localhost:{}'.format(port))
stub = pb2_grpc.TestServiceStub(channel)
request = pb2.Request()
expected_response = pb2.Response()
response = stub.Call(request)
self.assertEqual(expected_response, response)
class SameSeparateTest(unittest.TestCase, SeparateTestMixin):
def setUp(self):
same_proto_contents = pkgutil.get_data(
'tests.protoc_plugin.protos.invocation_testing', 'same.proto')
self.directory = tempfile.mkdtemp(suffix='same_separate', dir='.')
self.proto_directory = os.path.join(self.directory, 'proto_path')
self.python_out_directory = os.path.join(self.directory, 'python_out')
self.grpc_python_out_directory = os.path.join(self.directory, 'grpc_python_out')
os.makedirs(self.proto_directory)
os.makedirs(self.python_out_directory)
os.makedirs(self.grpc_python_out_directory)
same_proto_file = os.path.join(self.proto_directory, 'same_separate.proto')
open(same_proto_file, 'wb').write(same_proto_contents)
protoc_result = protoc.main([
'',
'--proto_path={}'.format(self.proto_directory),
'--python_out={}'.format(self.python_out_directory),
'--grpc_python_out=grpc_2_0:{}'.format(self.grpc_python_out_directory),
same_proto_file,
])
if protoc_result != 0:
raise Exception("unexpected protoc error")
open(os.path.join(self.grpc_python_out_directory, '__init__.py'), 'w').write('')
open(os.path.join(self.python_out_directory, '__init__.py'), 'w').write('')
self.pb2_import = 'same_separate_pb2'
self.pb2_grpc_import = 'same_separate_pb2_grpc'
self.should_find_services_in_pb2 = False
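# Note on the flags above: the 'grpc_2_0:' prefix on --grpc_python_out
# selects the split output mode, in which service classes are emitted into
# a separate *_pb2_grpc module and *_pb2 keeps only the messages; that is
# why should_find_services_in_pb2 is False for this fixture.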
def tearDown(self):
shutil.rmtree(self.directory)
class SameCommonTest(unittest.TestCase, CommonTestMixin):
def setUp(self):
same_proto_contents = pkgutil.get_data(
'tests.protoc_plugin.protos.invocation_testing', 'same.proto')
self.directory = tempfile.mkdtemp(suffix='same_common', dir='.')
self.proto_directory = os.path.join(self.directory, 'proto_path')
self.python_out_directory = os.path.join(self.directory, 'python_out')
self.grpc_python_out_directory = self.python_out_directory
os.makedirs(self.proto_directory)
os.makedirs(self.python_out_directory)
same_proto_file = os.path.join(self.proto_directory, 'same_common.proto')
open(same_proto_file, 'wb').write(same_proto_contents)
protoc_result = protoc.main([
'',
'--proto_path={}'.format(self.proto_directory),
'--python_out={}'.format(self.python_out_directory),
'--grpc_python_out={}'.format(self.grpc_python_out_directory),
same_proto_file,
])
if protoc_result != 0:
raise Exception("unexpected protoc error")
open(os.path.join(self.python_out_directory, '__init__.py'), 'w').write('')
self.pb2_import = 'same_common_pb2'
self.pb2_grpc_import = 'same_common_pb2_grpc'
self.should_find_services_in_pb2 = True
def tearDown(self):
shutil.rmtree(self.directory)
class SplitCommonTest(unittest.TestCase, CommonTestMixin):
def setUp(self):
services_proto_contents = pkgutil.get_data(
'tests.protoc_plugin.protos.invocation_testing.split_services',
'services.proto')
messages_proto_contents = pkgutil.get_data(
'tests.protoc_plugin.protos.invocation_testing.split_messages',
'messages.proto')
self.directory = tempfile.mkdtemp(suffix='split_common', dir='.')
self.proto_directory = os.path.join(self.directory, 'proto_path')
self.python_out_directory = os.path.join(self.directory, 'python_out')
self.grpc_python_out_directory = self.python_out_directory
os.makedirs(self.proto_directory)
os.makedirs(self.python_out_directory)
services_proto_file = os.path.join(self.proto_directory,
'split_common_services.proto')
messages_proto_file = os.path.join(self.proto_directory,
'split_common_messages.proto')
open(services_proto_file, 'wb').write(services_proto_contents.replace(
_MESSAGES_IMPORT,
b'import "split_common_messages.proto";'
))
open(messages_proto_file, 'wb').write(messages_proto_contents)
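# The replace() above rewrites the proto's generic import statement
# (_MESSAGES_IMPORT, i.e. b'import "messages.proto";') to name the renamed
# copy, so protoc can resolve the cross-file dependency entirely within
# self.proto_directory.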
protoc_result = protoc.main([
'',
'--proto_path={}'.format(self.proto_directory),
'--python_out={}'.format(self.python_out_directory),
'--grpc_python_out={}'.format(self.grpc_python_out_directory),
services_proto_file,
messages_proto_file,
])
if protoc_result != 0:
raise Exception("unexpected protoc error")
open(os.path.join(self.python_out_directory, '__init__.py'), 'w').write('')
self.pb2_import = 'split_common_messages_pb2'
self.pb2_grpc_import = 'split_common_services_pb2_grpc'
self.should_find_services_in_pb2 = False
def tearDown(self):
shutil.rmtree(self.directory)
class SplitSeparateTest(unittest.TestCase, SeparateTestMixin):
def setUp(self):
services_proto_contents = pkgutil.get_data(
'tests.protoc_plugin.protos.invocation_testing.split_services',
'services.proto')
messages_proto_contents = pkgutil.get_data(
'tests.protoc_plugin.protos.invocation_testing.split_messages',
'messages.proto')
self.directory = tempfile.mkdtemp(suffix='split_separate', dir='.')
self.proto_directory = os.path.join(self.directory, 'proto_path')
self.python_out_directory = os.path.join(self.directory, 'python_out')
self.grpc_python_out_directory = os.path.join(self.directory, 'grpc_python_out')
os.makedirs(self.proto_directory)
os.makedirs(self.python_out_directory)
os.makedirs(self.grpc_python_out_directory)
services_proto_file = os.path.join(self.proto_directory,
'split_separate_services.proto')
messages_proto_file = os.path.join(self.proto_directory,
'split_separate_messages.proto')
open(services_proto_file, 'wb').write(services_proto_contents.replace(
_MESSAGES_IMPORT,
b'import "split_separate_messages.proto";'
))
open(messages_proto_file, 'wb').write(messages_proto_contents)
protoc_result = protoc.main([
'',
'--proto_path={}'.format(self.proto_directory),
'--python_out={}'.format(self.python_out_directory),
'--grpc_python_out=grpc_2_0:{}'.format(self.grpc_python_out_directory),
services_proto_file,
messages_proto_file,
])
if protoc_result != 0:
raise Exception("unexpected protoc error")
open(os.path.join(self.python_out_directory, '__init__.py'), 'w').write('')
self.pb2_import = 'split_separate_messages_pb2'
self.pb2_grpc_import = 'split_separate_services_pb2_grpc'
self.should_find_services_in_pb2 = False
def tearDown(self):
shutil.rmtree(self.directory)
if __name__ == '__main__':
unittest.main(verbosity=2)

@@ -64,84 +64,84 @@ STUB_FACTORY_IDENTIFIER = 'beta_create_TestService_stub'
class _ServicerMethods(object):
def __init__(self):
self._condition = threading.Condition()
self._paused = False
self._fail = False
@contextlib.contextmanager
def pause(self): # pylint: disable=invalid-name
with self._condition:
self._paused = True
yield
with self._condition:
self._paused = False
self._condition.notify_all()
@contextlib.contextmanager
def fail(self): # pylint: disable=invalid-name
with self._condition:
self._fail = True
yield
with self._condition:
self._fail = False
def _control(self): # pylint: disable=invalid-name
with self._condition:
if self._fail:
raise ValueError()
while self._paused:
self._condition.wait()
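# How the knobs above cooperate: every servicer method calls _control()
# before replying, so while pause() is active the server blocks on the
# condition variable, and while fail() is active it raises ValueError,
# which clients observe as a remote error. A sketch of the intended use:
#
#     with methods.pause():                  # server stalls in _control()
#         future = stub.UnaryCall.future(request, timeout)
#     response = future.result()             # completes once unpaused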
def UnaryCall(self, request, unused_rpc_context):
response = response_pb2.SimpleResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = 'a' * request.response_size
self._control()
return response
def StreamingOutputCall(self, request, unused_rpc_context):
for parameter in request.response_parameters:
response = response_pb2.StreamingOutputCallResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = 'a' * parameter.size
self._control()
yield response
def StreamingInputCall(self, request_iter, unused_rpc_context):
response = response_pb2.StreamingInputCallResponse()
aggregated_payload_size = 0
for request in request_iter:
aggregated_payload_size += len(request.payload.payload_compressable)
response.aggregated_payload_size = aggregated_payload_size
self._control()
return response
def FullDuplexCall(self, request_iter, unused_rpc_context):
for request in request_iter:
for parameter in request.response_parameters:
response = response_pb2.StreamingOutputCallResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = 'a' * parameter.size
self._control()
yield response
def HalfDuplexCall(self, request_iter, unused_rpc_context):
responses = []
for request in request_iter:
for parameter in request.response_parameters:
response = response_pb2.StreamingOutputCallResponse()
response.payload.payload_type = payload_pb2.COMPRESSABLE
response.payload.payload_compressable = 'a' * parameter.size
self._control()
responses.append(response)
for response in responses:
yield response
@contextlib.contextmanager
def _CreateService():
"""Provides a servicer backend and a stub.
"""Provides a servicer backend and a stub.
The servicer is just the implementation of the actual servicer passed to the
face player of the python RPC implementation; the two are detached.
@ -151,38 +151,38 @@ def _CreateService():
the service bound to the stub and and stub is the stub on which to invoke
RPCs.
"""
servicer_methods = _ServicerMethods()
servicer_methods = _ServicerMethods()
class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):
class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):
def UnaryCall(self, request, context):
return servicer_methods.UnaryCall(request, context)
def StreamingOutputCall(self, request, context):
return servicer_methods.StreamingOutputCall(request, context)
def StreamingInputCall(self, request_iter, context):
return servicer_methods.StreamingInputCall(request_iter, context)
def FullDuplexCall(self, request_iter, context):
return servicer_methods.FullDuplexCall(request_iter, context)
def HalfDuplexCall(self, request_iter, context):
return servicer_methods.HalfDuplexCall(request_iter, context)
servicer = Servicer()
server = getattr(service_pb2, SERVER_FACTORY_IDENTIFIER)(servicer)
port = server.add_insecure_port('[::]:0')
server.start()
channel = implementations.insecure_channel('localhost', port)
stub = getattr(service_pb2, STUB_FACTORY_IDENTIFIER)(channel)
yield (servicer_methods, stub)
server.stop(0)
@contextlib.contextmanager
def _CreateIncompleteService():
"""Provides a servicer backend that fails to implement methods and its stub.
"""Provides a servicer backend that fails to implement methods and its stub.
The servicer is just the implementation of the actual servicer passed to the
face player of the python RPC implementation; the two are detached.
@ -194,297 +194,297 @@ def _CreateIncompleteService():
RPCs.
"""
class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):
pass
class Servicer(getattr(service_pb2, SERVICER_IDENTIFIER)):
pass
servicer = Servicer()
server = getattr(service_pb2, SERVER_FACTORY_IDENTIFIER)(servicer)
port = server.add_insecure_port('[::]:0')
server.start()
channel = implementations.insecure_channel('localhost', port)
stub = getattr(service_pb2, STUB_FACTORY_IDENTIFIER)(channel)
yield None, stub
server.stop(0)
def _streaming_input_request_iterator():
for _ in range(3):
request = request_pb2.StreamingInputCallRequest()
request.payload.payload_type = payload_pb2.COMPRESSABLE
request.payload.payload_compressable = 'a'
yield request
def _streaming_output_request():
request = request_pb2.StreamingOutputCallRequest()
sizes = [1, 2, 3]
request.response_parameters.add(size=sizes[0], interval_us=0)
request.response_parameters.add(size=sizes[1], interval_us=0)
request.response_parameters.add(size=sizes[2], interval_us=0)
return request
def _full_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
class PythonPluginTest(unittest.TestCase):
"""Test case for the gRPC Python protoc-plugin.
"""Test case for the gRPC Python protoc-plugin.
While reading these tests, remember that the futures API
(`stub.method.future()`) only gives futures for the *response-unary*
methods and does not exist for response-streaming methods.
"""
def testImportAttributes(self):
# check that we can access the generated module and its members.
self.assertIsNotNone(getattr(service_pb2, SERVICER_IDENTIFIER, None))
self.assertIsNotNone(getattr(service_pb2, STUB_IDENTIFIER, None))
self.assertIsNotNone(
getattr(service_pb2, SERVER_FACTORY_IDENTIFIER, None))
self.assertIsNotNone(
getattr(service_pb2, STUB_FACTORY_IDENTIFIER, None))
def testUpDown(self):
with _CreateService():
request_pb2.SimpleRequest(response_size=13)
def testIncompleteServicer(self):
with _CreateIncompleteService() as (_, stub):
request = request_pb2.SimpleRequest(response_size=13)
try:
stub.UnaryCall(request, test_constants.LONG_TIMEOUT)
except face.AbortionError as error:
self.assertEqual(interfaces.StatusCode.UNIMPLEMENTED,
error.code)
def testUnaryCall(self):
with _CreateService() as (methods, stub):
request = request_pb2.SimpleRequest(response_size=13)
response = stub.UnaryCall(request, test_constants.LONG_TIMEOUT)
expected_response = methods.UnaryCall(request, 'not a real context!')
self.assertEqual(expected_response, response)
def testUnaryCallFuture(self):
with _CreateService() as (methods, stub):
request = request_pb2.SimpleRequest(response_size=13)
# Check that the call does not block waiting for the server to respond.
with methods.pause():
response_future = stub.UnaryCall.future(
request, test_constants.LONG_TIMEOUT)
response = response_future.result()
expected_response = methods.UnaryCall(request, 'not a real RpcContext!')
self.assertEqual(expected_response, response)
def testUnaryCallFutureExpired(self):
with _CreateService() as (methods, stub):
request = request_pb2.SimpleRequest(response_size=13)
with methods.pause():
response_future = stub.UnaryCall.future(
request, test_constants.SHORT_TIMEOUT)
with self.assertRaises(face.ExpirationError):
response_future.result()
def testUnaryCallFutureCancelled(self):
with _CreateService() as (methods, stub):
request = request_pb2.SimpleRequest(response_size=13)
with methods.pause():
response_future = stub.UnaryCall.future(request, 1)
response_future.cancel()
self.assertTrue(response_future.cancelled())
def testUnaryCallFutureFailed(self):
with _CreateService() as (methods, stub):
request = request_pb2.SimpleRequest(response_size=13)
with methods.fail():
response_future = stub.UnaryCall.future(
request, test_constants.LONG_TIMEOUT)
self.assertIsNotNone(response_future.exception())
def testStreamingOutputCall(self):
with _CreateService() as (methods, stub):
request = _streaming_output_request()
responses = stub.StreamingOutputCall(request,
test_constants.LONG_TIMEOUT)
expected_responses = methods.StreamingOutputCall(
request, 'not a real RpcContext!')
for expected_response, response in moves.zip_longest(
expected_responses, responses):
self.assertEqual(expected_response, response)
def testStreamingOutputCallExpired(self):
with _CreateService() as (methods, stub):
request = _streaming_output_request()
with methods.pause():
responses = stub.StreamingOutputCall(
request, test_constants.SHORT_TIMEOUT)
with self.assertRaises(face.ExpirationError):
list(responses)
def testStreamingOutputCallCancelled(self):
with _CreateService() as (methods, stub):
request = _streaming_output_request()
responses = stub.StreamingOutputCall(request,
test_constants.LONG_TIMEOUT)
next(responses)
responses.cancel()
with self.assertRaises(face.CancellationError):
next(responses)
def testStreamingOutputCallFailed(self):
with _CreateService() as (methods, stub):
request = _streaming_output_request()
with methods.fail():
responses = stub.StreamingOutputCall(request, 1)
self.assertIsNotNone(responses)
with self.assertRaises(face.RemoteError):
next(responses)
def testStreamingInputCall(self):
with _CreateService() as (methods, stub):
response = stub.StreamingInputCall(
_streaming_input_request_iterator(),
test_constants.LONG_TIMEOUT)
expected_response = methods.StreamingInputCall(
_streaming_input_request_iterator(), 'not a real RpcContext!')
self.assertEqual(expected_response, response)
def testStreamingInputCallFuture(self):
with _CreateService() as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(),
test_constants.LONG_TIMEOUT)
response = response_future.result()
expected_response = methods.StreamingInputCall(
_streaming_input_request_iterator(), 'not a real RpcContext!')
self.assertEqual(expected_response, response)
def testStreamingInputCallFutureExpired(self):
with _CreateService() as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(),
test_constants.SHORT_TIMEOUT)
with self.assertRaises(face.ExpirationError):
response_future.result()
self.assertIsInstance(response_future.exception(),
face.ExpirationError)
def testStreamingInputCallFutureCancelled(self):
with _CreateService() as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(),
test_constants.LONG_TIMEOUT)
response_future.cancel()
self.assertTrue(response_future.cancelled())
with self.assertRaises(future.CancelledError):
response_future.result()
def testStreamingInputCallFutureFailed(self):
with _CreateService() as (methods, stub):
with methods.fail():
response_future = stub.StreamingInputCall.future(
_streaming_input_request_iterator(),
test_constants.LONG_TIMEOUT)
self.assertIsNotNone(response_future.exception())
def testFullDuplexCall(self):
with _CreateService() as (methods, stub):
responses = stub.FullDuplexCall(_full_duplex_request_iterator(),
test_constants.LONG_TIMEOUT)
expected_responses = methods.FullDuplexCall(
_full_duplex_request_iterator(), 'not a real RpcContext!')
for expected_response, response in moves.zip_longest(
expected_responses, responses):
self.assertEqual(expected_response, response)
def testFullDuplexCallExpired(self):
request_iterator = _full_duplex_request_iterator()
with _CreateService() as (methods, stub):
with methods.pause():
responses = stub.FullDuplexCall(request_iterator,
test_constants.SHORT_TIMEOUT)
with self.assertRaises(face.ExpirationError):
list(responses)
def testFullDuplexCallCancelled(self):
with _CreateService() as (methods, stub):
request_iterator = _full_duplex_request_iterator()
responses = stub.FullDuplexCall(request_iterator,
test_constants.LONG_TIMEOUT)
next(responses)
responses.cancel()
with self.assertRaises(face.CancellationError):
next(responses)
def testFullDuplexCallFailed(self):
request_iterator = _full_duplex_request_iterator()
with _CreateService() as (methods, stub):
with methods.fail():
responses = stub.FullDuplexCall(request_iterator,
test_constants.LONG_TIMEOUT)
self.assertIsNotNone(responses)
with self.assertRaises(face.RemoteError):
next(responses)
def testHalfDuplexCall(self):
with _CreateService() as (methods, stub):
def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=2, interval_us=0)
request.response_parameters.add(size=3, interval_us=0)
yield request
responses = stub.HalfDuplexCall(half_duplex_request_iterator(),
test_constants.LONG_TIMEOUT)
expected_responses = methods.HalfDuplexCall(
half_duplex_request_iterator(), 'not a real RpcContext!')
for check in moves.zip_longest(expected_responses, responses):
expected_response, response = check
self.assertEqual(expected_response, response)
def testHalfDuplexCallWedged(self):
condition = threading.Condition()
wait_cell = [False]
@contextlib.contextmanager
def wait(): # pylint: disable=invalid-name
# Where's Python 3's 'nonlocal' statement when you need it?
with condition:
wait_cell[0] = True
yield
with condition:
wait_cell[0] = False
condition.notify_all()
def half_duplex_request_iterator():
request = request_pb2.StreamingOutputCallRequest()
request.response_parameters.add(size=1, interval_us=0)
yield request
with condition:
while wait_cell[0]:
condition.wait()
with _CreateService() as (methods, stub):
with wait():
responses = stub.HalfDuplexCall(half_duplex_request_iterator(),
test_constants.SHORT_TIMEOUT)
# half-duplex waits for the client to send all info
with self.assertRaises(face.ExpirationError):
next(responses)
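# Why the RPC above expires: HalfDuplexCall (see _ServicerMethods) buffers
# every response until the request iterator is exhausted, and wait() keeps
# half_duplex_request_iterator blocked on the condition variable, so no
# response can be produced before SHORT_TIMEOUT elapses.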
if __name__ == '__main__':
unittest.main(verbosity=2)
