Merge github.com:grpc/grpc into connected-subchannel

reviewable/pr4232/r8
Craig Tiller 9 years ago
commit edb2801829
  1. 98
      Makefile
  2. 30
      build.yaml
  3. 1
      examples/objective-c/auth_sample/Podfile
  4. 1
      examples/objective-c/helloworld/Podfile
  5. 1
      examples/objective-c/route_guide/Podfile
  6. 2
      gRPC.podspec
  7. 13
      src/core/surface/call.h
  8. 23
      src/core/surface/call_log_batch.c
  9. 21
      src/core/transport/chttp2/hpack_parser.c
  10. 2
      src/core/transport/chttp2/hpack_parser.h
  11. 8
      src/core/tsi/transport_security.c
  12. 6
      src/csharp/Grpc.Auth/GoogleAuthInterceptors.cs
  13. 84
      src/csharp/Grpc.Core/AsyncAuthInterceptor.cs
  14. 8
      src/csharp/Grpc.Core/CallCredentials.cs
  15. 7
      src/csharp/Grpc.Core/Grpc.Core.csproj
  16. 13
      src/csharp/Grpc.Core/Internal/NativeMetadataCredentialsPlugin.cs
  17. 2
      src/csharp/Grpc.IntegrationTesting/MetadataCredentialsTest.cs
  18. 7
      src/csharp/ext/grpc_csharp_ext.c
  19. 5
      src/node/ext/call.cc
  20. 5
      src/node/src/credentials.js
  21. 1428
      src/objective-c/BoringSSL.podspec
  22. 43
      src/objective-c/GRPCClient/GRPCCall.h
  23. 2
      src/objective-c/GRPCClient/GRPCCall.m
  24. 13
      src/objective-c/GRPCClient/private/GRPCRequestHeaders.h
  25. 43
      src/objective-c/GRPCClient/private/GRPCRequestHeaders.m
  26. 2
      src/objective-c/GRPCClient/private/GRPCWrappedCall.h
  27. 2
      src/objective-c/GRPCClient/private/GRPCWrappedCall.m
  28. 1
      src/objective-c/examples/Sample/Podfile
  29. 1
      src/objective-c/examples/SwiftSample/Podfile
  30. 1
      src/objective-c/tests/Podfile
  31. 7
      src/python/grpcio/.gitignore
  32. 1
      src/python/grpcio/MANIFEST.in
  33. 127
      src/python/grpcio/commands.py
  34. 175
      src/python/grpcio/grpc/beta/_server.py
  35. 124
      src/python/grpcio/grpc/beta/_stub.py
  36. 60
      src/python/grpcio/grpc/framework/core/_end.py
  37. 1
      src/python/grpcio/requirements.txt
  38. 6
      src/python/grpcio/setup.cfg
  39. 120
      src/python/grpcio/setup.py
  40. 68
      src/python/grpcio/tests/__init__.py
  41. 119
      src/python/grpcio/tests/_loader.py
  42. 451
      src/python/grpcio/tests/_result.py
  43. 224
      src/python/grpcio/tests/_runner.py
  44. 0
      src/python/grpcio/tests/interop/__init__.py
  45. 8
      src/python/grpcio/tests/interop/_insecure_interop_test.py
  46. 2
      src/python/grpcio/tests/interop/_interop_test_case.py
  47. 10
      src/python/grpcio/tests/interop/_secure_interop_test.py
  48. 13
      src/python/grpcio/tests/interop/client.py
  49. 0
      src/python/grpcio/tests/interop/credentials/README
  50. 0
      src/python/grpcio/tests/interop/credentials/ca.pem
  51. 0
      src/python/grpcio/tests/interop/credentials/server1.key
  52. 0
      src/python/grpcio/tests/interop/credentials/server1.pem
  53. 0
      src/python/grpcio/tests/interop/empty.proto
  54. 0
      src/python/grpcio/tests/interop/messages.proto
  55. 6
      src/python/grpcio/tests/interop/methods.py
  56. 0
      src/python/grpcio/tests/interop/resources.py
  57. 8
      src/python/grpcio/tests/interop/server.py
  58. 4
      src/python/grpcio/tests/interop/test.proto
  59. 0
      src/python/grpcio/tests/protoc_plugin/__init__.py
  60. 73
      src/python/grpcio/tests/protoc_plugin/beta_python_plugin_test.py
  61. 0
      src/python/grpcio/tests/protoc_plugin/protoc_plugin_test.proto
  62. 0
      src/python/grpcio/tests/unit/__init__.py
  63. 0
      src/python/grpcio/tests/unit/_adapter/.gitignore
  64. 0
      src/python/grpcio/tests/unit/_adapter/__init__.py
  65. 0
      src/python/grpcio/tests/unit/_adapter/_c_test.py
  66. 0
      src/python/grpcio/tests/unit/_adapter/_intermediary_low_test.py
  67. 2
      src/python/grpcio/tests/unit/_adapter/_low_test.py
  68. 2
      src/python/grpcio/tests/unit/_adapter/_proto_scenarios.py
  69. 8
      src/python/grpcio/tests/unit/_core_over_links_base_interface_test.py
  70. 8
      src/python/grpcio/tests/unit/_crust_over_core_over_links_face_interface_test.py
  71. 0
      src/python/grpcio/tests/unit/_cython/.gitignore
  72. 0
      src/python/grpcio/tests/unit/_cython/__init__.py
  73. 0
      src/python/grpcio/tests/unit/_cython/adapter_low_test.py
  74. 4
      src/python/grpcio/tests/unit/_cython/cygrpc_test.py
  75. 0
      src/python/grpcio/tests/unit/_cython/test_utilities.py
  76. 0
      src/python/grpcio/tests/unit/_junkdrawer/__init__.py
  77. 0
      src/python/grpcio/tests/unit/_junkdrawer/math_pb2.py
  78. 0
      src/python/grpcio/tests/unit/_junkdrawer/stock_pb2.py
  79. 0
      src/python/grpcio/tests/unit/_links/__init__.py
  80. 6
      src/python/grpcio/tests/unit/_links/_lonely_invocation_link_test.py
  81. 4
      src/python/grpcio/tests/unit/_links/_proto_scenarios.py
  82. 10
      src/python/grpcio/tests/unit/_links/_transmission_test.py
  83. 0
      src/python/grpcio/tests/unit/beta/__init__.py
  84. 79
      src/python/grpcio/tests/unit/beta/_beta_features_test.py
  85. 2
      src/python/grpcio/tests/unit/beta/_connectivity_channel_test.py
  86. 12
      src/python/grpcio/tests/unit/beta/_face_interface_test.py
  87. 2
      src/python/grpcio/tests/unit/beta/_not_found_test.py
  88. 2
      src/python/grpcio/tests/unit/beta/_utilities_test.py
  89. 0
      src/python/grpcio/tests/unit/beta/test_utilities.py
  90. 0
      src/python/grpcio/tests/unit/credentials/README
  91. 0
      src/python/grpcio/tests/unit/credentials/ca.pem
  92. 0
      src/python/grpcio/tests/unit/credentials/server1.key
  93. 0
      src/python/grpcio/tests/unit/credentials/server1.pem
  94. 0
      src/python/grpcio/tests/unit/framework/__init__.py
  95. 8
      src/python/grpcio/tests/unit/framework/_crust_over_core_face_interface_test.py
  96. 0
      src/python/grpcio/tests/unit/framework/common/__init__.py
  97. 0
      src/python/grpcio/tests/unit/framework/common/test_constants.py
  98. 0
      src/python/grpcio/tests/unit/framework/common/test_control.py
  99. 0
      src/python/grpcio/tests/unit/framework/common/test_coverage.py
  100. 0
      src/python/grpcio/tests/unit/framework/core/__init__.py
  101. Some files were not shown because too many files have changed in this diff Show More

File diff suppressed because one or more lines are too long

@ -1325,6 +1325,26 @@ targets:
- mac
- linux
- posix
- name: init_test
build: test
language: c
src:
- test/core/surface/init_test.c
deps:
- grpc_test_util
- grpc
- gpr_test_util
- gpr
- name: invalid_call_argument_test
build: test
language: c
src:
- test/core/end2end/invalid_call_argument_test.c
deps:
- grpc_test_util
- grpc
- gpr_test_util
- gpr
- name: json_rewrite
build: test
run: false
@ -1398,16 +1418,6 @@ targets:
- grpc
- gpr_test_util
- gpr
- name: multi_init_test
build: test
language: c
src:
- test/core/surface/multi_init_test.c
deps:
- grpc_test_util
- grpc
- gpr_test_util
- gpr
- name: multiple_server_queues_test
build: test
language: c

@ -2,6 +2,7 @@ source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '8.0'
pod 'Protobuf', :path => "../../../third_party/protobuf"
pod 'BoringSSL', :podspec => "../../../src/objective-c"
pod 'gRPC', :path => "../../.."
target 'AuthSample' do

@ -2,6 +2,7 @@ source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '8.0'
pod 'Protobuf', :path => "../../../third_party/protobuf"
pod 'BoringSSL', :podspec => "../../../src/objective-c"
pod 'gRPC', :path => "../../.."
target 'HelloWorld' do

@ -3,6 +3,7 @@ platform :ios, '8.0'
target 'RouteGuideClient' do
pod 'Protobuf', :path => "../../../third_party/protobuf"
pod 'BoringSSL', :podspec => "../../../src/objective-c"
pod 'gRPC', :path => "../../.."
# Depend on the generated RouteGuide library.
pod 'RouteGuide', :path => '.'

@ -589,7 +589,7 @@ Pod::Spec.new do |s|
ss.requires_arc = false
ss.libraries = 'z'
ss.dependency 'OpenSSL', '~> 1.0.204.1'
ss.dependency 'BoringSSL', '~> 1.0'
# ss.compiler_flags = '-GCC_WARN_INHIBIT_ALL_WARNINGS', '-w'
end

@ -91,19 +91,6 @@ void grpc_call_log_batch(char *file, int line, gpr_log_severity severity,
grpc_call *call, const grpc_op *ops, size_t nops,
void *tag);
void grpc_server_log_request_call(char *file, int line,
gpr_log_severity severity,
grpc_server *server, grpc_call **call,
grpc_call_details *details,
grpc_metadata_array *initial_metadata,
grpc_completion_queue *cq_bound_to_call,
grpc_completion_queue *cq_for_notification,
void *tag);
void grpc_server_log_shutdown(char *file, int line, gpr_log_severity severity,
grpc_server *server, grpc_completion_queue *cq,
void *tag);
/* Set a context pointer.
No thread safety guarantees are made wrt this value. */
void grpc_call_context_set(grpc_call *call, grpc_context_index elem,

@ -117,26 +117,3 @@ void grpc_call_log_batch(char *file, int line, gpr_log_severity severity,
}
}
void grpc_server_log_request_call(char *file, int line,
gpr_log_severity severity,
grpc_server *server, grpc_call **call,
grpc_call_details *details,
grpc_metadata_array *initial_metadata,
grpc_completion_queue *cq_bound_to_call,
grpc_completion_queue *cq_for_notification,
void *tag) {
gpr_log(file, line, severity,
"grpc_server_request_call(server=%p, call=%p, details=%p, "
"initial_metadata=%p, cq_bound_to_call=%p, cq_for_notification=%p, "
"tag=%p)",
server, call, details, initial_metadata, cq_bound_to_call,
cq_for_notification, tag);
}
void grpc_server_log_shutdown(char *file, int line, gpr_log_severity severity,
grpc_server *server, grpc_completion_queue *cq,
void *tag) {
gpr_log(file, line, severity,
"grpc_server_shutdown_and_notify(server=%p, cq=%p, tag=%p)", server,
cq, tag);
}

@ -728,6 +728,7 @@ static int finish_indexed_field(grpc_chttp2_hpack_parser *p,
/* parse an indexed field with index < 127 */
static int parse_indexed_field(grpc_chttp2_hpack_parser *p,
const gpr_uint8 *cur, const gpr_uint8 *end) {
p->dynamic_table_update_allowed = 0;
p->index = (*cur) & 0x7f;
return finish_indexed_field(p, cur + 1, end);
}
@ -737,6 +738,7 @@ static int parse_indexed_field_x(grpc_chttp2_hpack_parser *p,
const gpr_uint8 *cur, const gpr_uint8 *end) {
static const grpc_chttp2_hpack_parser_state and_then[] = {
finish_indexed_field};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
p->index = 0x7f;
p->parsing.value = &p->index;
@ -769,6 +771,7 @@ static int parse_lithdr_incidx(grpc_chttp2_hpack_parser *p,
const gpr_uint8 *cur, const gpr_uint8 *end) {
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_value_string_with_indexed_key, finish_lithdr_incidx};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
p->index = (*cur) & 0x3f;
return parse_string_prefix(p, cur + 1, end);
@ -780,6 +783,7 @@ static int parse_lithdr_incidx_x(grpc_chttp2_hpack_parser *p,
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_string_prefix, parse_value_string_with_indexed_key,
finish_lithdr_incidx};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
p->index = 0x3f;
p->parsing.value = &p->index;
@ -792,6 +796,7 @@ static int parse_lithdr_incidx_v(grpc_chttp2_hpack_parser *p,
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_key_string, parse_string_prefix,
parse_value_string_with_literal_key, finish_lithdr_incidx_v};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
return parse_string_prefix(p, cur + 1, end);
}
@ -821,6 +826,7 @@ static int parse_lithdr_notidx(grpc_chttp2_hpack_parser *p,
const gpr_uint8 *cur, const gpr_uint8 *end) {
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_value_string_with_indexed_key, finish_lithdr_notidx};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
p->index = (*cur) & 0xf;
return parse_string_prefix(p, cur + 1, end);
@ -832,6 +838,7 @@ static int parse_lithdr_notidx_x(grpc_chttp2_hpack_parser *p,
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_string_prefix, parse_value_string_with_indexed_key,
finish_lithdr_notidx};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
p->index = 0xf;
p->parsing.value = &p->index;
@ -844,6 +851,7 @@ static int parse_lithdr_notidx_v(grpc_chttp2_hpack_parser *p,
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_key_string, parse_string_prefix,
parse_value_string_with_literal_key, finish_lithdr_notidx_v};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
return parse_string_prefix(p, cur + 1, end);
}
@ -873,6 +881,7 @@ static int parse_lithdr_nvridx(grpc_chttp2_hpack_parser *p,
const gpr_uint8 *cur, const gpr_uint8 *end) {
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_value_string_with_indexed_key, finish_lithdr_nvridx};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
p->index = (*cur) & 0xf;
return parse_string_prefix(p, cur + 1, end);
@ -884,6 +893,7 @@ static int parse_lithdr_nvridx_x(grpc_chttp2_hpack_parser *p,
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_string_prefix, parse_value_string_with_indexed_key,
finish_lithdr_nvridx};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
p->index = 0xf;
p->parsing.value = &p->index;
@ -896,6 +906,7 @@ static int parse_lithdr_nvridx_v(grpc_chttp2_hpack_parser *p,
static const grpc_chttp2_hpack_parser_state and_then[] = {
parse_key_string, parse_string_prefix,
parse_value_string_with_literal_key, finish_lithdr_nvridx_v};
p->dynamic_table_update_allowed = 0;
p->next_state = and_then;
return parse_string_prefix(p, cur + 1, end);
}
@ -911,6 +922,10 @@ static int finish_max_tbl_size(grpc_chttp2_hpack_parser *p,
/* parse a max table size change, max size < 15 */
static int parse_max_tbl_size(grpc_chttp2_hpack_parser *p, const gpr_uint8 *cur,
const gpr_uint8 *end) {
if (p->dynamic_table_update_allowed == 0) {
return 0;
}
p->dynamic_table_update_allowed--;
p->index = (*cur) & 0x1f;
return finish_max_tbl_size(p, cur + 1, end);
}
@ -920,6 +935,10 @@ static int parse_max_tbl_size_x(grpc_chttp2_hpack_parser *p,
const gpr_uint8 *cur, const gpr_uint8 *end) {
static const grpc_chttp2_hpack_parser_state and_then[] = {
finish_max_tbl_size};
if (p->dynamic_table_update_allowed == 0) {
return 0;
}
p->dynamic_table_update_allowed--;
p->next_state = and_then;
p->index = 0x1f;
p->parsing.value = &p->index;
@ -1357,6 +1376,7 @@ void grpc_chttp2_hpack_parser_init(grpc_chttp2_hpack_parser *p) {
p->value.str = NULL;
p->value.capacity = 0;
p->value.length = 0;
p->dynamic_table_update_allowed = 2;
grpc_chttp2_hptbl_init(&p->table);
}
@ -1412,6 +1432,7 @@ grpc_chttp2_parse_error grpc_chttp2_header_parser_parse(
parser->on_header_user_data = NULL;
parser->is_boundary = 0xde;
parser->is_eof = 0xde;
parser->dynamic_table_update_allowed = 2;
}
GPR_TIMER_END("grpc_chttp2_hpack_parser_parse", 0);
return GRPC_CHTTP2_PARSE_OK;

@ -85,6 +85,8 @@ struct grpc_chttp2_hpack_parser {
gpr_uint8 binary;
/* is the current string huffman encoded? */
gpr_uint8 huff;
/* is a dynamic table update allowed? */
gpr_uint8 dynamic_table_update_allowed;
/* set by higher layers, used by grpc_chttp2_header_parser_parse to signal
it should append a metadata boundary at the end of frame */
gpr_uint8 is_boundary;

@ -145,7 +145,9 @@ void tsi_frame_protector_destroy(tsi_frame_protector *self) {
tsi_result tsi_handshaker_get_bytes_to_send_to_peer(tsi_handshaker *self,
unsigned char *bytes,
size_t *bytes_size) {
if (self == NULL) return TSI_INVALID_ARGUMENT;
if (self == NULL || bytes == NULL || bytes_size == NULL) {
return TSI_INVALID_ARGUMENT;
}
if (self->frame_protector_created) return TSI_FAILED_PRECONDITION;
return self->vtable->get_bytes_to_send_to_peer(self, bytes, bytes_size);
}
@ -153,7 +155,9 @@ tsi_result tsi_handshaker_get_bytes_to_send_to_peer(tsi_handshaker *self,
tsi_result tsi_handshaker_process_bytes_from_peer(tsi_handshaker *self,
const unsigned char *bytes,
size_t *bytes_size) {
if (self == NULL) return TSI_INVALID_ARGUMENT;
if (self == NULL || bytes == NULL || bytes_size == NULL) {
return TSI_INVALID_ARGUMENT;
}
if (self->frame_protector_created) return TSI_FAILED_PRECONDITION;
return self->vtable->process_bytes_from_peer(self, bytes, bytes_size);
}

@ -57,9 +57,9 @@ namespace Grpc.Auth
/// <returns>The interceptor.</returns>
public static AsyncAuthInterceptor FromCredential(ITokenAccess credential)
{
return new AsyncAuthInterceptor(async (authUri, metadata) =>
return new AsyncAuthInterceptor(async (context, metadata) =>
{
var accessToken = await credential.GetAccessTokenForRequestAsync(authUri, CancellationToken.None).ConfigureAwait(false);
var accessToken = await credential.GetAccessTokenForRequestAsync(context.ServiceUrl, CancellationToken.None).ConfigureAwait(false);
metadata.Add(CreateBearerTokenHeader(accessToken));
});
}
@ -72,7 +72,7 @@ namespace Grpc.Auth
public static AsyncAuthInterceptor FromAccessToken(string accessToken)
{
Preconditions.CheckNotNull(accessToken);
return new AsyncAuthInterceptor(async (authUri, metadata) =>
return new AsyncAuthInterceptor(async (context, metadata) =>
{
metadata.Add(CreateBearerTokenHeader(accessToken));
});

@ -0,0 +1,84 @@
#region Copyright notice and license
// Copyright 2015, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Grpc.Core.Internal;
using Grpc.Core.Utils;
namespace Grpc.Core
{
/// <summary>
/// Asynchronous authentication interceptor for <see cref="CallCredentials"/>.
/// </summary>
/// <param name="context">The interceptor context.</param>
/// <param name="metadata">Metadata to populate with entries that will be added to outgoing call's headers.</param>
/// <returns></returns>
public delegate Task AsyncAuthInterceptor(AuthInterceptorContext context, Metadata metadata);
/// <summary>
/// Context for an RPC being intercepted by <see cref="AsyncAuthInterceptor"/>.
/// </summary>
public class AuthInterceptorContext
{
readonly string serviceUrl;
readonly string methodName;
/// <summary>
/// Initializes a new instance of <c>AuthInterceptorContext</c>.
/// </summary>
public AuthInterceptorContext(string serviceUrl, string methodName)
{
this.serviceUrl = Preconditions.CheckNotNull(serviceUrl);
this.methodName = Preconditions.CheckNotNull(methodName);
}
/// <summary>
/// The fully qualified service URL for the RPC being called.
/// </summary>
public string ServiceUrl
{
get { return serviceUrl; }
}
/// <summary>
/// The method name of the RPC being called.
/// </summary>
public string MethodName
{
get { return methodName; }
}
}
}

@ -40,14 +40,6 @@ using Grpc.Core.Utils;
namespace Grpc.Core
{
/// <summary>
/// Asynchronous authentication interceptor for <see cref="CallCredentials"/>.
/// </summary>
/// <param name="authUri">URL of a service to which current remote call needs to authenticate</param>
/// <param name="metadata">Metadata to populate with entries that will be added to outgoing call's headers.</param>
/// <returns></returns>
public delegate Task AsyncAuthInterceptor(string authUri, Metadata metadata);
/// <summary>
/// Client-side call credentials. Provide authorization with per-call granularity.
/// </summary>

@ -46,6 +46,7 @@
<ItemGroup>
<Compile Include="AsyncDuplexStreamingCall.cs" />
<Compile Include="AsyncServerStreamingCall.cs" />
<Compile Include="AsyncAuthInterceptor.cs" />
<Compile Include="CallCredentials.cs" />
<Compile Include="IClientStreamWriter.cs" />
<Compile Include="Internal\NativeMetadataCredentialsPlugin.cs" />
@ -148,8 +149,8 @@
<Error Condition="!Exists('..\packages\grpc.dependencies.openssl.redist.1.0.204.1\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.openssl.redist.targets')" Text="$([System.String]::Format('$(ErrorText)', '..\packages\grpc.dependencies.openssl.redist.1.0.204.1\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.openssl.redist.targets'))" />
<Error Condition="!Exists('..\packages\grpc.dependencies.zlib.redist.1.2.8.10\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.zlib.redist.targets')" Text="$([System.String]::Format('$(ErrorText)', '..\packages\grpc.dependencies.zlib.redist.1.2.8.10\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.zlib.redist.targets'))" />
</Target>
<ItemGroup />
<ItemGroup />
<Import Project="..\packages\grpc.dependencies.openssl.redist.1.0.204.1\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.openssl.redist.targets" Condition="Exists('..\packages\grpc.dependencies.openssl.redist.1.0.204.1\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.openssl.redist.targets')" />
<Import Project="..\packages\grpc.dependencies.zlib.redist.1.2.8.10\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.zlib.redist.targets" Condition="Exists('..\packages\grpc.dependencies.zlib.redist.1.2.8.10\build\portable-net45+netcore45+wpa81+wp8\grpc.dependencies.zlib.redist.targets')" />
</Project>
<ItemGroup />
<ItemGroup />
</Project>

@ -38,7 +38,7 @@ using Grpc.Core.Utils;
namespace Grpc.Core.Internal
{
internal delegate void NativeMetadataInterceptor(IntPtr statePtr, IntPtr serviceUrlPtr, IntPtr callbackPtr, IntPtr userDataPtr, bool isDestroy);
internal delegate void NativeMetadataInterceptor(IntPtr statePtr, IntPtr serviceUrlPtr, IntPtr methodNamePtr, IntPtr callbackPtr, IntPtr userDataPtr, bool isDestroy);
internal class NativeMetadataCredentialsPlugin
{
@ -71,7 +71,7 @@ namespace Grpc.Core.Internal
get { return credentials; }
}
private void NativeMetadataInterceptorHandler(IntPtr statePtr, IntPtr serviceUrlPtr, IntPtr callbackPtr, IntPtr userDataPtr, bool isDestroy)
private void NativeMetadataInterceptorHandler(IntPtr statePtr, IntPtr serviceUrlPtr, IntPtr methodNamePtr, IntPtr callbackPtr, IntPtr userDataPtr, bool isDestroy)
{
if (isDestroy)
{
@ -81,8 +81,9 @@ namespace Grpc.Core.Internal
try
{
string serviceUrl = Marshal.PtrToStringAnsi(serviceUrlPtr);
StartGetMetadata(serviceUrl, callbackPtr, userDataPtr);
var context = new AuthInterceptorContext(Marshal.PtrToStringAnsi(serviceUrlPtr),
Marshal.PtrToStringAnsi(methodNamePtr));
StartGetMetadata(context, callbackPtr, userDataPtr);
}
catch (Exception e)
{
@ -91,12 +92,12 @@ namespace Grpc.Core.Internal
}
}
private async void StartGetMetadata(string serviceUrl, IntPtr callbackPtr, IntPtr userDataPtr)
private async void StartGetMetadata(AuthInterceptorContext context, IntPtr callbackPtr, IntPtr userDataPtr)
{
try
{
var metadata = new Metadata();
await interceptor(serviceUrl, metadata).ConfigureAwait(false);
await interceptor(context, metadata).ConfigureAwait(false);
using (var metadataArray = MetadataArraySafeHandle.Create(metadata))
{

@ -67,7 +67,7 @@ namespace Grpc.IntegrationTesting
new ChannelOption(ChannelOptions.SslTargetNameOverride, TestCredentials.DefaultHostOverride)
};
var asyncAuthInterceptor = new AsyncAuthInterceptor(async (authUri, metadata) =>
var asyncAuthInterceptor = new AsyncAuthInterceptor(async (context, metadata) =>
{
await Task.Delay(100); // make sure the operation is asynchronous.
metadata.Add("authorization", "SECRET_TOKEN");

@ -927,7 +927,8 @@ GPR_EXPORT void GPR_CALLTYPE grpcsharp_metadata_credentials_notify_from_plugin(
}
typedef void(GPR_CALLTYPE *grpcsharp_metadata_interceptor_func)(
void *state, const char *service_url, grpc_credentials_plugin_metadata_cb cb,
void *state, const char *service_url, const char *method_name,
grpc_credentials_plugin_metadata_cb cb,
void *user_data, gpr_int32 is_destroy);
static void grpcsharp_get_metadata_handler(
@ -935,13 +936,13 @@ static void grpcsharp_get_metadata_handler(
grpc_credentials_plugin_metadata_cb cb, void *user_data) {
grpcsharp_metadata_interceptor_func interceptor =
(grpcsharp_metadata_interceptor_func)(gpr_intptr)state;
interceptor(state, context.service_url, cb, user_data, 0);
interceptor(state, context.service_url, context.method_name, cb, user_data, 0);
}
static void grpcsharp_metadata_credentials_destroy_handler(void *state) {
grpcsharp_metadata_interceptor_func interceptor =
(grpcsharp_metadata_interceptor_func)(gpr_intptr)state;
interceptor(state, NULL, NULL, NULL, 1);
interceptor(state, NULL, NULL, NULL, NULL, 1);
}
GPR_EXPORT grpc_call_credentials *GPR_CALLTYPE grpcsharp_metadata_credentials_create_from_plugin(

@ -234,7 +234,9 @@ class SendMetadataOp : public Op {
class SendMessageOp : public Op {
public:
SendMessageOp() { send_message = NULL; }
SendMessageOp() {
send_message = NULL;
}
~SendMessageOp() {
if (send_message != NULL) {
grpc_byte_buffer_destroy(send_message);
@ -269,7 +271,6 @@ class SendMessageOp : public Op {
std::string GetTypeString() const {
return "send_message";
}
private:
grpc_byte_buffer *send_message;
};

@ -91,7 +91,7 @@ exports.createSsl = ChannelCredentials.createSsl;
*/
exports.createFromMetadataGenerator = function(metadata_generator) {
return CallCredentials.createFromPlugin(function(service_url, callback) {
metadata_generator(service_url, function(error, metadata) {
metadata_generator({service_url: service_url}, function(error, metadata) {
var code = grpc.status.OK;
var message = '';
if (error) {
@ -114,7 +114,8 @@ exports.createFromMetadataGenerator = function(metadata_generator) {
* @return {CallCredentials} The resulting credentials object
*/
exports.createFromGoogleCredential = function(google_credential) {
return exports.createFromMetadataGenerator(function(service_url, callback) {
return exports.createFromMetadataGenerator(function(auth_context, callback) {
var service_url = auth_context.service_url;
google_credential.getRequestMetadata(service_url, function(err, header) {
if (err) {
callback(err);

File diff suppressed because it is too large Load Diff

@ -50,6 +50,8 @@
#import <Foundation/Foundation.h>
#import <RxLibrary/GRXWriter.h>
#include <AvailabilityMacros.h>
#pragma mark gRPC errors
/** Domain of NSError objects produced by gRPC. */
@ -161,6 +163,9 @@ extern id const kGRPCTrailersKey;
#pragma mark GRPCCall
/** Represents a single gRPC remote call. */
@interface GRPCCall : GRXWriter
/**
* The container of the request headers of an RPC conforms to this protocol, which is a subset of
* NSMutableDictionary's interface. It will become a NSMutableDictionary later on.
@ -170,21 +175,6 @@ extern id const kGRPCTrailersKey;
* A header value is a NSString object (with only ASCII characters), unless the header name has the
* suffix "-bin", in which case the value has to be a NSData object.
*/
@protocol GRPCRequestHeaders <NSObject>
@property(nonatomic, readonly) NSUInteger count;
- (id)objectForKeyedSubscript:(NSString *)key;
- (void)setObject:(id)obj forKeyedSubscript:(NSString *)key;
- (void)removeAllObjects;
- (void)removeObjectForKey:(NSString *)key;
@end
/** Represents a single gRPC remote call. */
@interface GRPCCall : GRXWriter
/**
* These HTTP headers will be passed to the server as part of this call. Each HTTP header is a
* name-value pair with string names and either string or binary values.
@ -200,7 +190,7 @@ extern id const kGRPCTrailersKey;
*
* The property is initialized to an empty NSMutableDictionary.
*/
@property(atomic, readonly) id<GRPCRequestHeaders> requestHeaders;
@property(atomic, readonly) NSMutableDictionary *requestHeaders;
/**
* This dictionary is populated with the HTTP headers received from the server. This happens before
@ -243,3 +233,24 @@ extern id const kGRPCTrailersKey;
// TODO(jcanizales): Let specify a deadline. As a category of GRXWriter?
@end
#pragma mark Backwards compatibiity
/** This protocol is kept for backwards compatibility with existing code. */
DEPRECATED_MSG_ATTRIBUTE("Use NSDictionary or NSMutableDictionary instead.")
@protocol GRPCRequestHeaders <NSObject>
@property(nonatomic, readonly) NSUInteger count;
- (id)objectForKeyedSubscript:(NSString *)key;
- (void)setObject:(id)obj forKeyedSubscript:(NSString *)key;
- (void)removeAllObjects;
- (void)removeObjectForKey:(NSString *)key;
@end
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
/** This is only needed for backwards-compatibility. */
@interface NSMutableDictionary (GRPCRequestHeaders) <GRPCRequestHeaders>
@end
#pragma clang diagnostic pop

@ -221,7 +221,7 @@ NSString * const kGRPCTrailersKey = @"io.grpc.TrailersKey";
#pragma mark Send headers
- (void)sendHeaders:(id<GRPCRequestHeaders>)headers {
- (void)sendHeaders:(NSDictionary *)headers {
// TODO(jcanizales): Add error handlers for async failures
[_wrappedCall startBatchWithOperations:@[[[GRPCOpSendMetadata alloc] initWithMetadata:headers
handler:nil]]];

@ -32,21 +32,14 @@
*/
#import <Foundation/Foundation.h>
#include <grpc/grpc.h>
#import "../GRPCCall.h"
@interface GRPCRequestHeaders : NSObject<GRPCRequestHeaders>
@property(nonatomic, readonly) NSUInteger count;
@property(nonatomic, readonly) grpc_metadata *grpc_metadataArray;
@interface GRPCRequestHeaders : NSMutableDictionary
- (instancetype)initWithCall:(GRPCCall *)call;
- (id)objectForKeyedSubscript:(NSString *)key;
- (void)setObject:(id)obj forKeyedSubscript:(NSString *)key;
- (void)removeAllObjects;
- (void)removeObjectForKey:(NSString *)key;
- (instancetype)initWithCall:(GRPCCall *)call
storage:(NSMutableDictionary *)storage NS_DESIGNATED_INITIALIZER;
@end

@ -68,17 +68,44 @@ static void CheckKeyValuePairIsValid(NSString *key, id value) {
@implementation GRPCRequestHeaders {
__weak GRPCCall *_call;
// The NSMutableDictionary superclass doesn't hold any storage (so that people can implement their
// own in subclasses). As that's not the reason we're subclassing, we just delegate storage to the
// default NSMutableDictionary subclass returned by the cluster (e.g. __NSDictionaryM on iOS 9).
NSMutableDictionary *_delegate;
}
- (instancetype)init {
return [self initWithCall:nil];
}
- (instancetype)initWithCapacity:(NSUInteger)numItems {
return [self init];
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
return [self init];
}
- (instancetype)initWithCall:(GRPCCall *)call {
return [self initWithCall:call storage:[NSMutableDictionary dictionary]];
}
// Designated initializer
- (instancetype)initWithCall:(GRPCCall *)call storage:(NSMutableDictionary *)storage {
// TODO(jcanizales): Throw if call or storage are nil.
if ((self = [super init])) {
_call = call;
_delegate = [NSMutableDictionary dictionary];
_delegate = storage;
}
return self;
}
- (instancetype)initWithObjects:(const id _Nonnull __unsafe_unretained *)objects
forKeys:(const id<NSCopying> _Nonnull __unsafe_unretained *)keys
count:(NSUInteger)cnt {
return [self init];
}
- (void)checkCallIsNotStarted {
if (_call.state != GRXWriterStateNotStarted) {
[NSException raise:@"Invalid modification"
@ -86,11 +113,11 @@ static void CheckKeyValuePairIsValid(NSString *key, id value) {
}
}
- (id)objectForKeyedSubscript:(NSString *)key {
- (id)objectForKey:(NSString *)key {
return _delegate[key.lowercaseString];
}
- (void)setObject:(id)obj forKeyedSubscript:(NSString *)key {
- (void)setObject:(id)obj forKey:(NSString *)key {
[self checkCallIsNotStarted];
CheckIsNonNilASCII(@"Header name", key);
key = key.lowercaseString;
@ -103,16 +130,12 @@ static void CheckKeyValuePairIsValid(NSString *key, id value) {
[_delegate removeObjectForKey:key.lowercaseString];
}
- (void)removeAllObjects {
[self checkCallIsNotStarted];
[_delegate removeAllObjects];
}
- (NSUInteger)count {
return _delegate.count;
}
- (grpc_metadata *)grpc_metadataArray {
return _delegate.grpc_metadataArray;
- (NSEnumerator * _Nonnull)keyEnumerator {
return [_delegate keyEnumerator];
}
@end

@ -45,7 +45,7 @@
@interface GRPCOpSendMetadata : GRPCOperation
- (instancetype)initWithMetadata:(GRPCRequestHeaders *)metadata
- (instancetype)initWithMetadata:(NSDictionary *)metadata
handler:(void(^)())handler NS_DESIGNATED_INITIALIZER;
@end

@ -65,7 +65,7 @@
return [self initWithMetadata:nil handler:nil];
}
- (instancetype)initWithMetadata:(GRPCRequestHeaders *)metadata handler:(void (^)())handler {
- (instancetype)initWithMetadata:(NSDictionary *)metadata handler:(void (^)())handler {
if (self = [super init]) {
_op.op = GRPC_OP_SEND_INITIAL_METADATA;
_op.data.send_initial_metadata.count = metadata.count;

@ -2,6 +2,7 @@ source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '8.0'
pod 'Protobuf', :path => "../../../../third_party/protobuf"
pod 'BoringSSL', :podspec => "../.."
pod 'gRPC', :path => "../../../.."
pod 'RemoteTest', :path => "../RemoteTestClient"

@ -2,6 +2,7 @@ source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '8.0'
pod 'Protobuf', :path => "../../../../third_party/protobuf"
pod 'BoringSSL', :podspec => "../.."
pod 'gRPC', :path => "../../../.."
pod 'RemoteTest', :path => "../RemoteTestClient"

@ -2,6 +2,7 @@ source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '8.0'
pod 'Protobuf', :path => "../../../third_party/protobuf"
pod 'BoringSSL', :podspec => ".."
pod 'gRPC', :path => "../../.."
pod 'RemoteTest', :path => "RemoteTestClient"

@ -5,5 +5,12 @@ dist/
*.egg
*.egg/
*.eggs/
*_pb2.py
.coverage
.coverage.*
.cache/
.tox/
nosetests.xml
doc/
_grpcio_metadata.py
htmlcov/

@ -1,3 +1,4 @@
graft grpc
graft tests
include commands.py
include requirements.txt

@ -29,14 +29,18 @@
"""Provides distutils command classes for the GRPC Python setup process."""
import distutils
import os
import os.path
import re
import subprocess
import sys
import setuptools
from setuptools.command import build_py
from setuptools.command import test
_CONF_PY_ADDENDUM = """
CONF_PY_ADDENDUM = """
extensions.append('sphinx.ext.napoleon')
napoleon_google_docstring = True
napoleon_numpy_docstring = True
@ -48,7 +52,7 @@ html_theme = 'sphinx_rtd_theme'
class SphinxDocumentation(setuptools.Command):
"""Command to generate documentation via sphinx."""
description = ''
description = 'generate sphinx documentation'
user_options = []
def initialize_options(self):
@ -72,14 +76,61 @@ class SphinxDocumentation(setuptools.Command):
'-o', os.path.join('doc', 'src'), src_dir])
conf_filepath = os.path.join('doc', 'src', 'conf.py')
with open(conf_filepath, 'a') as conf_file:
conf_file.write(_CONF_PY_ADDENDUM)
conf_file.write(CONF_PY_ADDENDUM)
sphinx.main(['', os.path.join('doc', 'src'), os.path.join('doc', 'build')])
class BuildProtoModules(setuptools.Command):
  """Command to generate project *_pb2.py modules from proto files."""

  description = 'build protobuf modules'
  user_options = [
      ('include=', None, 'path patterns to include in protobuf generation'),
      ('exclude=', None, 'path patterns to exclude from protobuf generation')
  ]

  def initialize_options(self):
    # By default generate from every .proto file and exclude nothing.
    self.exclude = None
    self.include = r'.*\.proto$'
    self.protoc_command = None
    self.grpc_python_plugin_command = None

  def finalize_options(self):
    # `import distutils` alone does not reliably make the `spawn` submodule
    # available; import it explicitly before using find_executable.
    from distutils import spawn
    self.protoc_command = spawn.find_executable('protoc')
    self.grpc_python_plugin_command = spawn.find_executable(
        'grpc_python_plugin')

  def run(self):
    """Walks the tree for matching .proto files and invokes protoc on them.

    Raises:
      Exception: if protoc or the gRPC python plugin is not on the PATH, or
        if protoc exits with a non-zero status.
    """
    # Fail with a clear message instead of a TypeError from joining None
    # into the command line below.
    if self.protoc_command is None or self.grpc_python_plugin_command is None:
      raise Exception(
          'could not find protoc or grpc_python_plugin on the PATH')
    include_regex = re.compile(self.include)
    exclude_regex = re.compile(self.exclude) if self.exclude else None
    paths = []
    root_directory = os.getcwd()
    for walk_root, directories, filenames in os.walk(root_directory):
      for filename in filenames:
        path = os.path.join(walk_root, filename)
        if include_regex.match(path) and not (
            exclude_regex and exclude_regex.match(path)):
          paths.append(path)
    command = [
        self.protoc_command,
        '--plugin=protoc-gen-python-grpc={}'.format(
            self.grpc_python_plugin_command),
        '-I {}'.format(root_directory),
        '--python_out={}'.format(root_directory),
        '--python-grpc_out={}'.format(root_directory),
    ] + paths
    try:
      # NOTE(review): the command is run through the shell as one joined
      # string, so paths containing spaces or shell metacharacters would be
      # misinterpreted; the input is the project tree, not untrusted data,
      # so this is kept as-is.
      subprocess.check_output(' '.join(command), cwd=root_directory, shell=True,
                              stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
      # CalledProcessError has no `message` attribute; format the exception
      # itself (the original `e.message` produced nothing useful).
      raise Exception('Command:\n{}\nMessage:\n{}\nOutput:\n{}'.format(
          command, str(e), e.output))
class BuildProjectMetadata(setuptools.Command):
"""Command to generate project metadata in a module."""
description = ''
description = 'build grpcio project metadata files'
user_options = []
def initialize_options(self):
@ -98,5 +149,73 @@ class BuildPy(build_py.build_py):
"""Custom project build command."""
def run(self):
self.run_command('build_proto_modules')
self.run_command('build_project_metadata')
build_py.build_py.run(self)
class Gather(setuptools.Command):
  """Command that fetches the project's dependency eggs ahead of time."""

  description = 'gather dependencies for grpcio'
  user_options = [
      ('test', 't', 'flag indicating to gather test dependencies'),
      ('install', 'i', 'flag indicating to gather install dependencies')
  ]

  def initialize_options(self):
    # Both gathering modes default to off; the user opts in via the flags.
    self.test = False
    self.install = False

  def finalize_options(self):
    # Nothing to validate, but distutils requires this override to exist.
    pass

  def run(self):
    distribution = self.distribution
    requirement_lists = []
    if self.install and distribution.install_requires:
      requirement_lists.append(distribution.install_requires)
    if self.test and distribution.tests_require:
      requirement_lists.append(distribution.tests_require)
    for requirements in requirement_lists:
      distribution.fetch_build_eggs(requirements)
class RunInterop(test.test):
  """Command to run the interop test client or server.

  Exactly one of --client/--server may be given; pass-through arguments for
  the chosen program are supplied via --args.
  """

  description = 'run interop test client/server'
  user_options = [
      ('args=', 'a', 'pass-thru arguments for the client/server'),
      ('client', 'c', 'flag indicating to run the client'),
      ('server', 's', 'flag indicating to run the server')
  ]

  def initialize_options(self):
    self.args = ''
    self.client = False
    self.server = False

  def finalize_options(self):
    # The original referenced DistutilsOptionError without importing it,
    # raising NameError instead of the intended option error.
    from distutils.errors import DistutilsOptionError
    if self.client and self.server:
      raise DistutilsOptionError('you may only specify one of client or server')

  def run(self):
    # Fetch dependency eggs first so the interop modules can be imported.
    if self.distribution.install_requires:
      self.distribution.fetch_build_eggs(self.distribution.install_requires)
    if self.distribution.tests_require:
      self.distribution.fetch_build_eggs(self.distribution.tests_require)
    if self.client:
      self.run_client()
    elif self.server:
      self.run_server()

  def run_server(self):
    # We import here to ensure that our setuptools parent has had a chance to
    # edit the Python system path.
    from tests.interop import server
    sys.argv[1:] = self.args.split()
    server.serve()

  def run_client(self):
    # We import here to ensure that our setuptools parent has had a chance to
    # edit the Python system path.
    from tests.interop import client
    sys.argv[1:] = self.args.split()
    client.test_interoperability()

@ -44,6 +44,12 @@ _DEFAULT_TIMEOUT = 300
_MAXIMUM_TIMEOUT = 24 * 60 * 60
def _set_event():
event = threading.Event()
event.set()
return event
class _GRPCServicer(base.Servicer):
def __init__(self, delegate):
@ -61,86 +67,143 @@ class _GRPCServicer(base.Servicer):
raise
def _disassemble(grpc_link, end_link, pool, event, grace):
  """Stops both links, detaches them, shuts down the pool, and signals done.

  Args:
    grpc_link: The gRPC-side link to stop and detach.
    end_link: The end link to stop (with the given grace) and detach.
    pool: A thread pool owned by the server, or None if the pool was supplied
      by the customer (in which case it is left running).
    event: A threading.Event set once disassembly is complete.
    grace: The grace period, passed through to end_link.stop.
  """
  grpc_link.begin_stop()
  end_stop_event = end_link.stop(grace)
  end_stop_event.wait()
  grpc_link.end_stop()
  for link in (grpc_link, end_link):
    link.join_link(utilities.NULL_LINK)
  if pool is not None:
    pool.shutdown(wait=True)
  event.set()
class _Server(interfaces.Server):
def __init__(
self, implementations, multi_implementation, pool, pool_size,
default_timeout, maximum_timeout, grpc_link):
self._lock = threading.Lock()
self._implementations = implementations
self._multi_implementation = multi_implementation
self._customer_pool = pool
self._pool_size = pool_size
self._default_timeout = default_timeout
self._maximum_timeout = maximum_timeout
self._grpc_link = grpc_link
class Server(interfaces.Server):
self._end_link = None
self._stop_events = None
self._pool = None
def __init__(self, grpc_link, end_link, pool):
self._grpc_link = grpc_link
self._end_link = end_link
self._pool = pool
def _start(self):
with self._lock:
if self._end_link is not None:
raise ValueError('Cannot start already-started server!')
if self._customer_pool is None:
self._pool = logging_pool.pool(self._pool_size)
assembly_pool = self._pool
else:
assembly_pool = self._customer_pool
servicer = _GRPCServicer(
_crust_implementations.servicer(
self._implementations, self._multi_implementation, assembly_pool))
self._end_link = _core_implementations.service_end_link(
servicer, self._default_timeout, self._maximum_timeout)
self._grpc_link.join_link(self._end_link)
self._end_link.join_link(self._grpc_link)
self._grpc_link.start()
self._end_link.start()
def _dissociate_links_and_shut_down_pool(self):
self._grpc_link.end_stop()
self._grpc_link.join_link(utilities.NULL_LINK)
self._end_link.join_link(utilities.NULL_LINK)
self._end_link = None
if self._pool is not None:
self._pool.shutdown(wait=True)
self._pool = None
def _stop_stopping(self):
self._dissociate_links_and_shut_down_pool()
for stop_event in self._stop_events:
stop_event.set()
self._stop_events = None
def _stop_started(self):
self._grpc_link.begin_stop()
self._end_link.stop(0).wait()
self._dissociate_links_and_shut_down_pool()
def _foreign_thread_stop(self, end_stop_event, stop_events):
end_stop_event.wait()
with self._lock:
if self._stop_events is stop_events:
self._stop_stopping()
def _schedule_stop(self, grace):
with self._lock:
if self._end_link is None:
return _set_event()
server_stop_event = threading.Event()
if self._stop_events is None:
self._stop_events = [server_stop_event]
self._grpc_link.begin_stop()
else:
self._stop_events.append(server_stop_event)
end_stop_event = self._end_link.stop(grace)
end_stop_thread = threading.Thread(
target=self._foreign_thread_stop,
args=(end_stop_event, self._stop_events))
end_stop_thread.start()
return server_stop_event
def _stop_now(self):
with self._lock:
if self._end_link is not None:
if self._stop_events is None:
self._stop_started()
else:
self._stop_stopping()
def add_insecure_port(self, address):
return self._grpc_link.add_port(address, None)
with self._lock:
if self._end_link is None:
return self._grpc_link.add_port(address, None)
else:
raise ValueError('Can\'t add port to serving server!')
def add_secure_port(self, address, server_credentials):
return self._grpc_link.add_port(
address, server_credentials._intermediary_low_credentials) # pylint: disable=protected-access
def _start(self):
self._grpc_link.join_link(self._end_link)
self._end_link.join_link(self._grpc_link)
self._grpc_link.start()
self._end_link.start()
def _stop(self, grace):
stop_event = threading.Event()
if 0 < grace:
disassembly_thread = threading.Thread(
target=_disassemble,
args=(
self._grpc_link, self._end_link, self._pool, stop_event, grace,))
disassembly_thread.start()
return stop_event
else:
_disassemble(self._grpc_link, self._end_link, self._pool, stop_event, 0)
return stop_event
with self._lock:
if self._end_link is None:
return self._grpc_link.add_port(
address, server_credentials._intermediary_low_credentials) # pylint: disable=protected-access
else:
raise ValueError('Can\'t add port to serving server!')
def start(self):
self._start()
def stop(self, grace):
return self._stop(grace)
if 0 < grace:
return self._schedule_stop(grace)
else:
self._stop_now()
return _set_event()
def __enter__(self):
self._start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._stop(0).wait()
self._stop_now()
return False
def __del__(self):
self._stop_now()
def server(
implementations, multi_implementation, request_deserializers,
response_serializers, thread_pool, thread_pool_size, default_timeout,
maximum_timeout):
if thread_pool is None:
service_thread_pool = logging_pool.pool(
_DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size)
assembly_thread_pool = service_thread_pool
else:
service_thread_pool = thread_pool
assembly_thread_pool = None
servicer = _GRPCServicer(
_crust_implementations.servicer(
implementations, multi_implementation, service_thread_pool))
grpc_link = service.service_link(request_deserializers, response_serializers)
end_link = _core_implementations.service_end_link(
servicer,
return _Server(
implementations, multi_implementation, thread_pool,
_DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size,
_DEFAULT_TIMEOUT if default_timeout is None else default_timeout,
_MAXIMUM_TIMEOUT if maximum_timeout is None else maximum_timeout)
return Server(grpc_link, end_link, assembly_thread_pool)
_MAXIMUM_TIMEOUT if maximum_timeout is None else maximum_timeout,
grpc_link)

@ -42,76 +42,114 @@ _DEFAULT_POOL_SIZE = 6
class _AutoIntermediary(object):
def __init__(self, delegate, on_deletion):
def __init__(self, up, down, delegate):
self._lock = threading.Lock()
self._up = up
self._down = down
self._in_context = False
self._delegate = delegate
self._on_deletion = on_deletion
def __getattr__(self, attr):
return getattr(self._delegate, attr)
with self._lock:
if self._delegate is None:
raise AttributeError('No useful attributes out of context!')
else:
return getattr(self._delegate, attr)
def __enter__(self):
return self
with self._lock:
if self._in_context:
raise ValueError('Already in context!')
elif self._delegate is None:
self._delegate = self._up()
self._in_context = True
return self
def __exit__(self, exc_type, exc_val, exc_tb):
return False
with self._lock:
if not self._in_context:
raise ValueError('Not in context!')
self._down()
self._in_context = False
self._delegate = None
return False
def __del__(self):
self._on_deletion()
with self._lock:
if self._delegate is not None:
self._down()
self._delegate = None
class _StubAssemblyManager(object):
def __init__(
self, thread_pool, thread_pool_size, end_link, grpc_link, stub_creator):
self._thread_pool = thread_pool
self._pool_size = thread_pool_size
self._end_link = end_link
self._grpc_link = grpc_link
self._stub_creator = stub_creator
self._own_pool = None
def up(self):
if self._thread_pool is None:
self._own_pool = logging_pool.pool(
_DEFAULT_POOL_SIZE if self._pool_size is None else self._pool_size)
assembly_pool = self._own_pool
else:
assembly_pool = self._thread_pool
self._end_link.join_link(self._grpc_link)
self._grpc_link.join_link(self._end_link)
self._end_link.start()
self._grpc_link.start()
return self._stub_creator(self._end_link, assembly_pool)
def down(self):
self._end_link.stop(0).wait()
self._grpc_link.stop()
self._end_link.join_link(utilities.NULL_LINK)
self._grpc_link.join_link(utilities.NULL_LINK)
if self._own_pool is not None:
self._own_pool.shutdown(wait=True)
self._own_pool = None
def _assemble(
channel, host, metadata_transformer, request_serializers,
response_deserializers, thread_pool, thread_pool_size):
response_deserializers, thread_pool, thread_pool_size, stub_creator):
end_link = _core_implementations.invocation_end_link()
grpc_link = invocation.invocation_link(
channel, host, metadata_transformer, request_serializers,
response_deserializers)
if thread_pool is None:
invocation_pool = logging_pool.pool(
_DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size)
assembly_pool = invocation_pool
else:
invocation_pool = thread_pool
assembly_pool = None
end_link.join_link(grpc_link)
grpc_link.join_link(end_link)
end_link.start()
grpc_link.start()
return end_link, grpc_link, invocation_pool, assembly_pool
def _disassemble(end_link, grpc_link, pool):
end_link.stop(24 * 60 * 60).wait()
grpc_link.stop()
end_link.join_link(utilities.NULL_LINK)
grpc_link.join_link(utilities.NULL_LINK)
if pool is not None:
pool.shutdown(wait=True)
def _wrap_assembly(stub, end_link, grpc_link, assembly_pool):
disassembly_thread = threading.Thread(
target=_disassemble, args=(end_link, grpc_link, assembly_pool))
return _AutoIntermediary(stub, disassembly_thread.start)
stub_assembly_manager = _StubAssemblyManager(
thread_pool, thread_pool_size, end_link, grpc_link, stub_creator)
stub = stub_assembly_manager.up()
return _AutoIntermediary(
stub_assembly_manager.up, stub_assembly_manager.down, stub)
def _dynamic_stub_creator(service, cardinalities):
  """Creates a stub_creator callable with service details bound in.

  Args:
    service: A service identifier passed through to the crust-layer
      dynamic_stub call.
    cardinalities: A mapping of method cardinalities, passed through to the
      crust-layer dynamic_stub call.

  Returns:
    A callable taking (end_link, invocation_pool) and returning a dynamic
      stub backed by the crust implementation.
  """
  def create_dynamic_stub(end_link, invocation_pool):
    return _crust_implementations.dynamic_stub(
        end_link, service, cardinalities, invocation_pool)
  return create_dynamic_stub
def generic_stub(
channel, host, metadata_transformer, request_serializers,
response_deserializers, thread_pool, thread_pool_size):
end_link, grpc_link, invocation_pool, assembly_pool = _assemble(
return _assemble(
channel, host, metadata_transformer, request_serializers,
response_deserializers, thread_pool, thread_pool_size)
stub = _crust_implementations.generic_stub(end_link, invocation_pool)
return _wrap_assembly(stub, end_link, grpc_link, assembly_pool)
response_deserializers, thread_pool, thread_pool_size,
_crust_implementations.generic_stub)
def dynamic_stub(
channel, host, service, cardinalities, metadata_transformer,
request_serializers, response_deserializers, thread_pool,
thread_pool_size):
end_link, grpc_link, invocation_pool, assembly_pool = _assemble(
return _assemble(
channel, host, metadata_transformer, request_serializers,
response_deserializers, thread_pool, thread_pool_size)
stub = _crust_implementations.dynamic_stub(
end_link, service, cardinalities, invocation_pool)
return _wrap_assembly(stub, end_link, grpc_link, assembly_pool)
response_deserializers, thread_pool, thread_pool_size,
_dynamic_stub_creator(service, cardinalities))

@ -85,35 +85,6 @@ def _future_shutdown(lock, cycle, event):
return in_future
def _termination_action(lock, stats, operation_id, cycle):
"""Constructs the termination action for a single operation.
Args:
lock: A lock to hold during the termination action.
stats: A mapping from base.Outcome.Kind values to integers to increment
with the outcome kind given to the termination action.
operation_id: The operation ID for the termination action.
cycle: A _Cycle value to be updated during the termination action.
Returns:
A callable that takes an operation outcome kind as its sole parameter and
that should be used as the termination action for the operation
associated with the given operation ID.
"""
def termination_action(outcome_kind):
with lock:
stats[outcome_kind] += 1
cycle.operations.pop(operation_id, None)
if not cycle.operations:
for action in cycle.idle_actions:
cycle.pool.submit(action)
cycle.idle_actions = []
if cycle.grace:
_cancel_futures(cycle.futures)
cycle.pool.shutdown(wait=False)
return termination_action
class _End(End):
"""An End implementation."""
@ -133,6 +104,31 @@ class _End(End):
self._cycle = None
def _termination_action(self, operation_id):
  """Constructs the termination action for a single operation.

  Args:
    operation_id: The operation ID for the termination action.

  Returns:
    A callable that takes an operation outcome kind as its sole parameter and
      that should be used as the termination action for the operation
      associated with the given operation ID.
  """
  def termination_action(outcome_kind):
    with self._lock:
      self._stats[outcome_kind] += 1
      self._cycle.operations.pop(operation_id, None)
      if self._cycle.operations:
        # Other operations are still in flight; nothing further to do yet.
        return
      for idle_action in self._cycle.idle_actions:
        self._cycle.pool.submit(idle_action)
      self._cycle.idle_actions = []
      if self._cycle.grace:
        _cancel_futures(self._cycle.futures)
        self._cycle.pool.shutdown(wait=False)
        self._cycle = None
  return termination_action
def start(self):
"""See base.End.start for specification."""
with self._lock:
@ -174,8 +170,7 @@ class _End(End):
with self._lock:
if self._cycle is None or self._cycle.grace:
raise ValueError('Can\'t operate on stopped or stopping End!')
termination_action = _termination_action(
self._lock, self._stats, operation_id, self._cycle)
termination_action = self._termination_action(operation_id)
operation = _operation.invocation_operate(
operation_id, group, method, subscription, timeout, protocol_options,
initial_metadata, payload, completion, self._mate.accept_ticket,
@ -208,8 +203,7 @@ class _End(End):
if operation is not None:
operation.handle_ticket(ticket)
elif self._servicer_package is not None and not self._cycle.grace:
termination_action = _termination_action(
self._lock, self._stats, ticket.operation_id, self._cycle)
termination_action = self._termination_action(ticket.operation_id)
operation = _operation.service_operate(
self._servicer_package, ticket, self._mate.accept_ticket,
termination_action, self._cycle.pool)

@ -1,3 +1,4 @@
enum34>=1.0.4
futures>=2.2.0
cython>=0.23
coverage>=4.0

@ -1,2 +1,8 @@
[coverage:run]
plugins = Cython.Coverage
[build_ext]
inplace=1
[build_proto_modules]
exclude=.*protoc_plugin/protoc_plugin_test\.proto$

@ -43,12 +43,21 @@ os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our commands module.
import commands
# Use environment variables to determine whether or not the Cython extension
# should *use* Cython or use the generated C files. Note that this requires the
# C files to have been generated by building first *with* Cython support.
_BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
# Environment variable to determine whether or not the Cython extension should
# *use* Cython or use the generated C files. Note that this requires the C files
# to have been generated by building first *with* Cython support.
BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
_C_EXTENSION_SOURCES = (
# Environment variable to determine whether or not to enable coverage analysis
# in Cython modules.
ENABLE_CYTHON_TRACING = os.environ.get(
'GRPC_PYTHON_ENABLE_CYTHON_TRACING', False)
# Environment variable to determine whether or not to include the test files in
# the installation.
INSTALL_TESTS = os.environ.get('GRPC_PYTHON_INSTALL_TESTS', False)
C_EXTENSION_SOURCES = (
'grpc/_adapter/_c/module.c',
'grpc/_adapter/_c/types.c',
'grpc/_adapter/_c/utility.c',
@ -61,9 +70,9 @@ _C_EXTENSION_SOURCES = (
'grpc/_adapter/_c/types/server.c',
)
_CYTHON_EXTENSION_PACKAGE_NAMES = ()
CYTHON_EXTENSION_PACKAGE_NAMES = ()
_CYTHON_EXTENSION_MODULE_NAMES = (
CYTHON_EXTENSION_MODULE_NAMES = (
'grpc._cython.cygrpc',
'grpc._cython._cygrpc.call',
'grpc._cython._cygrpc.channel',
@ -73,24 +82,24 @@ _CYTHON_EXTENSION_MODULE_NAMES = (
'grpc._cython._cygrpc.server',
)
_EXTENSION_INCLUDE_DIRECTORIES = (
EXTENSION_INCLUDE_DIRECTORIES = (
'.',
)
_EXTENSION_LIBRARIES = (
EXTENSION_LIBRARIES = (
'grpc',
'gpr',
)
if not "darwin" in sys.platform:
_EXTENSION_LIBRARIES += ('rt',)
EXTENSION_LIBRARIES += ('rt',)
_C_EXTENSION_MODULE = _core.Extension(
'grpc._adapter._c', sources=list(_C_EXTENSION_SOURCES),
include_dirs=list(_EXTENSION_INCLUDE_DIRECTORIES),
libraries=list(_EXTENSION_LIBRARIES),
C_EXTENSION_MODULE = _core.Extension(
'grpc._adapter._c', sources=list(C_EXTENSION_SOURCES),
include_dirs=list(EXTENSION_INCLUDE_DIRECTORIES),
libraries=list(EXTENSION_LIBRARIES)
)
_EXTENSION_MODULES = [_C_EXTENSION_MODULE]
EXTENSION_MODULES = [C_EXTENSION_MODULE]
def cython_extensions(package_names, module_names, include_dirs, libraries,
@ -101,48 +110,89 @@ def cython_extensions(package_names, module_names, include_dirs, libraries,
extensions = [
_extension.Extension(
name=module_name, sources=[module_file],
include_dirs=include_dirs, libraries=libraries
include_dirs=include_dirs, libraries=libraries,
define_macros=[('CYTHON_TRACE_NOGIL', 1)] if ENABLE_CYTHON_TRACING else []
) for (module_name, module_file) in zip(module_names, module_files)
]
if build_with_cython:
import Cython.Build
return Cython.Build.cythonize(extensions)
return Cython.Build.cythonize(
extensions,
compiler_directives={'linetrace': bool(ENABLE_CYTHON_TRACING)})
else:
return extensions
_CYTHON_EXTENSION_MODULES = cython_extensions(
list(_CYTHON_EXTENSION_PACKAGE_NAMES), list(_CYTHON_EXTENSION_MODULE_NAMES),
list(_EXTENSION_INCLUDE_DIRECTORIES), list(_EXTENSION_LIBRARIES),
bool(_BUILD_WITH_CYTHON))
CYTHON_EXTENSION_MODULES = cython_extensions(
list(CYTHON_EXTENSION_PACKAGE_NAMES), list(CYTHON_EXTENSION_MODULE_NAMES),
list(EXTENSION_INCLUDE_DIRECTORIES), list(EXTENSION_LIBRARIES),
bool(BUILD_WITH_CYTHON))
_PACKAGES = setuptools.find_packages('.')
_PACKAGE_DIRECTORIES = {
PACKAGE_DIRECTORIES = {
'': '.',
}
_INSTALL_REQUIRES = (
INSTALL_REQUIRES = (
'enum34>=1.0.4',
'futures>=2.2.0',
)
_SETUP_REQUIRES = (
SETUP_REQUIRES = (
'sphinx>=1.3',
) + _INSTALL_REQUIRES
) + INSTALL_REQUIRES
_COMMAND_CLASS = {
COMMAND_CLASS = {
'doc': commands.SphinxDocumentation,
'build_proto_modules': commands.BuildProtoModules,
'build_project_metadata': commands.BuildProjectMetadata,
'build_py': commands.BuildPy,
'gather': commands.Gather,
'run_interop': commands.RunInterop,
}
TEST_PACKAGE_DATA = {
'tests.interop': [
'credentials/ca.pem',
'credentials/server1.key',
'credentials/server1.pem',
],
'tests.protoc_plugin': [
'protoc_plugin_test.proto',
],
'tests.unit': [
'credentials/ca.pem',
'credentials/server1.key',
'credentials/server1.pem',
],
}
TESTS_REQUIRE = (
'oauth2client>=1.4.7',
'protobuf==3.0.0a3',
'coverage>=4.0',
) + INSTALL_REQUIRES
TEST_SUITE = 'tests'
TEST_LOADER = 'tests:Loader'
TEST_RUNNER = 'tests:Runner'
PACKAGE_DATA = {}
if INSTALL_TESTS:
PACKAGE_DATA = dict(PACKAGE_DATA, **TEST_PACKAGE_DATA)
PACKAGES = setuptools.find_packages('.')
else:
PACKAGES = setuptools.find_packages('.', exclude=['tests', 'tests.*'])
setuptools.setup(
name='grpcio',
version='0.11.0b1',
ext_modules=_EXTENSION_MODULES + _CYTHON_EXTENSION_MODULES,
packages=list(_PACKAGES),
package_dir=_PACKAGE_DIRECTORIES,
install_requires=_INSTALL_REQUIRES,
setup_requires=_SETUP_REQUIRES,
cmdclass=_COMMAND_CLASS
version='0.11.0b2',
ext_modules=EXTENSION_MODULES + CYTHON_EXTENSION_MODULES,
packages=list(PACKAGES),
package_dir=PACKAGE_DIRECTORIES,
install_requires=INSTALL_REQUIRES,
setup_requires=SETUP_REQUIRES,
cmdclass=COMMAND_CLASS,
tests_require=TESTS_REQUIRE,
test_suite=TEST_SUITE,
test_loader=TEST_LOADER,
test_runner=TEST_RUNNER,
)

@ -27,68 +27,8 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A setup module for the GRPC Python interop testing package."""
from tests import _loader
from tests import _runner
import os
import os.path
import setuptools
# Ensure we're in the proper directory whether or not we're being used by pip.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Break import-style to ensure we can actually find our commands module.
import commands
_PACKAGES = setuptools.find_packages('.')
_PACKAGE_DIRECTORIES = {
'': '.',
}
_PACKAGE_DATA = {
'grpc_interop': [
'credentials/ca.pem',
'credentials/server1.key',
'credentials/server1.pem',
],
'grpc_protoc_plugin': [
'test.proto',
],
'grpc_test': [
'credentials/ca.pem',
'credentials/server1.key',
'credentials/server1.pem',
],
}
_SETUP_REQUIRES = (
'pytest>=2.6',
'pytest-cov>=2.0',
'pytest-xdist>=1.11',
'pytest-timeout>=0.5',
)
_INSTALL_REQUIRES = (
'oauth2client>=1.4.7',
'grpcio>=0.11.0b0',
# TODO(issue 3321): Unpin protobuf dependency.
'protobuf==3.0.0a3',
)
_COMMAND_CLASS = {
'test': commands.RunTests,
'build_proto_modules': commands.BuildProtoModules,
'build_py': commands.BuildPy,
}
setuptools.setup(
name='grpcio_test',
version='0.11.0b0',
packages=_PACKAGES,
package_dir=_PACKAGE_DIRECTORIES,
package_data=_PACKAGE_DATA,
install_requires=_INSTALL_REQUIRES + _SETUP_REQUIRES,
setup_requires=_SETUP_REQUIRES,
cmdclass=_COMMAND_CLASS,
)
Loader = _loader.Loader
Runner = _runner.Runner

@ -0,0 +1,119 @@
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import importlib
import pkgutil
import re
import unittest
import coverage
# Module names must end in "_test" to be collected into the suite.
TEST_MODULE_REGEX = r'^.*_test$'


class Loader(object):
  """Test loader for setuptools test suite support.

  Attributes:
    suite (unittest.TestSuite): All tests collected by the loader.
    loader (unittest.TestLoader): Standard Python unittest loader to be ran per
      module discovered.
    module_matcher (re.RegexObject): A regular expression object to match
      against module names and determine whether or not the discovered module
      contributes to the test suite.
  """

  def __init__(self):
    self.suite = unittest.TestSuite()
    self.loader = unittest.TestLoader()
    self.module_matcher = re.compile(TEST_MODULE_REGEX)

  def loadTestsFromNames(self, names, module=None):
    """Function mirroring TestLoader::loadTestsFromNames, as expected by
    setuptools.setup argument `test_loader`."""
    # ensure that we capture decorators and definitions (else our coverage
    # measure unnecessarily suffers)
    coverage_context = coverage.Coverage(data_suffix=True)
    coverage_context.start()
    modules = [importlib.import_module(name) for name in names]
    for module in modules:
      self.visit_module(module)
    for module in modules:
      try:
        package_paths = module.__path__
      except AttributeError:
        # Plain (non-package) modules have no __path__: nothing to walk.
        # The original bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only the missing attribute is expected here.
        continue
      self.walk_packages(package_paths)
    coverage_context.stop()
    coverage_context.save()
    return self.suite

  def walk_packages(self, package_paths):
    """Walks over the packages, dispatching `visit_module` calls.

    Args:
      package_paths (list): A list of paths over which to walk through modules
        along.
    """
    for importer, module_name, is_package in (
        pkgutil.iter_modules(package_paths)):
      module = importer.find_module(module_name).load_module(module_name)
      self.visit_module(module)
      if is_package:
        self.walk_packages(module.__path__)

  def visit_module(self, module):
    """Visits the module, adding discovered tests to the test suite.

    Args:
      module (module): Module to match against self.module_matcher; if matched
        it has its tests loaded via self.loader into self.suite.
    """
    if self.module_matcher.match(module.__name__):
      module_suite = self.loader.loadTestsFromModule(module)
      self.suite.addTest(module_suite)
self.suite.addTest(module_suite)
def iterate_suite_cases(suite):
  """Generator over all unittest.TestCases in a unittest.TestSuite.

  Args:
    suite (unittest.TestSuite): Suite to iterate over in the generator.

  Returns:
    generator: A generator over all unittest.TestCases in `suite`.
  """
  for member in suite:
    if isinstance(member, unittest.TestCase):
      yield member
    elif isinstance(member, unittest.TestSuite):
      # Recurse into nested suites, flattening them into this generator.
      for nested_case in iterate_suite_cases(member):
        yield nested_case
    else:
      raise ValueError('unexpected suite item of type {}'.format(type(member)))

@ -0,0 +1,451 @@
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import cStringIO as StringIO
import collections
import itertools
import traceback
import unittest
from xml.etree import ElementTree
import coverage
from tests import _loader
class CaseResult(collections.namedtuple('CaseResult', [
    'id', 'name', 'kind', 'stdout', 'stderr', 'skip_reason', 'traceback'])):
  """A serializable result of a single test case.

  Attributes:
    id (object): Any serializable object used to denote the identity of this
      test case.
    name (str or None): A human-readable name of the test case.
    kind (CaseResult.Kind): The kind of test result.
    stdout (object or None): Output on stdout, or None if nothing was captured.
    stderr (object or None): Output on stderr, or None if nothing was captured.
    skip_reason (object or None): The reason the test was skipped. Must be
      something if self.kind is CaseResult.Kind.SKIP, else None.
    traceback (object or None): The traceback of the test. Must be something if
      self.kind is CaseResult.Kind.{ERROR, FAILURE, EXPECTED_FAILURE}, else
      None.
  """

  class Kind:
    UNTESTED = 'untested'
    RUNNING = 'running'
    ERROR = 'error'
    FAILURE = 'failure'
    SUCCESS = 'success'
    SKIP = 'skip'
    EXPECTED_FAILURE = 'expected failure'
    UNEXPECTED_SUCCESS = 'unexpected success'

  def __new__(cls, id=None, name=None, kind=None, stdout=None, stderr=None,
              skip_reason=None, traceback=None):
    """Helper keyword constructor for the namedtuple.

    See this class' attributes for information on the arguments."""
    assert id is not None
    assert name is None or isinstance(name, str)
    if kind is CaseResult.Kind.UNTESTED:
      pass
    elif kind is CaseResult.Kind.RUNNING:
      pass
    elif kind is CaseResult.Kind.ERROR:
      assert traceback is not None
    elif kind is CaseResult.Kind.FAILURE:
      assert traceback is not None
    elif kind is CaseResult.Kind.SUCCESS:
      pass
    elif kind is CaseResult.Kind.SKIP:
      assert skip_reason is not None
    elif kind is CaseResult.Kind.EXPECTED_FAILURE:
      assert traceback is not None
    elif kind is CaseResult.Kind.UNEXPECTED_SUCCESS:
      pass
    else:
      assert False
    # super(CaseResult, cls), not super(cls, CaseResult): the original
    # argument order raised TypeError as soon as CaseResult was subclassed.
    return super(CaseResult, cls).__new__(
        cls, id, name, kind, stdout, stderr, skip_reason, traceback)

  def updated(self, name=None, kind=None, stdout=None, stderr=None,
              skip_reason=None, traceback=None):
    """Get a new validated CaseResult with the fields updated.

    See this class' attributes for information on the arguments.

    Note: None means "keep the existing value"; a field cannot be reset to
    None through this method.
    """
    name = self.name if name is None else name
    kind = self.kind if kind is None else kind
    stdout = self.stdout if stdout is None else stdout
    stderr = self.stderr if stderr is None else stderr
    skip_reason = self.skip_reason if skip_reason is None else skip_reason
    traceback = self.traceback if traceback is None else traceback
    return CaseResult(id=self.id, name=name, kind=kind, stdout=stdout,
                      stderr=stderr, skip_reason=skip_reason,
                      traceback=traceback)
class AugmentedResult(unittest.TestResult):
  """unittest.Result that keeps track of additional information.

  Uses CaseResult objects to store test-case results, providing additional
  information beyond that of the standard Python unittest library, such as
  standard output.

  Attributes:
    id_map (callable): A unary callable mapping unittest.TestCase objects to
      unique identifiers.
    cases (dict): A dictionary mapping from the identifiers returned by id_map
      to CaseResult objects corresponding to those IDs.
  """

  def __init__(self, id_map):
    """Initialize the object with an identifier mapping.

    Arguments:
      id_map (callable): Corresponds to the attribute `id_map`."""
    super(AugmentedResult, self).__init__()
    self.id_map = id_map
    self.cases = None

  def _update_case(self, test, **updates):
    # Merge `updates` into the stored CaseResult for this test's identifier.
    case_id = self.id_map(test)
    self.cases[case_id] = self.cases[case_id].updated(**updates)

  def startTestRun(self):
    """See unittest.TestResult.startTestRun."""
    super(AugmentedResult, self).startTestRun()
    self.cases = dict()

  def stopTestRun(self):
    """See unittest.TestResult.stopTestRun."""
    super(AugmentedResult, self).stopTestRun()

  def startTest(self, test):
    """See unittest.TestResult.startTest."""
    super(AugmentedResult, self).startTest(test)
    case_id = self.id_map(test)
    self.cases[case_id] = CaseResult(
        id=case_id, name=test.id(), kind=CaseResult.Kind.RUNNING)

  def addError(self, test, error):
    """See unittest.TestResult.addError."""
    super(AugmentedResult, self).addError(test, error)
    self._update_case(test, kind=CaseResult.Kind.ERROR, traceback=error)

  def addFailure(self, test, error):
    """See unittest.TestResult.addFailure."""
    super(AugmentedResult, self).addFailure(test, error)
    self._update_case(test, kind=CaseResult.Kind.FAILURE, traceback=error)

  def addSuccess(self, test):
    """See unittest.TestResult.addSuccess."""
    super(AugmentedResult, self).addSuccess(test)
    self._update_case(test, kind=CaseResult.Kind.SUCCESS)

  def addSkip(self, test, reason):
    """See unittest.TestResult.addSkip."""
    super(AugmentedResult, self).addSkip(test, reason)
    self._update_case(test, kind=CaseResult.Kind.SKIP, skip_reason=reason)

  def addExpectedFailure(self, test, error):
    """See unittest.TestResult.addExpectedFailure."""
    super(AugmentedResult, self).addExpectedFailure(test, error)
    self._update_case(test, kind=CaseResult.Kind.EXPECTED_FAILURE,
                      traceback=error)

  def addUnexpectedSuccess(self, test):
    """See unittest.TestResult.addUnexpectedSuccess."""
    super(AugmentedResult, self).addUnexpectedSuccess(test)
    self._update_case(test, kind=CaseResult.Kind.UNEXPECTED_SUCCESS)

  def set_output(self, test, stdout, stderr):
    """Set the output attributes for the CaseResult corresponding to a test.

    Args:
      test (unittest.TestCase): The TestCase to set the outputs of.
      stdout (str): Output from stdout to assign to self.id_map(test).
      stderr (str): Output from stderr to assign to self.id_map(test).
    """
    self._update_case(test, stdout=stdout, stderr=stderr)

  def augmented_results(self, filter):
    """Convenience method to retrieve filtered case results.

    Args:
      filter (callable): A unary predicate to filter over CaseResult objects.
    """
    return (case for case in self.cases.values() if filter(case))
class CoverageResult(AugmentedResult):
  """Extension to AugmentedResult adding coverage.py support per test.

  Attributes:
    coverage_context (coverage.Coverage): coverage.py management object.
  """

  def __init__(self, id_map):
    """See AugmentedResult.__init__."""
    super(CoverageResult, self).__init__(id_map=id_map)
    self.coverage_context = None

  def startTest(self, test):
    """See unittest.TestResult.startTest.

    Additionally initializes and begins code coverage tracking."""
    super(CoverageResult, self).startTest(test)
    # data_suffix=True keeps each test's coverage data in its own file so
    # runs do not clobber each other.
    context = coverage.Coverage(data_suffix=True)
    context.start()
    self.coverage_context = context

  def stopTest(self, test):
    """See unittest.TestResult.stopTest.

    Additionally stops and deinitializes code coverage tracking."""
    super(CoverageResult, self).stopTest(test)
    context, self.coverage_context = self.coverage_context, None
    context.stop()
    context.save()

  def stopTestRun(self):
    """See unittest.TestResult.stopTestRun."""
    super(CoverageResult, self).stopTestRun()
    # TODO(atash): Dig deeper into why the following line fails to properly
    # combine coverage data from the Cython plugin.
    #coverage.Coverage().combine()
class _Colors:
  """Namespaced constants for terminal color magic numbers."""
  # ANSI SGR escape sequences; prefix output with one of these and terminate
  # the colored span with END (which resets all terminal attributes).
  HEADER = '\033[95m'
  INFO = '\033[94m'
  OK = '\033[92m'
  WARN = '\033[93m'
  FAIL = '\033[91m'
  BOLD = '\033[1m'
  UNDERLINE = '\033[4m'
  END = '\033[0m'
class TerminalResult(CoverageResult):
  """Extension to CoverageResult adding basic terminal reporting."""

  def __init__(self, out, id_map):
    """Initialize the result object.

    Args:
      out (file-like): Output file to which terminal-colored live results will
        be written.
      id_map (callable): See AugmentedResult.__init__.
    """
    super(TerminalResult, self).__init__(id_map=id_map)
    self.out = out

  def _write_line(self, color, text):
    # Write one terminal-colored line to the output file (no flush; callers
    # flush where live visibility matters).
    self.out.write(color + text + _Colors.END)

  def startTestRun(self):
    """See unittest.TestResult.startTestRun."""
    super(TerminalResult, self).startTestRun()
    self._write_line(_Colors.HEADER, 'Testing gRPC Python...\n')

  def stopTestRun(self):
    """See unittest.TestResult.stopTestRun."""
    super(TerminalResult, self).stopTestRun()
    self.out.write(summary(self))
    self.out.flush()

  def addError(self, test, error):
    """See unittest.TestResult.addError."""
    super(TerminalResult, self).addError(test, error)
    self._write_line(_Colors.FAIL, 'ERROR {}\n'.format(test.id()))
    self.out.flush()

  def addFailure(self, test, error):
    """See unittest.TestResult.addFailure."""
    super(TerminalResult, self).addFailure(test, error)
    self._write_line(_Colors.FAIL, 'FAILURE {}\n'.format(test.id()))
    self.out.flush()

  def addSuccess(self, test):
    """See unittest.TestResult.addSuccess."""
    super(TerminalResult, self).addSuccess(test)
    self._write_line(_Colors.OK, 'SUCCESS {}\n'.format(test.id()))
    self.out.flush()

  def addSkip(self, test, reason):
    """See unittest.TestResult.addSkip."""
    super(TerminalResult, self).addSkip(test, reason)
    self._write_line(_Colors.INFO, 'SKIP {}\n'.format(test.id()))
    self.out.flush()

  def addExpectedFailure(self, test, error):
    """See unittest.TestResult.addExpectedFailure."""
    super(TerminalResult, self).addExpectedFailure(test, error)
    self._write_line(_Colors.INFO, 'FAILURE_OK {}\n'.format(test.id()))
    self.out.flush()

  def addUnexpectedSuccess(self, test):
    """See unittest.TestResult.addUnexpectedSuccess."""
    super(TerminalResult, self).addUnexpectedSuccess(test)
    self._write_line(_Colors.INFO, 'UNEXPECTED_OK {}\n'.format(test.id()))
    self.out.flush()
def _traceback_string(type, value, trace):
"""Generate a descriptive string of a Python exception traceback.
Args:
type (class): The type of the exception.
value (Exception): The value of the exception.
trace (traceback): Traceback of the exception.
Returns:
str: Formatted exception descriptive string.
"""
buffer = StringIO.StringIO()
traceback.print_exception(type, value, trace, file=buffer)
return buffer.getvalue()
def summary(result):
  """A summary string of a result object.

  Args:
    result (AugmentedResult): The result object to get the summary of.

  Returns:
    str: The summary string.
  """
  assert isinstance(result, AugmentedResult)
  untested = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.UNTESTED))
  running = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.RUNNING))
  failures = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.FAILURE))
  errors = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.ERROR))
  successes = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.SUCCESS))
  skips = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.SKIP))
  expected_failures = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.EXPECTED_FAILURE))
  unexpected_successes = list(result.augmented_results(
      lambda case_result: case_result.kind is CaseResult.Kind.UNEXPECTED_SUCCESS))
  running_names = [case.name for case in running]
  finished_count = (len(failures) + len(errors) + len(successes) +
                    len(expected_failures) + len(unexpected_successes))
  statistics = (
      '{finished} tests finished:\n'
      '\t{successful} successful\n'
      '\t{unsuccessful} unsuccessful\n'
      '\t{skipped} skipped\n'
      '\t{expected_fail} expected failures\n'
      '\t{unexpected_successful} unexpected successes\n'
      'Interrupted Tests:\n'
      '\t{interrupted}\n'
      .format(finished=finished_count,
              successful=len(successes),
              unsuccessful=(len(failures)+len(errors)),
              skipped=len(skips),
              expected_fail=len(expected_failures),
              unexpected_successful=len(unexpected_successes),
              interrupted=str(running_names)))
  # Fixed: the original read `_Colors.END + + '\n'`; the stray `+` applied
  # unary plus to a str and raised TypeError whenever there was at least one
  # failure or error to report. Loop variables also renamed from `result` so
  # they no longer shadow this function's parameter.
  tracebacks = '\n\n'.join([
      (_Colors.FAIL + '{test_name}' + _Colors.END + '\n' +
       _Colors.BOLD + 'traceback:' + _Colors.END + '\n' +
       '{traceback}\n' +
       _Colors.BOLD + 'stdout:' + _Colors.END + '\n' +
       '{stdout}\n' +
       _Colors.BOLD + 'stderr:' + _Colors.END + '\n' +
       '{stderr}\n').format(
           test_name=case_result.name,
           traceback=_traceback_string(*case_result.traceback),
           stdout=case_result.stdout, stderr=case_result.stderr)
      for case_result in itertools.chain(failures, errors)
  ])
  notes = 'Unexpected successes: {}\n'.format([
      case_result.name for case_result in unexpected_successes])
  return statistics + '\nErrors/Failures: \n' + tracebacks + '\n' + notes
def jenkins_junit_xml(result):
  """An XML tree object that when written is recognizable by Jenkins.

  Args:
    result (AugmentedResult): The result object to get the junit xml output of.

  Returns:
    ElementTree.ElementTree: The XML tree.
  """
  assert isinstance(result, AugmentedResult)
  root = ElementTree.Element('testsuites')
  suite = ElementTree.SubElement(root, 'testsuite', {
      'name': 'Python gRPC tests',
  })
  for case in result.cases.values():
    if case.kind is CaseResult.Kind.SUCCESS:
      ElementTree.SubElement(suite, 'testcase', {
          'name': case.name,
      })
    elif case.kind in (CaseResult.Kind.ERROR, CaseResult.Kind.FAILURE):
      case_xml = ElementTree.SubElement(suite, 'testcase', {
          'name': case.name,
      })
      error_xml = ElementTree.SubElement(case_xml, 'error', {})
      # Fixed: ''.format(case.stderr, case.traceback) silently discarded its
      # arguments, leaving the <error> element empty in every Jenkins report.
      error_xml.text = '{}\n{}'.format(case.stderr, case.traceback)
  return ElementTree.ElementTree(element=root)

@ -0,0 +1,224 @@
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import cStringIO as StringIO
import collections
import fcntl
import multiprocessing
import os
import select
import signal
import sys
import threading
import time
import unittest
import uuid
from tests import _loader
from tests import _result
class CapturePipe(object):
  """A context-manager pipe to redirect output to a byte array.

  Attributes:
    _redirect_fd (int): File descriptor of file to redirect writes from.
    _saved_fd (int): A copy of the original value of the redirected file
      descriptor; None after close().
    _read_thread (threading.Thread or None): Thread upon which reads through the
      pipe are performed. Only non-None when self is started.
    _read_fd (int or None): File descriptor of the read end of the redirect
      pipe. Only non-None when self is started.
    _write_fd (int or None): File descriptor of the write end of the redirect
      pipe. Only non-None when self is started.
    output (bytearray or None): Redirected output from writes to the redirected
      file descriptor. Only valid during and after self has started.
  """

  def __init__(self, fd):
    self._redirect_fd = fd
    # Duplicate the original descriptor so it can be restored on stop() and
    # used for bypass writes while redirection is active.
    self._saved_fd = os.dup(self._redirect_fd)
    self._read_thread = None
    self._read_fd = None
    self._write_fd = None
    self.output = None

  def start(self):
    """Start redirection of writes to the file descriptor."""
    self._read_fd, self._write_fd = os.pipe()
    os.dup2(self._write_fd, self._redirect_fd)
    # Non-blocking reads let the reader thread drain whatever is available
    # after select() wakes it.
    flags = fcntl.fcntl(self._read_fd, fcntl.F_GETFL)
    fcntl.fcntl(self._read_fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
    self._read_thread = threading.Thread(target=self._read)
    self._read_thread.start()

  def stop(self):
    """Stop redirection of writes to the file descriptor."""
    os.close(self._write_fd)
    os.dup2(self._saved_fd, self._redirect_fd)  # auto-close self._redirect_fd
    self._read_thread.join()
    self._read_thread = None
    # we waited for the read thread to finish, so _read_fd has been read and we
    # can close it.
    os.close(self._read_fd)
    # Fixed: reset the pipe fds so the documented "only non-None when started"
    # invariant actually holds after stop(); previously they kept pointing at
    # closed descriptors.
    self._read_fd = None
    self._write_fd = None

  def _read(self):
    """Read-thread target for self."""
    self.output = bytearray()
    while True:
      select.select([self._read_fd], [], [])
      read_bytes = os.read(self._read_fd, 1024)
      if read_bytes:
        self.output.extend(read_bytes)
      else:
        # Empty read means every write end is closed: EOF.
        break

  def write_bypass(self, value):
    """Bypass the redirection and write directly to the original file.

    Arguments:
      value (bytes): What to write to the original file.
    """
    if self._saved_fd is None:
      os.write(self._redirect_fd, value)
    else:
      os.write(self._saved_fd, value)

  def __enter__(self):
    self.start()
    return self

  def __exit__(self, type, value, traceback):
    self.stop()

  def close(self):
    """Close any resources used by self not closed by stop()."""
    os.close(self._saved_fd)
    # Fixed: mark the saved fd closed so a later write_bypass falls back to
    # the (restored) original descriptor instead of writing to a closed fd.
    self._saved_fd = None
class AugmentedCase(collections.namedtuple('AugmentedCase', [
    'case', 'id'])):
  """A test case with a guaranteed unique externally specified identifier.

  Attributes:
    case (unittest.TestCase): TestCase we're decorating with an additional
      identifier.
    id (object): Any identifier that may be considered 'unique' for testing
      purposes.
  """

  def __new__(cls, case, id=None):
    # Generate a fresh identifier when the caller did not supply one.
    if id is None:
      id = uuid.uuid4()
    # super(AugmentedCase, cls), not super(cls, AugmentedCase): the original
    # argument order raised TypeError for any subclass of AugmentedCase.
    return super(AugmentedCase, cls).__new__(cls, case, id)
class Runner(object):
  """Test runner that isolates per-case output and survives fault signals.

  Runs each test case on its own thread while capturing stdout/stderr through
  CapturePipe objects, so each case's output can be attached to its result.
  """

  def run(self, suite):
    """See setuptools' test_runner setup argument for information."""
    # Ensure that every test case has no collision with any other test case in
    # the augmented results.
    augmented_cases = [AugmentedCase(case, uuid.uuid4())
                       for case in _loader.iterate_suite_cases(suite)]
    case_id_by_case = dict((augmented_case.case, augmented_case.id)
                           for augmented_case in augmented_cases)
    # Terminal output is buffered here and flushed to the real stdout after
    # each case, since the real stdout is redirected while a case runs.
    result_out = StringIO.StringIO()
    result = _result.TerminalResult(
        result_out, id_map=lambda case: case_id_by_case[case])
    stdout_pipe = CapturePipe(sys.stdout.fileno())
    stderr_pipe = CapturePipe(sys.stderr.fileno())
    # One-element list used as a mutable cell: the signal handler (a closure)
    # must write a flag the outer scope can read.
    kill_flag = [False]

    def sigint_handler(signal_number, frame):
      if signal_number == signal.SIGINT:
        kill_flag[0] = True  # Python 2.7 not having 'local'... :-(
      # Restore the default handler so a second signal is not swallowed.
      signal.signal(signal_number, signal.SIG_DFL)

    def fault_handler(signal_number, frame):
      # Bypass the redirection: the real stdout must receive this even while a
      # case is running. os._exit avoids running cleanup in a corrupted state.
      stdout_pipe.write_bypass(
          'Received fault signal {}\nstdout:\n{}\n\nstderr:{}\n'
          .format(signal_number, stdout_pipe.output, stderr_pipe.output))
      os._exit(1)

    def check_kill_self():
      # Polled between cases and while a case runs; dumps everything captured
      # so far and exits when a SIGINT was observed.
      if kill_flag[0]:
        stdout_pipe.write_bypass('Stopping tests short...')
        result.stopTestRun()
        stdout_pipe.write_bypass(result_out.getvalue())
        stdout_pipe.write_bypass(
            '\ninterrupted stdout:\n{}\n'.format(stdout_pipe.output))
        stderr_pipe.write_bypass(
            '\ninterrupted stderr:\n{}\n'.format(stderr_pipe.output))
        os._exit(1)
    signal.signal(signal.SIGINT, sigint_handler)
    signal.signal(signal.SIGSEGV, fault_handler)
    signal.signal(signal.SIGBUS, fault_handler)
    signal.signal(signal.SIGABRT, fault_handler)
    signal.signal(signal.SIGFPE, fault_handler)
    signal.signal(signal.SIGILL, fault_handler)
    # Sometimes output will lag after a test has successfully finished; we
    # ignore such writes to our pipes.
    signal.signal(signal.SIGPIPE, signal.SIG_IGN)
    # Run the tests
    result.startTestRun()
    for augmented_case in augmented_cases:
      sys.stdout.write('Running {}\n'.format(augmented_case.case.id()))
      sys.stdout.flush()
      # Run the case on a worker thread so this thread can keep polling for
      # SIGINT-driven shutdown while the case executes.
      case_thread = threading.Thread(
          target=augmented_case.case.run, args=(result,))
      try:
        with stdout_pipe, stderr_pipe:
          case_thread.start()
          while case_thread.is_alive():
            check_kill_self()
            time.sleep(0)  # yield the GIL to the case thread between polls
          case_thread.join()
      except:
        # re-raise the exception after forcing the with-block to end
        raise
      result.set_output(
          augmented_case.case, stdout_pipe.output, stderr_pipe.output)
      sys.stdout.write(result_out.getvalue())
      sys.stdout.flush()
      # Discard the already-echoed terminal output before the next case.
      result_out.truncate(0)
      check_kill_self()
    result.stopTestRun()
    stdout_pipe.close()
    stderr_pipe.close()
    # Report results
    sys.stdout.write(result_out.getvalue())
    sys.stdout.flush()
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    with open('report.xml', 'w') as report_xml_file:
      _result.jenkins_junit_xml(result).write(report_xml_file)
    return result

@ -33,10 +33,10 @@ import unittest
from grpc.beta import implementations
from grpc_interop import _interop_test_case
from grpc_interop import methods
from grpc_interop import server
from grpc_interop import test_pb2
from tests.interop import _interop_test_case
from tests.interop import methods
from tests.interop import server
from tests.interop import test_pb2
class InsecureInteropTest(

@ -29,7 +29,7 @@
"""Common code for unit tests of the interoperability test code."""
from grpc_interop import methods
from tests.interop import methods
class InteropTestCase(object):

@ -33,12 +33,12 @@ import unittest
from grpc.beta import implementations
from grpc_test.beta import test_utilities
from tests.interop import _interop_test_case
from tests.interop import methods
from tests.interop import resources
from tests.interop import test_pb2
from grpc_interop import _interop_test_case
from grpc_interop import methods
from grpc_interop import resources
from grpc_interop import test_pb2
from tests.unit.beta import test_utilities
_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'

@ -34,11 +34,10 @@ from oauth2client import client as oauth2client_client
from grpc.beta import implementations
from grpc_test.beta import test_utilities
from grpc_interop import methods
from grpc_interop import resources
from grpc_interop import test_pb2
from tests.interop import methods
from tests.interop import resources
from tests.interop import test_pb2
from tests.unit.beta import test_utilities
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
@ -114,7 +113,7 @@ def _test_case_from_arg(test_case_arg):
raise ValueError('No test case "%s"!' % test_case_arg)
def _test_interoperability():
def test_interoperability():
args = _args()
stub = _stub(args)
test_case = _test_case_from_arg(args.test_case)
@ -122,4 +121,4 @@ def _test_interoperability():
if __name__ == '__main__':
_test_interoperability()
test_interoperability()

@ -40,9 +40,9 @@ from oauth2client import client as oauth2client_client
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import face
from grpc_interop import empty_pb2
from grpc_interop import messages_pb2
from grpc_interop import test_pb2
from tests.interop import empty_pb2
from tests.interop import messages_pb2
from tests.interop import test_pb2
_TIMEOUT = 7

@ -35,9 +35,9 @@ import time
from grpc.beta import implementations
from grpc_interop import methods
from grpc_interop import resources
from grpc_interop import test_pb2
from tests.interop import methods
from tests.interop import resources
from tests.interop import test_pb2
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
@ -68,7 +68,7 @@ def serve():
time.sleep(_ONE_DAY_IN_SECONDS)
except BaseException as e:
logging.info('Caught exception "%s"; stopping server...', e)
server.stop()
server.stop(0)
logging.info('Server stopped; exiting.')
if __name__ == '__main__':

@ -33,8 +33,8 @@
syntax = "proto3";
import "grpc_interop/empty.proto";
import "grpc_interop/messages.proto";
import "tests/interop/empty.proto";
import "tests/interop/messages.proto";
package grpc.testing;

@ -45,7 +45,7 @@ import unittest
from grpc.beta import implementations
from grpc.framework.foundation import future
from grpc.framework.interfaces.face import face
from grpc_test.framework.common import test_constants
from tests.unit.framework.common import test_constants
# Identifiers of entities we expect to find in the generated module.
SERVICER_IDENTIFIER = 'BetaTestServiceServicer'
@ -218,7 +218,7 @@ class PythonPluginTest(unittest.TestCase):
protoc_plugin_filename = distutils.spawn.find_executable(
'grpc_python_plugin')
test_proto_filename = pkg_resources.resource_filename(
'grpc_protoc_plugin', 'test.proto')
'tests.protoc_plugin', 'protoc_plugin_test.proto')
if not os.path.isfile(protoc_command):
# Assume that if we haven't built protoc that it's on the system.
protoc_command = 'protoc'
@ -237,7 +237,7 @@ class PythonPluginTest(unittest.TestCase):
]
subprocess.check_call(' '.join(cmd), shell=True, env=os.environ,
cwd=os.path.dirname(test_proto_filename))
sys.path.append(self.outdir)
sys.path.insert(0, self.outdir)
def tearDown(self):
try:
@ -245,22 +245,26 @@ class PythonPluginTest(unittest.TestCase):
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
sys.path.remove(self.outdir)
def testImportAttributes(self):
# check that we can access the generated module and its members.
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
self.assertIsNotNone(getattr(test_pb2, SERVICER_IDENTIFIER, None))
self.assertIsNotNone(getattr(test_pb2, STUB_IDENTIFIER, None))
self.assertIsNotNone(getattr(test_pb2, SERVER_FACTORY_IDENTIFIER, None))
self.assertIsNotNone(getattr(test_pb2, STUB_FACTORY_IDENTIFIER, None))
def testUpDown(self):
import test_pb2
import protoc_plugin_test_pb2 as test_pb2
reload(test_pb2)
with _CreateService(test_pb2) as (servicer, stub):
request = test_pb2.SimpleRequest(response_size=13)
def testUnaryCall(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
request = test_pb2.SimpleRequest(response_size=13)
response = stub.UnaryCall(request, test_constants.LONG_TIMEOUT)
@ -268,7 +272,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertEqual(expected_response, response)
def testUnaryCallFuture(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request = test_pb2.SimpleRequest(response_size=13)
with _CreateService(test_pb2) as (methods, stub):
# Check that the call does not block waiting for the server to respond.
@ -280,7 +285,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertEqual(expected_response, response)
def testUnaryCallFutureExpired(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
request = test_pb2.SimpleRequest(response_size=13)
with methods.pause():
@ -290,7 +296,8 @@ class PythonPluginTest(unittest.TestCase):
response_future.result()
def testUnaryCallFutureCancelled(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request = test_pb2.SimpleRequest(response_size=13)
with _CreateService(test_pb2) as (methods, stub):
with methods.pause():
@ -299,7 +306,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertTrue(response_future.cancelled())
def testUnaryCallFutureFailed(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request = test_pb2.SimpleRequest(response_size=13)
with _CreateService(test_pb2) as (methods, stub):
with methods.fail():
@ -308,7 +316,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertIsNotNone(response_future.exception())
def testStreamingOutputCall(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request = _streaming_output_request(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
responses = stub.StreamingOutputCall(
@ -320,7 +329,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertEqual(expected_response, response)
def testStreamingOutputCallExpired(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request = _streaming_output_request(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.pause():
@ -330,7 +340,8 @@ class PythonPluginTest(unittest.TestCase):
list(responses)
def testStreamingOutputCallCancelled(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request = _streaming_output_request(test_pb2)
with _CreateService(test_pb2) as (unused_methods, stub):
responses = stub.StreamingOutputCall(
@ -341,7 +352,8 @@ class PythonPluginTest(unittest.TestCase):
next(responses)
def testStreamingOutputCallFailed(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request = _streaming_output_request(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.fail():
@ -351,7 +363,8 @@ class PythonPluginTest(unittest.TestCase):
next(responses)
def testStreamingInputCall(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
response = stub.StreamingInputCall(
_streaming_input_request_iterator(test_pb2),
@ -361,7 +374,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertEqual(expected_response, response)
def testStreamingInputCallFuture(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
@ -373,7 +387,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertEqual(expected_response, response)
def testStreamingInputCallFutureExpired(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
@ -385,7 +400,8 @@ class PythonPluginTest(unittest.TestCase):
response_future.exception(), face.ExpirationError)
def testStreamingInputCallFutureCancelled(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.pause():
response_future = stub.StreamingInputCall.future(
@ -397,7 +413,8 @@ class PythonPluginTest(unittest.TestCase):
response_future.result()
def testStreamingInputCallFutureFailed(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.fail():
response_future = stub.StreamingInputCall.future(
@ -406,7 +423,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertIsNotNone(response_future.exception())
def testFullDuplexCall(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
responses = stub.FullDuplexCall(
_full_duplex_request_iterator(test_pb2), test_constants.LONG_TIMEOUT)
@ -417,7 +435,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertEqual(expected_response, response)
def testFullDuplexCallExpired(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request_iterator = _full_duplex_request_iterator(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.pause():
@ -427,7 +446,8 @@ class PythonPluginTest(unittest.TestCase):
list(responses)
def testFullDuplexCallCancelled(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
request_iterator = _full_duplex_request_iterator(test_pb2)
responses = stub.FullDuplexCall(
@ -438,7 +458,8 @@ class PythonPluginTest(unittest.TestCase):
next(responses)
def testFullDuplexCallFailed(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
request_iterator = _full_duplex_request_iterator(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
with methods.fail():
@ -449,7 +470,8 @@ class PythonPluginTest(unittest.TestCase):
next(responses)
def testHalfDuplexCall(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
with _CreateService(test_pb2) as (methods, stub):
def half_duplex_request_iterator():
request = test_pb2.StreamingOutputCallRequest()
@ -468,7 +490,8 @@ class PythonPluginTest(unittest.TestCase):
self.assertEqual(expected_response, response)
def testHalfDuplexCallWedged(self):
import test_pb2 # pylint: disable=g-import-not-at-top
import protoc_plugin_test_pb2 as test_pb2 # pylint: disable=g-import-not-at-top
reload(test_pb2)
condition = threading.Condition()
wait_cell = [False]
@contextlib.contextmanager

@ -34,7 +34,7 @@ import unittest
from grpc import _grpcio_metadata
from grpc._adapter import _types
from grpc._adapter import _low
from grpc_test import test_common
from tests.unit import test_common
def wait_for_events(completion_queues, deadline):

@ -32,7 +32,7 @@
import abc
import threading
from grpc_test._junkdrawer import math_pb2
from tests.unit._junkdrawer import math_pb2
class ProtoScenario(object):

@ -41,10 +41,10 @@ from grpc._links import service
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.core import implementations
from grpc.framework.interfaces.base import utilities
from grpc_test import test_common as grpc_test_common
from grpc_test.framework.common import test_constants
from grpc_test.framework.interfaces.base import test_cases
from grpc_test.framework.interfaces.base import test_interfaces
from tests.unit import test_common as grpc_test_common
from tests.unit.framework.common import test_constants
from tests.unit.framework.interfaces.base import test_cases
from tests.unit.framework.interfaces.base import test_interfaces
class _SerializationBehaviors(

@ -40,10 +40,10 @@ from grpc.framework.core import implementations as core_implementations
from grpc.framework.crust import implementations as crust_implementations
from grpc.framework.foundation import logging_pool
from grpc.framework.interfaces.links import utilities
from grpc_test import test_common as grpc_test_common
from grpc_test.framework.common import test_constants
from grpc_test.framework.interfaces.face import test_cases
from grpc_test.framework.interfaces.face import test_interfaces
from tests.unit import test_common as grpc_test_common
from tests.unit.framework.common import test_constants
from tests.unit.framework.interfaces.face import test_cases
from tests.unit.framework.interfaces.face import test_interfaces
class _SerializationBehaviors(

@ -31,8 +31,8 @@ import time
import unittest
from grpc._cython import cygrpc
from grpc_test._cython import test_utilities
from grpc_test import test_common
from tests.unit._cython import test_utilities
from tests.unit import test_common
class TypeSmokeTest(unittest.TestCase):

@ -34,9 +34,9 @@ import unittest
from grpc._adapter import _intermediary_low
from grpc._links import invocation
from grpc.framework.interfaces.links import links
from grpc_test.framework.common import test_constants
from grpc_test.framework.interfaces.links import test_cases
from grpc_test.framework.interfaces.links import test_utilities
from tests.unit.framework.common import test_constants
from tests.unit.framework.interfaces.links import test_cases
from tests.unit.framework.interfaces.links import test_utilities
_NULL_BEHAVIOR = lambda unused_argument: None

@ -32,8 +32,8 @@
import abc
import threading
from grpc_test._junkdrawer import math_pb2
from grpc_test.framework.common import test_constants
from tests.unit._junkdrawer import math_pb2
from tests.unit.framework.common import test_constants
class ProtoScenario(object):

@ -36,11 +36,11 @@ from grpc._links import invocation
from grpc._links import service
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.interfaces.links import links
from grpc_test import test_common
from grpc_test._links import _proto_scenarios
from grpc_test.framework.common import test_constants
from grpc_test.framework.interfaces.links import test_cases
from grpc_test.framework.interfaces.links import test_utilities
from tests.unit import test_common
from tests.unit._links import _proto_scenarios
from tests.unit.framework.common import test_constants
from tests.unit.framework.interfaces.links import test_cases
from tests.unit.framework.interfaces.links import test_utilities
_IDENTITY = lambda x: x

@ -36,9 +36,9 @@ from grpc.beta import implementations
from grpc.beta import interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities
from grpc_test import resources
from grpc_test.beta import test_utilities
from grpc_test.framework.common import test_constants
from tests.unit import resources
from tests.unit.beta import test_utilities
from tests.unit.framework.common import test_constants
_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
@ -224,5 +224,78 @@ class BetaFeaturesTest(unittest.TestCase):
self.assertEqual(_RESPONSE, response)
class ContextManagementAndLifecycleTest(unittest.TestCase):
def setUp(self):
self._servicer = _Servicer()
self._method_implementations = {
(_GROUP, _UNARY_UNARY):
utilities.unary_unary_inline(self._servicer.unary_unary),
(_GROUP, _UNARY_STREAM):
utilities.unary_stream_inline(self._servicer.unary_stream),
(_GROUP, _STREAM_UNARY):
utilities.stream_unary_inline(self._servicer.stream_unary),
(_GROUP, _STREAM_STREAM):
utilities.stream_stream_inline(self._servicer.stream_stream),
}
self._cardinalities = {
_UNARY_UNARY: cardinality.Cardinality.UNARY_UNARY,
_UNARY_STREAM: cardinality.Cardinality.UNARY_STREAM,
_STREAM_UNARY: cardinality.Cardinality.STREAM_UNARY,
_STREAM_STREAM: cardinality.Cardinality.STREAM_STREAM,
}
self._server_options = implementations.server_options(
thread_pool_size=test_constants.POOL_SIZE)
self._server_credentials = implementations.ssl_server_credentials(
[(resources.private_key(), resources.certificate_chain(),),])
self._client_credentials = implementations.ssl_client_credentials(
resources.test_root_certificates(), None, None)
self._stub_options = implementations.stub_options(
thread_pool_size=test_constants.POOL_SIZE)
def test_stub_context(self):
server = implementations.server(
self._method_implementations, options=self._server_options)
port = server.add_secure_port('[::]:0', self._server_credentials)
server.start()
channel = test_utilities.not_really_secure_channel(
'localhost', port, self._client_credentials, _SERVER_HOST_OVERRIDE)
dynamic_stub = implementations.dynamic_stub(
channel, _GROUP, self._cardinalities, options=self._stub_options)
for _ in range(100):
with dynamic_stub:
pass
for _ in range(10):
with dynamic_stub:
call_options = interfaces.grpc_call_options(
disable_compression=True)
response = getattr(dynamic_stub, _UNARY_UNARY)(
_REQUEST, test_constants.LONG_TIMEOUT,
protocol_options=call_options)
self.assertEqual(_RESPONSE, response)
self.assertIsNotNone(self._servicer.peer())
server.stop(test_constants.SHORT_TIMEOUT).wait()
def test_server_lifecycle(self):
for _ in range(100):
server = implementations.server(
self._method_implementations, options=self._server_options)
port = server.add_secure_port('[::]:0', self._server_credentials)
server.start()
server.stop(test_constants.SHORT_TIMEOUT).wait()
for _ in range(100):
server = implementations.server(
self._method_implementations, options=self._server_options)
server.add_secure_port('[::]:0', self._server_credentials)
server.add_insecure_port('[::]:0')
with server:
server.stop(test_constants.SHORT_TIMEOUT)
server.stop(test_constants.SHORT_TIMEOUT)
if __name__ == '__main__':
unittest.main(verbosity=2)

@ -37,7 +37,7 @@ from grpc._adapter import _low
from grpc._adapter import _types
from grpc.beta import _connectivity_channel
from grpc.beta import interfaces
from grpc_test.framework.common import test_constants
from tests.unit.framework.common import test_constants
def _drive_completion_queue(completion_queue):

@ -34,12 +34,12 @@ import unittest
from grpc.beta import implementations
from grpc.beta import interfaces
from grpc_test import resources
from grpc_test import test_common as grpc_test_common
from grpc_test.beta import test_utilities
from grpc_test.framework.common import test_constants
from grpc_test.framework.interfaces.face import test_cases
from grpc_test.framework.interfaces.face import test_interfaces
from tests.unit import resources
from tests.unit import test_common as grpc_test_common
from tests.unit.beta import test_utilities
from tests.unit.framework.common import test_constants
from tests.unit.framework.interfaces.face import test_cases
from tests.unit.framework.interfaces.face import test_interfaces
_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'

@ -34,7 +34,7 @@ import unittest
from grpc.beta import implementations
from grpc.beta import interfaces
from grpc.framework.interfaces.face import face
from grpc_test.framework.common import test_constants
from tests.unit.framework.common import test_constants
class NotFoundTest(unittest.TestCase):

@ -38,7 +38,7 @@ from grpc._adapter import _types
from grpc.beta import implementations
from grpc.beta import utilities
from grpc.framework.foundation import future
from grpc_test.framework.common import test_constants
from tests.unit.framework.common import test_constants
def _drive_completion_queue(completion_queue):

@ -36,10 +36,10 @@ from grpc.framework.core import implementations as core_implementations
from grpc.framework.crust import implementations as crust_implementations
from grpc.framework.foundation import logging_pool
from grpc.framework.interfaces.links import utilities
from grpc_test.framework.common import test_constants
from grpc_test.framework.interfaces.face import test_cases
from grpc_test.framework.interfaces.face import test_interfaces
from grpc_test.framework.interfaces.links import test_utilities
from tests.unit.framework.common import test_constants
from tests.unit.framework.interfaces.face import test_cases
from tests.unit.framework.interfaces.face import test_interfaces
from tests.unit.framework.interfaces.links import test_utilities
class _Implementation(test_interfaces.Implementation):

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save