Merge branch 'master' into wheezy

pull/6863/head
Vijay Pai 9 years ago
commit ddf694cc62
  1. examples/python/route_guide/route_guide_server.py (2)
  2. src/core/ext/client_config/lb_policy.c (5)
  3. src/cpp/server/server_posix.cc (2)
  4. src/objective-c/GRPCClient/GRPCCall.m (1)
  5. src/php/bin/stress_client.sh (35)
  6. src/php/tests/interop/stress_client.php (2)
  7. src/python/grpcio/grpc/__init__.py (50)
  8. src/python/grpcio/grpc/_adapter/_types.py (2)
  9. src/python/grpcio/grpc/_common.py (4)
  10. src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi (2)
  11. src/python/grpcio/grpc/beta/interfaces.py (3)
  12. src/python/grpcio/tests/qps/benchmark_client.py (9)
  13. src/python/grpcio/tests/tests.json (4)
  14. src/python/grpcio/tests/unit/_api_test.py (104)
  15. src/python/grpcio/tests/unit/_from_grpc_import_star.py (38)
  16. src/python/grpcio/tests/unit/_metadata_test.py (216)
  17. src/python/grpcio/tests/unit/beta/_connectivity_channel_test.py (10)
  18. templates/tools/dockerfile/run_tests_addons.include (7)
  19. templates/tools/dockerfile/run_tests_addons_nocache.include (6)
  20. templates/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile.template (65)
  21. templates/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile.template (3)
  22. test/cpp/interop/server_main.cc (16)
  23. test/cpp/qps/client.h (35)
  24. third_party/protobuf (2)
  25. tools/buildgen/plugins/make_fuzzer_tests.py (2)
  26. tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile (140)
  27. tools/dockerfile/stress_test/grpc_interop_stress_php/build_interop_stress.sh (54)
  28. tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile (8)
  29. tools/run_tests/run_tests.py (20)
  30. tools/run_tests/sanity/check_submodules.sh (4)
  31. tools/run_tests/stress_test/configs/php-cxx.json (93)
  32. tools/run_tests/tests.json (12906)

@ -51,7 +51,7 @@ def get_distance(start, end):
coord_factor = 10000000.0
lat_1 = start.latitude / coord_factor
lat_2 = end.latitude / coord_factor
lon_1 = start.latitude / coord_factor
lon_1 = start.longitude / coord_factor
lon_2 = end.longitude / coord_factor
lat_rad_1 = math.radians(lat_1)
lat_rad_2 = math.radians(lat_2)

@ -60,8 +60,9 @@ static gpr_atm ref_mutate(grpc_lb_policy *c, gpr_atm delta,
: gpr_atm_no_barrier_fetch_add(&c->ref_pair, delta);
#ifdef GRPC_LB_POLICY_REFCOUNT_DEBUG
gpr_log(file, line, GPR_LOG_SEVERITY_DEBUG,
"LB_POLICY: %p % 12s 0x%08x -> 0x%08x [%s]", c, purpose, old_val,
old_val + delta, reason);
"LB_POLICY: 0x%" PRIxPTR " %12s 0x%" PRIxPTR " -> 0x%" PRIxPTR
" [%s]",
(intptr_t)c, purpose, old_val, old_val + delta, reason);
#endif
return old_val;
}

@ -42,8 +42,8 @@ namespace grpc {
void AddInsecureChannelFromFd(Server* server, int fd) {
grpc_server_add_insecure_channel_from_fd(
server->c_server(), server->completion_queue()->cq(), fd);
}
#endif // GPR_SUPPORT_CHANNELS_FROM_FD
}
} // namespace grpc

@ -76,7 +76,6 @@ NSString * const kGRPCTrailersKey = @"io.grpc.TrailersKey";
NSString *_host;
NSString *_path;
GRPCWrappedCall *_wrappedCall;
dispatch_once_t _callAlreadyInvoked;
GRPCConnectivityMonitor *_connectivityMonitor;
// The C gRPC library has less guarantees on the ordering of events than we

@ -0,0 +1,35 @@
#!/bin/bash
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
set -e
cd $(dirname $0)
source ./determine_extension_dir.sh
php $extension_dir -d max_execution_time=300 \
../tests/interop/stress_client.php $@ 1>&2

@ -102,7 +102,7 @@ if (empty($raw_args['server_addresses'])) {
}
$args['metrics_port'] = empty($raw_args['metrics_port']) ?
'8081' : $args['metrics_port'];
'8081' : $raw_args['metrics_port'];
$args['test_duration_secs'] = empty($raw_args['test_duration_secs']) ||
$raw_args['test_duration_secs'] == -1 ?

@ -212,14 +212,14 @@ class ChannelConnectivity(enum.Enum):
READY: The channel is ready to conduct RPCs.
TRANSIENT_FAILURE: The channel has seen a failure from which it expects to
recover.
FATAL_FAILURE: The channel has seen a failure from which it cannot recover.
SHUTDOWN: The channel has seen a failure from which it cannot recover.
"""
IDLE = (_cygrpc.ConnectivityState.idle, 'idle')
CONNECTING = (_cygrpc.ConnectivityState.connecting, 'connecting')
READY = (_cygrpc.ConnectivityState.ready, 'ready')
TRANSIENT_FAILURE = (
_cygrpc.ConnectivityState.transient_failure, 'transient failure')
FATAL_FAILURE = (_cygrpc.ConnectivityState.fatal_failure, 'fatal failure')
SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, 'shutdown')
@enum.unique
@ -1193,3 +1193,49 @@ def server(generic_rpc_handlers, thread_pool, options=None):
"""
from grpc import _server
return _server.Server(generic_rpc_handlers, thread_pool)
################################### __all__ #################################
__all__ = (
'FutureTimeoutError',
'FutureCancelledError',
'Future',
'ChannelConnectivity',
'StatusCode',
'RpcError',
'RpcContext',
'Call',
'ChannelCredentials',
'CallCredentials',
'AuthMetadataContext',
'AuthMetadataPluginCallback',
'AuthMetadataPlugin',
'ServerCredentials',
'UnaryUnaryMultiCallable',
'UnaryStreamMultiCallable',
'StreamUnaryMultiCallable',
'StreamStreamMultiCallable',
'Channel',
'ServicerContext',
'RpcMethodHandler',
'HandlerCallDetails',
'GenericRpcHandler',
'Server',
'unary_unary_rpc_method_handler',
'unary_stream_rpc_method_handler',
'stream_unary_rpc_method_handler',
'stream_stream_rpc_method_handler',
'method_handlers_generic_handler',
'ssl_channel_credentials',
'metadata_call_credentials',
'access_token_call_credentials',
'composite_call_credentials',
'composite_channel_credentials',
'ssl_server_credentials',
'channel_ready_future',
'insecure_channel',
'secure_channel',
'server',
)
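
With the explicit __all__ above, the package's public surface is spelled out, and the renamed SHUTDOWN state replaces the old FATAL_FAILURE. A minimal, hedged usage sketch of two of the exported names (the address is a placeholder, not from the diff):

import grpc

channel = grpc.insecure_channel('localhost:50051')      # placeholder address
# channel_ready_future is one of the names listed in __all__ above.
grpc.channel_ready_future(channel).result(timeout=10)   # blocks until READY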

@ -114,7 +114,7 @@ class ConnectivityState(enum.IntEnum):
CONNECTING = cygrpc.ConnectivityState.connecting
READY = cygrpc.ConnectivityState.ready
TRANSIENT_FAILURE = cygrpc.ConnectivityState.transient_failure
FATAL_FAILURE = cygrpc.ConnectivityState.fatal_failure
FATAL_FAILURE = cygrpc.ConnectivityState.shutdown
class Status(collections.namedtuple(

@ -46,8 +46,8 @@ CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
cygrpc.ConnectivityState.ready: grpc.ChannelConnectivity.READY,
cygrpc.ConnectivityState.transient_failure:
grpc.ChannelConnectivity.TRANSIENT_FAILURE,
cygrpc.ConnectivityState.fatal_failure:
grpc.ChannelConnectivity.FATAL_FAILURE,
cygrpc.ConnectivityState.shutdown:
grpc.ChannelConnectivity.SHUTDOWN,
}
CYGRPC_STATUS_CODE_TO_STATUS_CODE = {

@ -33,7 +33,7 @@ class ConnectivityState:
connecting = GRPC_CHANNEL_CONNECTING
ready = GRPC_CHANNEL_READY
transient_failure = GRPC_CHANNEL_TRANSIENT_FAILURE
fatal_failure = GRPC_CHANNEL_SHUTDOWN
shutdown = GRPC_CHANNEL_SHUTDOWN
class ChannelArgKey:

@ -36,6 +36,9 @@ import six
import grpc
ChannelConnectivity = grpc.ChannelConnectivity
# FATAL_FAILURE was a Beta-API name for SHUTDOWN
ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN
StatusCode = grpc.StatusCode
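
A consequence of the alias added above: Beta-API code that still references FATAL_FAILURE keeps working because it now resolves to the SHUTDOWN member. A tiny sketch of what that is expected to guarantee:

from grpc.beta import interfaces

# The Beta-era name and the new stable name refer to the same enum member.
assert (interfaces.ChannelConnectivity.FATAL_FAILURE
        is interfaces.ChannelConnectivity.SHUTDOWN)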

@ -30,11 +30,13 @@
"""Defines test client behaviors (UNARY/STREAMING) (SYNC/ASYNC)."""
import abc
import threading
import time
from concurrent import futures
from six.moves import queue
import grpc
from grpc.beta import implementations
from grpc.framework.interfaces.face import face
from src.proto.grpc.testing import messages_pb2
@ -62,6 +64,13 @@ class BenchmarkClient:
else:
channel = implementations.insecure_channel(host, port)
connected_event = threading.Event()
def wait_for_ready(connectivity):
if connectivity == grpc.ChannelConnectivity.READY:
connected_event.set()
channel.subscribe(wait_for_ready, try_to_connect=True)
connected_event.wait()
if config.payload_config.WhichOneof('payload') == 'simple_params':
self._generic = False
self._stub = services_pb2.beta_create_BenchmarkService_stub(channel)
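
The benchmark client now blocks until the channel reports READY before issuing load. The same subscribe/event pattern can be packaged as a standalone helper; a hedged sketch (the helper name and timeout handling are not part of the diff):

import threading
import grpc

def wait_for_channel_ready(channel, timeout=None):
    """Block until `channel` reports READY; helper name is hypothetical."""
    ready = threading.Event()

    def _on_state(state):
        if state is grpc.ChannelConnectivity.READY:
            ready.set()

    channel.subscribe(_on_state, try_to_connect=True)
    try:
        return ready.wait(timeout)
    finally:
        channel.unsubscribe(_on_state)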

@ -1,4 +1,6 @@
[
"_api_test.AllTest",
"_api_test.ChannelConnectivityTest",
"_auth_test.AccessTokenCallCredentialsTest",
"_auth_test.GoogleCallCredentialsTest",
"_base_interface_test.AsyncEasyTest",
@ -12,6 +14,7 @@
"_channel_ready_future_test.ChannelReadyFutureTest",
"_channel_test.ChannelTest",
"_connectivity_channel_test.ChannelConnectivityTest",
"_connectivity_channel_test.ConnectivityStatesTest",
"_core_over_links_base_interface_test.AsyncEasyTest",
"_core_over_links_base_interface_test.AsyncPeasyTest",
"_core_over_links_base_interface_test.SyncEasyTest",
@ -48,6 +51,7 @@
"_lonely_invocation_link_test.LonelyInvocationLinkTest",
"_low_test.HangingServerShutdown",
"_low_test.InsecureServerInsecureClient",
"_metadata_test.MetadataTest",
"_not_found_test.NotFoundTest",
"_python_plugin_test.PythonPluginTest",
"_read_some_but_not_all_responses_test.ReadSomeButNotAllResponsesTest",

@ -0,0 +1,104 @@
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test of gRPC Python's application-layer API."""
import unittest
import six
import grpc
from tests.unit import _from_grpc_import_star
class AllTest(unittest.TestCase):
def testAll(self):
expected_grpc_code_elements = (
'FutureTimeoutError',
'FutureCancelledError',
'Future',
'ChannelConnectivity',
'StatusCode',
'RpcError',
'RpcContext',
'Call',
'ChannelCredentials',
'CallCredentials',
'AuthMetadataContext',
'AuthMetadataPluginCallback',
'AuthMetadataPlugin',
'ServerCredentials',
'UnaryUnaryMultiCallable',
'UnaryStreamMultiCallable',
'StreamUnaryMultiCallable',
'StreamStreamMultiCallable',
'Channel',
'ServicerContext',
'RpcMethodHandler',
'HandlerCallDetails',
'GenericRpcHandler',
'Server',
'unary_unary_rpc_method_handler',
'unary_stream_rpc_method_handler',
'stream_unary_rpc_method_handler',
'stream_stream_rpc_method_handler',
'method_handlers_generic_handler',
'ssl_channel_credentials',
'metadata_call_credentials',
'access_token_call_credentials',
'composite_call_credentials',
'composite_channel_credentials',
'ssl_server_credentials',
'channel_ready_future',
'insecure_channel',
'secure_channel',
'server',
)
six.assertCountEqual(
self, expected_grpc_code_elements,
_from_grpc_import_star.GRPC_ELEMENTS)
class ChannelConnectivityTest(unittest.TestCase):
def testChannelConnectivity(self):
self.assertSequenceEqual(
(grpc.ChannelConnectivity.IDLE,
grpc.ChannelConnectivity.CONNECTING,
grpc.ChannelConnectivity.READY,
grpc.ChannelConnectivity.TRANSIENT_FAILURE,
grpc.ChannelConnectivity.SHUTDOWN,),
tuple(grpc.ChannelConnectivity))
if __name__ == '__main__':
unittest.main(verbosity=2)

@ -0,0 +1,38 @@
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
_BEFORE_IMPORT = tuple(globals())
from grpc import *
_AFTER_IMPORT = tuple(globals())
GRPC_ELEMENTS = tuple(
element for element in _AFTER_IMPORT
if element not in _BEFORE_IMPORT and element != '_BEFORE_IMPORT')

@ -0,0 +1,216 @@
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests server and client side metadata API."""
import unittest
import weakref
import grpc
from grpc import _grpcio_metadata
from grpc.framework.foundation import logging_pool
from tests.unit import test_common
from tests.unit.framework.common import test_constants
_CHANNEL_ARGS = (('grpc.primary_user_agent', 'primary-agent'),
('grpc.secondary_user_agent', 'secondary-agent'))
_REQUEST = b'\x00\x00\x00'
_RESPONSE = b'\x00\x00\x00'
_UNARY_UNARY = b'/test/UnaryUnary'
_UNARY_STREAM = b'/test/UnaryStream'
_STREAM_UNARY = b'/test/StreamUnary'
_STREAM_STREAM = b'/test/StreamStream'
_USER_AGENT = 'Python-gRPC-{}'.format(_grpcio_metadata.__version__)
_CLIENT_METADATA = (
(b'client-md-key', b'client-md-key'),
(b'client-md-key-bin', b'\x00\x01')
)
_SERVER_INITIAL_METADATA = (
(b'server-initial-md-key', b'server-initial-md-value'),
(b'server-initial-md-key-bin', b'\x00\x02')
)
_SERVER_TRAILING_METADATA = (
(b'server-trailing-md-key', b'server-trailing-md-value'),
(b'server-trailing-md-key-bin', b'\x00\x03')
)
def user_agent(metadata):
for key, val in metadata:
if key == b'user-agent':
return val.decode('ascii')
raise KeyError('No user agent!')
def validate_client_metadata(test, servicer_context):
test.assertTrue(test_common.metadata_transmitted(
_CLIENT_METADATA, servicer_context.invocation_metadata()))
test.assertTrue(user_agent(servicer_context.invocation_metadata())
.startswith('primary-agent ' + _USER_AGENT))
test.assertTrue(user_agent(servicer_context.invocation_metadata())
.endswith('secondary-agent'))
def handle_unary_unary(test, request, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
return _RESPONSE
def handle_unary_stream(test, request, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
for _ in range(test_constants.STREAM_LENGTH):
yield _RESPONSE
def handle_stream_unary(test, request_iterator, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
# TODO(issue:#6891) We should be able to remove this loop
for request in request_iterator:
pass
return _RESPONSE
def handle_stream_stream(test, request_iterator, servicer_context):
validate_client_metadata(test, servicer_context)
servicer_context.send_initial_metadata(_SERVER_INITIAL_METADATA)
servicer_context.set_trailing_metadata(_SERVER_TRAILING_METADATA)
# TODO(issue:#6891) We should be able to remove this loop,
# and replace with return; yield
for request in request_iterator:
yield _RESPONSE
class _MethodHandler(grpc.RpcMethodHandler):
def __init__(self, test, request_streaming, response_streaming):
self.request_streaming = request_streaming
self.response_streaming = response_streaming
self.request_deserializer = None
self.response_serializer = None
self.unary_unary = None
self.unary_stream = None
self.stream_unary = None
self.stream_stream = None
if self.request_streaming and self.response_streaming:
self.stream_stream = lambda x, y: handle_stream_stream(test, x, y)
elif self.request_streaming:
self.stream_unary = lambda x, y: handle_stream_unary(test, x, y)
elif self.response_streaming:
self.unary_stream = lambda x, y: handle_unary_stream(test, x, y)
else:
self.unary_unary = lambda x, y: handle_unary_unary(test, x, y)
class _GenericHandler(grpc.GenericRpcHandler):
def __init__(self, test):
self._test = test
def service(self, handler_call_details):
if handler_call_details.method == _UNARY_UNARY:
return _MethodHandler(self._test, False, False)
elif handler_call_details.method == _UNARY_STREAM:
return _MethodHandler(self._test, False, True)
elif handler_call_details.method == _STREAM_UNARY:
return _MethodHandler(self._test, True, False)
elif handler_call_details.method == _STREAM_STREAM:
return _MethodHandler(self._test, True, True)
else:
return None
class MetadataTest(unittest.TestCase):
def setUp(self):
self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
self._server = grpc.server((_GenericHandler(weakref.proxy(self)),),
self._server_pool)
port = self._server.add_insecure_port('[::]:0')
self._server.start()
self._channel = grpc.insecure_channel('localhost:%d' % port,
options=_CHANNEL_ARGS)
def tearDown(self):
self._server.stop(0)
def testUnaryUnary(self):
multi_callable = self._channel.unary_unary(_UNARY_UNARY)
unused_response, call = multi_callable(
_REQUEST, metadata=_CLIENT_METADATA, with_call=True)
self.assertTrue(test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, call.initial_metadata()))
self.assertTrue(test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA, call.trailing_metadata()))
def testUnaryStream(self):
multi_callable = self._channel.unary_stream(_UNARY_STREAM)
call = multi_callable(_REQUEST, metadata=_CLIENT_METADATA)
self.assertTrue(test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, call.initial_metadata()))
for _ in call:
pass
self.assertTrue(test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA, call.trailing_metadata()))
def testStreamUnary(self):
multi_callable = self._channel.stream_unary(_STREAM_UNARY)
unused_response, call = multi_callable(
[_REQUEST] * test_constants.STREAM_LENGTH,
metadata=_CLIENT_METADATA, with_call=True)
self.assertTrue(test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, call.initial_metadata()))
self.assertTrue(test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA, call.trailing_metadata()))
def testStreamStream(self):
multi_callable = self._channel.stream_stream(_STREAM_STREAM)
call = multi_callable([_REQUEST] * test_constants.STREAM_LENGTH,
metadata=_CLIENT_METADATA)
self.assertTrue(test_common.metadata_transmitted(
_SERVER_INITIAL_METADATA, call.initial_metadata()))
for _ in call:
pass
self.assertTrue(test_common.metadata_transmitted(
_SERVER_TRAILING_METADATA, call.trailing_metadata()))
if __name__ == '__main__':
unittest.main(verbosity=2)
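
The new test above exercises custom metadata end to end against a real in-process server. For reference, a hedged sketch of just the client-side calling pattern it relies on (address, method path, and metadata values are illustrative, not from the diff):

import grpc

channel = grpc.insecure_channel('localhost:50051')        # placeholder address
multi_callable = channel.unary_unary('/test/UnaryUnary')  # illustrative path
response, call = multi_callable(
    b'\x00\x00\x00',
    metadata=((b'client-md-key', b'client-md-value'),),    # illustrative metadata
    with_call=True)
print(call.initial_metadata(), call.trailing_metadata())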

@ -187,5 +187,15 @@ class ChannelConnectivityTest(unittest.TestCase):
server_completion_queue_thread.join()
class ConnectivityStatesTest(unittest.TestCase):
def testBetaConnectivityStates(self):
self.assertIsNotNone(interfaces.ChannelConnectivity.IDLE)
self.assertIsNotNone(interfaces.ChannelConnectivity.CONNECTING)
self.assertIsNotNone(interfaces.ChannelConnectivity.READY)
self.assertIsNotNone(interfaces.ChannelConnectivity.TRANSIENT_FAILURE)
self.assertIsNotNone(interfaces.ChannelConnectivity.FATAL_FAILURE)
if __name__ == '__main__':
unittest.main(verbosity=2)

@ -1,7 +1,2 @@
<%include file="ccache_setup.include"/>
#======================
# Zookeeper dependencies
# TODO(jtattermusch): is zookeeper still needed?
RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
<%include file="run_tests_addons_nocache.include"/>

@ -0,0 +1,6 @@
#======================
# Zookeeper dependencies
# TODO(jtattermusch): is zookeeper still needed?
RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins

@ -0,0 +1,65 @@
%YAML 1.2
--- |
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
FROM debian:jessie
<%include file="../../apt_get_basic.include"/>
<%include file="../../ruby_deps.include"/>
<%include file="../../gcp_api_libraries.include"/>
<%include file="../../php_deps.include"/>
<%include file="../../run_tests_addons.include"/>
# ronn: a ruby tool used to convert markdown to man pages, used during the
# install of Protobuf extensions
#
# rake: a ruby version of make used to build the PHP Protobuf extension
RUN /bin/bash -l -c "rvm all do gem install ronn rake"
# Install composer
RUN curl -sS https://getcomposer.org/installer | php
RUN mv composer.phar /usr/local/bin/composer
# As an attempt to work around #4212, try to prefetch Protobuf-PHP dependency
# into composer cache to prevent "composer install" from cloning on each build.
RUN git clone --mirror https://github.com/stanley-cheung/Protobuf-PHP.git ${'\\'}
/root/.composer/cache/vcs/git-github.com-stanley-cheung-Protobuf-PHP.git/
# Download the patched PHP protobuf so that PHP gRPC clients can be generated
# from proto3 schemas.
RUN git clone https://github.com/stanley-cheung/Protobuf-PHP.git /var/local/git/protobuf-php
RUN /bin/bash -l -c "rvm use ruby-2.1 ${'\\'}
&& cd /var/local/git/protobuf-php ${'\\'}
&& rvm all do rake pear:package version=1.0 ${'\\'}
&& pear install Protobuf-1.0.tgz"
# Define the default command.
CMD ["bash"]

@ -33,7 +33,6 @@
<%include file="../../apt_get_basic.include"/>
<%include file="../../cxx_deps.include"/>
<%include file="../../run_tests_addons.include"/>
<%include file="../../run_tests_addons_nocache.include"/>
# Define the default command.
CMD ["bash"]

@ -181,6 +181,14 @@ class TestServiceImpl : public TestService::Service {
response.mutable_payload())) {
return Status(grpc::StatusCode::INTERNAL, "Error creating payload.");
}
int time_us;
if ((time_us = request->response_parameters(i).interval_us()) > 0) {
// Sleep before response if needed
gpr_timespec sleep_time =
gpr_time_add(gpr_now(GPR_CLOCK_REALTIME),
gpr_time_from_micros(time_us, GPR_TIMESPAN));
gpr_sleep_until(sleep_time);
}
write_success = writer->Write(response);
}
if (write_success) {
@ -218,6 +226,14 @@ class TestServiceImpl : public TestService::Service {
response.mutable_payload()->set_type(request.payload().type());
response.mutable_payload()->set_body(
grpc::string(request.response_parameters(0).size(), '\0'));
int time_us;
if ((time_us = request.response_parameters(0).interval_us()) > 0) {
// Sleep before response if needed
gpr_timespec sleep_time =
gpr_time_add(gpr_now(GPR_CLOCK_REALTIME),
gpr_time_from_micros(time_us, GPR_TIMESPAN));
gpr_sleep_until(sleep_time);
}
write_success = stream->Write(response);
}
}

@ -125,13 +125,15 @@ class Client {
if (reset) {
Histogram* to_merge = new Histogram[threads_.size()];
for (size_t i = 0; i < threads_.size(); i++) {
threads_[i]->Swap(&to_merge[i]);
latencies.Merge(to_merge[i]);
threads_[i]->BeginSwap(&to_merge[i]);
}
delete[] to_merge;
std::unique_ptr<UsageTimer> timer(new UsageTimer);
timer_.swap(timer);
for (size_t i = 0; i < threads_.size(); i++) {
threads_[i]->EndSwap();
latencies.Merge(to_merge[i]);
}
delete[] to_merge;
timer_result = timer->Mark();
} else {
// merge snapshots of each thread histogram
@ -213,6 +215,7 @@ class Client {
public:
Thread(Client* client, size_t idx)
: done_(false),
new_stats_(nullptr),
client_(client),
idx_(idx),
impl_(&Thread::ThreadFunc, this) {}
@ -225,9 +228,16 @@ class Client {
impl_.join();
}
void Swap(Histogram* n) {
void BeginSwap(Histogram* n) {
std::lock_guard<std::mutex> g(mu_);
n->Swap(&histogram_);
new_stats_ = n;
}
void EndSwap() {
std::unique_lock<std::mutex> g(mu_);
while (new_stats_ != nullptr) {
cv_.wait(g);
};
}
void MergeStatsInto(Histogram* hist) {
@ -241,11 +251,10 @@ class Client {
void ThreadFunc() {
for (;;) {
// lock since the thread should only be doing one thing at a time
std::lock_guard<std::mutex> g(mu_);
// run the loop body
const bool thread_still_ok = client_->ThreadFunc(&histogram_, idx_);
// see if we're done
// lock, see if we're done
std::lock_guard<std::mutex> g(mu_);
if (!thread_still_ok) {
gpr_log(GPR_ERROR, "Finishing client thread due to RPC error");
done_ = true;
@ -253,11 +262,19 @@ class Client {
if (done_) {
return;
}
// check if we're resetting stats, swap out the histogram if so
if (new_stats_) {
new_stats_->Swap(&histogram_);
new_stats_ = nullptr;
cv_.notify_one();
}
}
}
std::mutex mu_;
std::condition_variable cv_;
bool done_;
Histogram* new_stats_;
Histogram histogram_;
Client* client_;
const size_t idx_;
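
The client.h change splits the old blocking Swap into BeginSwap/EndSwap: the coordinator hands each worker a fresh histogram, the worker performs the swap itself at a safe point in its loop, and the coordinator waits on a condition variable until that has happened. A rough Python-flavored sketch of the same hand-off pattern, purely illustrative (class and method names are hypothetical, not the C++ code):

import threading

class Histogram:
    """Tiny stand-in for the C++ Histogram; only a content swap is modelled."""
    def __init__(self):
        self.counts = []

    def swap(self, other):
        # Exchange contents with `other`, like Histogram::Swap in the C++ code.
        self.counts, other.counts = other.counts, self.counts

class WorkerThread:
    """Worker side of the BeginSwap/EndSwap hand-off (names are illustrative)."""
    def __init__(self):
        self._cv = threading.Condition()
        self._new_stats = None         # set by the coordinator in begin_swap
        self._histogram = Histogram()  # stats accumulated by the worker loop

    def begin_swap(self, fresh):
        # Coordinator: request a swap with a fresh histogram; returns at once.
        with self._cv:
            self._new_stats = fresh

    def end_swap(self):
        # Coordinator: block until the worker has actually performed the swap.
        with self._cv:
            while self._new_stats is not None:
                self._cv.wait()

    def loop_body(self):
        # Worker: called once per benchmark iteration; a pending swap is done
        # here, at a point where the histogram is not being updated.
        with self._cv:
            if self._new_stats is not None:
                self._new_stats.swap(self._histogram)
                self._new_stats = None
                self._cv.notify()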

@ -1 +1 @@
Subproject commit 3470b6895aa659b7559ed678e029a5338e535f14
Subproject commit d4d13a4349e4e59d67f311185ddcc1890d956d7a

@ -49,7 +49,7 @@ def mako_plugin(dictionary):
tests.append({
'name': new_target['name'],
'args': [fn],
'exclude_configs': [],
'exclude_configs': ['tsan'],
'uses_polling': False,
'platforms': ['linux'],
'ci_platforms': ['linux'],

@ -0,0 +1,140 @@
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
FROM debian:jessie
# Install Git and basic packages.
RUN apt-get update && apt-get install -y \
autoconf \
autotools-dev \
build-essential \
bzip2 \
ccache \
curl \
gcc \
gcc-multilib \
git \
golang \
gyp \
lcov \
libc6 \
libc6-dbg \
libc6-dev \
libgtest-dev \
libtool \
make \
perl \
strace \
python-dev \
python-setuptools \
python-yaml \
telnet \
unzip \
wget \
zip && apt-get clean
#================
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
#==================
# Ruby dependencies
# Install rvm
RUN gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3
RUN \curl -sSL https://get.rvm.io | bash -s stable
# Install Ruby 2.1
RUN /bin/bash -l -c "rvm install ruby-2.1"
RUN /bin/bash -l -c "rvm use --default ruby-2.1"
RUN /bin/bash -l -c "echo 'gem: --no-ri --no-rdoc' > ~/.gemrc"
RUN /bin/bash -l -c "echo 'export PATH=/usr/local/rvm/bin:$PATH' >> ~/.bashrc"
RUN /bin/bash -l -c "echo 'rvm --default use ruby-2.1' >> ~/.bashrc"
RUN /bin/bash -l -c "gem install bundler --no-ri --no-rdoc"
# Google Cloud platform API libraries
RUN apt-get update && apt-get install -y python-pip && apt-get clean
RUN pip install --upgrade google-api-python-client
#=================
# PHP dependencies
# Install dependencies
RUN /bin/bash -l -c "echo 'deb http://packages.dotdeb.org wheezy-php55 all' \
>> /etc/apt/sources.list.d/dotdeb.list"
RUN /bin/bash -l -c "echo 'deb-src http://packages.dotdeb.org wheezy-php55 all' \
>> /etc/apt/sources.list.d/dotdeb.list"
RUN wget http://www.dotdeb.org/dotdeb.gpg -O- | apt-key add -
RUN apt-get update && apt-get install -y \
git php5 php5-dev phpunit unzip
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
RUN ln -s /usr/bin/ccache /usr/local/bin/cc
RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
#======================
# Zookeeper dependencies
# TODO(jtattermusch): is zookeeper still needed?
RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
# ronn: a ruby tool used to convert markdown to man pages, used during the
# install of Protobuf extensions
#
# rake: a ruby version of make used to build the PHP Protobuf extension
RUN /bin/bash -l -c "rvm all do gem install ronn rake"
# Install composer
RUN curl -sS https://getcomposer.org/installer | php
RUN mv composer.phar /usr/local/bin/composer
# As an attempt to work around #4212, try to prefetch Protobuf-PHP dependency
# into composer cache to prevent "composer install" from cloning on each build.
RUN git clone --mirror https://github.com/stanley-cheung/Protobuf-PHP.git \
/root/.composer/cache/vcs/git-github.com-stanley-cheung-Protobuf-PHP.git/
# Download the patched PHP protobuf so that PHP gRPC clients can be generated
# from proto3 schemas.
RUN git clone https://github.com/stanley-cheung/Protobuf-PHP.git /var/local/git/protobuf-php
RUN /bin/bash -l -c "rvm use ruby-2.1 \
&& cd /var/local/git/protobuf-php \
&& rvm all do rake pear:package version=1.0 \
&& pear install Protobuf-1.0.tgz"
# Define the default command.
CMD ["bash"]

@ -0,0 +1,54 @@
#!/bin/bash
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Builds PHP interop server and client in a base image.
set -ex
mkdir -p /var/local/git
git clone --recursive /var/local/jenkins/grpc /var/local/git/grpc
# copy service account keys if available
cp -r /var/local/jenkins/service_account $HOME || true
cd /var/local/git/grpc
rvm --default use ruby-2.1
make install-certs
# gRPC core and protobuf need to be installed
make install
(cd src/php/ext/grpc && phpize && ./configure && make)
(cd third_party/protobuf && make install)
(cd src/php && composer install)
(cd src/php && protoc-gen-php -i tests/interop/ -o tests/interop/ tests/interop/test.proto)

@ -67,14 +67,6 @@ RUN apt-get update && apt-get install -y time && apt-get clean
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
RUN ln -s /usr/bin/ccache /usr/local/bin/cc
RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
#======================
# Zookeeper dependencies
# TODO(jtattermusch): is zookeeper still needed?

@ -62,6 +62,11 @@ os.chdir(_ROOT)
_FORCE_ENVIRON_FOR_WRAPPERS = {}
_POLLING_STRATEGIES = {
'linux': ['poll', 'legacy']
}
def platform_string():
return jobset.platform_string()
@ -153,14 +158,8 @@ class CLanguage(object):
def test_specs(self):
out = []
binaries = get_c_tests(self.args.travis, self.test_lang)
POLLING_STRATEGIES = {
'windows': ['all'],
'mac': ['all'],
'posix': ['all'],
'linux': ['poll', 'legacy']
}
for target in binaries:
polling_strategies = (POLLING_STRATEGIES[self.platform]
polling_strategies = (_POLLING_STRATEGIES.get(self.platform, ['all'])
if target.get('uses_polling', True)
else ['all'])
for polling_strategy in polling_strategies:
@ -395,7 +394,7 @@ class PythonLanguage(object):
tests_json = json.load(tests_json_file)
environment = dict(_FORCE_ENVIRON_FOR_WRAPPERS)
environment['PYTHONPATH'] = '{}:{}'.format(
os.path.abspath('src/python/gens'),
os.path.abspath('src/python/gens'),
os.path.abspath('src/python/grpcio_health_checking'))
if self.config.build_config != 'gcov':
return [self.config.job_spec(
@ -855,8 +854,13 @@ argp.add_argument('--update_submodules', default=[], nargs='*',
argp.add_argument('-a', '--antagonists', default=0, type=int)
argp.add_argument('-x', '--xml_report', default=None, type=str,
help='Generates a JUnit-compatible XML report')
argp.add_argument('--force_default_poller', default=False, action='store_const', const=True,
help='Dont try to iterate over many polling strategies when they exist')
args = argp.parse_args()
if args.force_default_poller:
_POLLING_STRATEGIES = {}
jobset.measure_cpu_costs = args.measure_cpu_costs
# update submodules if necessary
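
run_tests.py now hoists the polling-strategy table to module level and consults it with a default, so platforms without an entry fall back to ['all'] and --force_default_poller can simply empty the table. A hedged, self-contained sketch of that lookup logic (the function wrapper is illustrative; only the table and the .get fallback mirror the diff):

_POLLING_STRATEGIES = {
    'linux': ['poll', 'legacy'],
}

def polling_strategies(platform, uses_polling=True, force_default_poller=False):
    """Return the polling strategies to iterate over for one test target."""
    table = {} if force_default_poller else _POLLING_STRATEGIES
    if not uses_polling:
        return ['all']
    return table.get(platform, ['all'])

# e.g. polling_strategies('linux')                            -> ['poll', 'legacy']
#      polling_strategies('windows')                          -> ['all']
#      polling_strategies('linux', force_default_poller=True) -> ['all']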

@ -41,11 +41,11 @@ want_submodules=`mktemp /tmp/submXXXXXX`
git submodule | awk '{ print $1 }' | sort > $submodules
cat << EOF | awk '{ print $1 }' | sort > $want_submodules
c880e42ba1c8032d4cdde2aba0541d8a9d9fa2e9 third_party/boringssl (heads/2661)
c880e42ba1c8032d4cdde2aba0541d8a9d9fa2e9 third_party/boringssl (version_for_cocoapods_2.0-100-gc880e42)
05b155ff59114735ec8cd089f669c4c3d8f59029 third_party/gflags (v2.1.0-45-g05b155f)
c99458533a9b4c743ed51537e25989ea55944908 third_party/googletest (release-1.7.0)
f8ac463766281625ad710900479130c7fcb4d63b third_party/nanopb (nanopb-0.3.4-29-gf8ac463)
3470b6895aa659b7559ed678e029a5338e535f14 third_party/protobuf (v3.0.0-beta-2-441-g3470b68)
d4d13a4349e4e59d67f311185ddcc1890d956d7a third_party/protobuf (v3.0.0-beta-3.2)
50893291621658f355bc5b4d450a8d06a563053d third_party/zlib (v1.2.8)
EOF

@ -0,0 +1,93 @@
{
"dockerImages": {
"grpc_stress_cxx_opt" : {
"buildScript": "tools/run_tests/dockerize/build_interop_stress_image.sh",
"dockerFileDir": "grpc_interop_stress_cxx",
"buildType": "opt"
},
"grpc_stress_php": {
"buildScript": "tools/run_tests/dockerize/build_interop_stress_image.sh",
"dockerFileDir": "grpc_interop_stress_php"
}
},
"clientTemplates": {
"baseTemplates": {
"default": {
"wrapperScriptPath": "/var/local/git/grpc/tools/gcp/stress_test/run_client.py",
"pollIntervalSecs": 60,
"clientArgs": {
"num_channels_per_server":5,
"num_stubs_per_channel":10,
"test_cases": "empty_unary:1,large_unary:1,client_streaming:1,server_streaming:1,empty_stream:1",
"metrics_port": 8081
},
"metricsPort": 8081,
"metricsArgs": {
"metrics_server_address": "localhost:8081"
}
}
},
"templates": {
"php_client": {
"baseTemplate": "default",
"stressClientCmd": [
"/var/local/git/grpc/src/php/bin/stress_client.sh"
],
"metricsClientCmd": [
"php",
"/var/local/git/grpc/src/php/tests/interop/metrics_client.php"
]
}
}
},
"serverTemplates": {
"baseTemplates":{
"default": {
"wrapperScriptPath": "/var/local/git/grpc/tools/gcp/stress_test/run_server.py",
"serverPort": 8080,
"serverArgs": {
"port": 8080
}
}
},
"templates": {
"cxx_server_opt": {
"baseTemplate": "default",
"stressServerCmd": ["/var/local/git/grpc/bins/opt/interop_server"]
}
}
},
"testMatrix": {
"serverPodSpecs": {
"stress-server-cxx-php": {
"serverTemplate": "cxx_server_opt",
"dockerImage": "grpc_stress_cxx_opt",
"numInstances": 1
}
},
"clientPodSpecs": {
"stress-client-php": {
"clientTemplate": "php_client",
"dockerImage": "grpc_stress_php",
"numInstances": 20,
"serverPodSpec": "stress-server-cxx-php"
}
}
},
"globalSettings": {
"buildDockerImages": true,
"pollIntervalSecs": 60,
"testDurationSecs": 7200,
"kubernetesProxyPort": 8010,
"datasetIdNamePrefix": "stress_test_php_cxx_opt",
"summaryTableId": "summary",
"qpsTableId": "qps",
"podWarmupSecs": 60
}
}

File diff for tools/run_tests/tests.json suppressed because it is too large.