Merge branch 'master' of https://github.com/grpc/grpc into faster-bm-diff

pull/11367/head
ncteisen 8 years ago
commit c7586a5d32
  1. Makefile (2 changes)
  2. include/grpc++/server_builder.h (7 changes)
  3. src/cpp/server/server_builder.cc (9 changes)
  4. src/csharp/Grpc.Auth/Grpc.Auth.csproj (1 change)
  5. src/csharp/Grpc.Core.Testing/Grpc.Core.Testing.csproj (3 changes)
  6. src/csharp/Grpc.Core/Common.csproj.include (4 changes)
  7. src/csharp/Grpc.Core/Grpc.Core.csproj (1 change)
  8. src/csharp/Grpc.HealthCheck/Grpc.HealthCheck.csproj (1 change)
  9. src/csharp/Grpc.IntegrationTesting/ClientRunners.cs (4 changes)
  10. src/csharp/Grpc.IntegrationTesting/QpsWorker.cs (4 changes)
  11. src/csharp/Grpc.IntegrationTesting/ServerRunners.cs (4 changes)
  12. src/csharp/Grpc.Reflection/Grpc.Reflection.csproj (1 change)
  13. src/node/performance/benchmark_client.js (40 changes)
  14. src/python/grpcio_reflection/grpc_reflection/v1alpha/reflection.py (5 changes)
  15. templates/Makefile.template (2 changes)
  16. test/build/c-ares.c (4 changes)
  17. test/cpp/microbenchmarks/bm_fullstack_trickle.cc (29 changes)
  18. tools/distrib/pylint_code.sh (14 changes)
  19. tools/internal_ci/helper_scripts/prepare_build_macos_rc (66 changes)
  20. tools/internal_ci/linux/grpc_build_artifacts.sh (4 changes)
  21. tools/internal_ci/macos/grpc_master.sh (2 changes)
  22. tools/run_tests/performance/scenario_config.py (44 changes)
  23. tools/run_tests/run_tests.py (2 changes)
  24. tools/run_tests/run_tests_matrix.py (9 changes)

Makefile
@@ -506,7 +506,7 @@ OPENSSL_ALPN_CHECK_CMD = $(PKG_CONFIG) --atleast-version=1.0.2 openssl
 OPENSSL_NPN_CHECK_CMD = $(PKG_CONFIG) --atleast-version=1.0.1 openssl
 ZLIB_CHECK_CMD = $(PKG_CONFIG) --exists zlib
 PROTOBUF_CHECK_CMD = $(PKG_CONFIG) --atleast-version=3.0.0 protobuf
-CARES_CHECK_CMD = $(PKG_CONFIG) --exists libcares
+CARES_CHECK_CMD = $(PKG_CONFIG) --atleast-version=1.11.0 libcares
 else # HAS_PKG_CONFIG
 ifeq ($(SYSTEM),MINGW32)

include/grpc++/server_builder.h
@@ -150,15 +150,16 @@ class ServerBuilder {
   ///
   /// It can be invoked multiple times.
   ///
-  /// \param addr The address to try to bind to the server (eg, localhost:1234,
-  /// 192.168.1.1:31416, [::1]:27182, etc.).
+  /// \param addr_uri The address to try to bind to the server in URI form. If
+  /// the scheme name is omitted, "dns:///" is assumed. Valid values include
+  /// dns:///localhost:1234, 192.168.1.1:31416, dns:///[::1]:27182, etc.).
   /// \params creds The credentials associated with the server.
   /// \param selected_port[out] If not `nullptr`, gets populated with the port
   /// number bound to the \a grpc::Server for the corresponding endpoint after
   /// it is successfully bound, 0 otherwise.
   ///
   // TODO(dgq): the "port" part seems to be a misnomer.
-  ServerBuilder& AddListeningPort(const grpc::string& addr,
+  ServerBuilder& AddListeningPort(const grpc::string& addr_uri,
                                   std::shared_ptr<ServerCredentials> creds,
                                   int* selected_port = nullptr);

src/cpp/server/server_builder.cc
@@ -172,8 +172,15 @@ ServerBuilder& ServerBuilder::SetResourceQuota(
 }
 
 ServerBuilder& ServerBuilder::AddListeningPort(
-    const grpc::string& addr, std::shared_ptr<ServerCredentials> creds,
+    const grpc::string& addr_uri, std::shared_ptr<ServerCredentials> creds,
     int* selected_port) {
+  const grpc::string uri_scheme = "dns:";
+  grpc::string addr = addr_uri;
+  if (addr_uri.compare(0, uri_scheme.size(), uri_scheme) == 0) {
+    size_t pos = uri_scheme.size();
+    while (addr_uri[pos] == '/') ++pos;  // Skip slashes.
+    addr = addr_uri.substr(pos);
+  }
   Port port = {addr, creds, selected_port};
   ports_.push_back(port);
   return *this;
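
For illustration, a minimal caller-side sketch of the new behavior (not part of this commit; service registration is omitted and the port numbers are arbitrary): AddListeningPort now strips a leading "dns:" scheme plus any slashes that follow it, so URI-style addresses and bare host:port strings both bind the same way.

#include <iostream>
#include <memory>

#include <grpc++/security/server_credentials.h>
#include <grpc++/server.h>
#include <grpc++/server_builder.h>

int main() {
  grpc::ServerBuilder builder;
  int selected_port = 0;
  // URI form: the "dns:///" prefix is removed before the address is bound.
  builder.AddListeningPort("dns:///localhost:50051",
                           grpc::InsecureServerCredentials(), &selected_port);
  // Bare host:port form keeps working exactly as before.
  builder.AddListeningPort("[::1]:50052", grpc::InsecureServerCredentials());
  std::unique_ptr<grpc::Server> server = builder.BuildAndStart();
  // selected_port is now the actual bound port, or 0 if the bind failed.
  std::cout << "dns:/// endpoint bound to port " << selected_port << std::endl;
  if (server) server->Shutdown();
  return 0;
}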

src/csharp/Grpc.Auth/Grpc.Auth.csproj
@@ -18,6 +18,7 @@
     <NetStandardImplicitPackageVersion Condition=" '$(TargetFramework)' == 'netstandard1.5' ">1.6.0</NetStandardImplicitPackageVersion>
     <IncludeSymbols>true</IncludeSymbols>
     <IncludeSource>true</IncludeSource>
+    <GenerateDocumentationFile>true</GenerateDocumentationFile>
   </PropertyGroup>
 
   <ItemGroup>

src/csharp/Grpc.Core.Testing/Grpc.Core.Testing.csproj
@@ -18,6 +18,7 @@
     <NetStandardImplicitPackageVersion Condition=" '$(TargetFramework)' == 'netstandard1.5' ">1.6.0</NetStandardImplicitPackageVersion>
     <IncludeSymbols>true</IncludeSymbols>
     <IncludeSource>true</IncludeSource>
+    <GenerateDocumentationFile>true</GenerateDocumentationFile>
   </PropertyGroup>
 
   <ItemGroup>

@@ -31,8 +32,6 @@
   </ItemGroup>
 
   <ItemGroup Condition=" '$(TargetFramework)' == 'net45' ">
-    <Reference Include="System.Runtime" />
-    <Reference Include="System.IO" />
     <Reference Include="System" />
     <Reference Include="Microsoft.CSharp" />
   </ItemGroup>

src/csharp/Grpc.Core/Common.csproj.include
@@ -12,10 +12,6 @@
     <GenerateAssemblyCopyrightAttribute>false</GenerateAssemblyCopyrightAttribute>
   </PropertyGroup>
 
-  <PropertyGroup>
-    <GenerateDocumentationFile>true</GenerateDocumentationFile>
-  </PropertyGroup>
-
   <PropertyGroup>
     <DefineConstants>$(DefineConstants);SIGNED</DefineConstants>
     <AssemblyOriginatorKeyFile>../keys/Grpc.snk</AssemblyOriginatorKeyFile>

src/csharp/Grpc.Core/Grpc.Core.csproj
@@ -17,6 +17,7 @@
     <NetStandardImplicitPackageVersion Condition=" '$(TargetFramework)' == 'netstandard1.5' ">1.6.0</NetStandardImplicitPackageVersion>
     <IncludeSymbols>true</IncludeSymbols>
     <IncludeSource>true</IncludeSource>
+    <GenerateDocumentationFile>true</GenerateDocumentationFile>
   </PropertyGroup>
 
   <ItemGroup>

src/csharp/Grpc.HealthCheck/Grpc.HealthCheck.csproj
@@ -17,6 +17,7 @@
     <NetStandardImplicitPackageVersion Condition=" '$(TargetFramework)' == 'netstandard1.5' ">1.6.0</NetStandardImplicitPackageVersion>
     <IncludeSymbols>true</IncludeSymbols>
     <IncludeSource>true</IncludeSource>
+    <GenerateDocumentationFile>true</GenerateDocumentationFile>
   </PropertyGroup>
 
   <ItemGroup>

src/csharp/Grpc.IntegrationTesting/ClientRunners.cs
@@ -186,8 +186,8 @@ namespace Grpc.IntegrationTesting
                 statsResetCount.Increment();
             }
 
-            GrpcEnvironment.Logger.Info("[ClientRunnerImpl.GetStats] GC collection counts: gen0 {0}, gen1 {1}, gen2 {2}, gen3 {3} (histogram reset count:{4}, seconds since reset: {5})",
-                GC.CollectionCount(0), GC.CollectionCount(1), GC.CollectionCount(2), GC.CollectionCount(3), statsResetCount.Count, secondsElapsed);
+            GrpcEnvironment.Logger.Info("[ClientRunnerImpl.GetStats] GC collection counts: gen0 {0}, gen1 {1}, gen2 {2}, (histogram reset count:{3}, seconds since reset: {4})",
+                GC.CollectionCount(0), GC.CollectionCount(1), GC.CollectionCount(2), statsResetCount.Count, secondsElapsed);
 
             // TODO: populate user time and system time
             return new ClientStats

src/csharp/Grpc.IntegrationTesting/QpsWorker.cs
@@ -100,8 +100,8 @@ namespace Grpc.IntegrationTesting
             await tcs.Task;
             await server.ShutdownAsync();
 
-            GrpcEnvironment.Logger.Info("GC collection counts (after shutdown): gen0 {0}, gen1 {1}, gen2 {2}, gen3 {3}",
-                GC.CollectionCount(0), GC.CollectionCount(1), GC.CollectionCount(2), GC.CollectionCount(3));
+            GrpcEnvironment.Logger.Info("GC collection counts (after shutdown): gen0 {0}, gen1 {1}, gen2 {2}",
+                GC.CollectionCount(0), GC.CollectionCount(1), GC.CollectionCount(2));
         }
     }
 }

src/csharp/Grpc.IntegrationTesting/ServerRunners.cs
@@ -154,8 +154,8 @@ namespace Grpc.IntegrationTesting
         {
             var secondsElapsed = wallClockStopwatch.GetElapsedSnapshot(reset).TotalSeconds;
 
-            GrpcEnvironment.Logger.Info("[ServerRunner.GetStats] GC collection counts: gen0 {0}, gen1 {1}, gen2 {2}, gen3 {3} (seconds since last reset {4})",
-                GC.CollectionCount(0), GC.CollectionCount(1), GC.CollectionCount(2), GC.CollectionCount(3), secondsElapsed);
+            GrpcEnvironment.Logger.Info("[ServerRunner.GetStats] GC collection counts: gen0 {0}, gen1 {1}, gen2 {2}, (seconds since last reset {3})",
+                GC.CollectionCount(0), GC.CollectionCount(1), GC.CollectionCount(2), secondsElapsed);
 
             // TODO: populate user time and system time
             return new ServerStats

src/csharp/Grpc.Reflection/Grpc.Reflection.csproj
@@ -17,6 +17,7 @@
     <NetStandardImplicitPackageVersion Condition=" '$(TargetFramework)' == 'netstandard1.5' ">1.6.0</NetStandardImplicitPackageVersion>
     <IncludeSymbols>true</IncludeSymbols>
     <IncludeSource>true</IncludeSource>
+    <GenerateDocumentationFile>true</GenerateDocumentationFile>
   </PropertyGroup>
 
   <ItemGroup>

src/node/performance/benchmark_client.js
@@ -227,19 +227,23 @@ BenchmarkClient.prototype.startClosedLoop = function(
     makeCall = function(client) {
       if (self.running) {
         self.pending_calls++;
-        var start_time = process.hrtime();
         var call = client.streamingCall();
+        var start_time = process.hrtime();
         call.write(argument);
         call.on('data', function() {
-        });
-        call.on('end', function() {
           var time_diff = process.hrtime(start_time);
           self.histogram.add(timeDiffToNanos(time_diff));
-          makeCall(client);
           self.pending_calls--;
-          if ((!self.running) && self.pending_calls == 0) {
-            self.emit('finished');
+          if (self.running) {
+            self.pending_calls++;
+            start_time = process.hrtime();
+            call.write(argument);
+          } else {
+            call.end();
+            if (self.pending_calls == 0) {
+              self.emit('finished');
+            }
           }
         });
         call.on('error', function(error) {
           self.emit('error', new Error('Client error: ' + error.message));

@@ -317,30 +321,8 @@ BenchmarkClient.prototype.startPoisson = function(
         }
       };
     } else {
-      makeCall = function(client, poisson) {
-        if (self.running) {
-          self.pending_calls++;
-          var start_time = process.hrtime();
-          var call = client.streamingCall();
-          call.write(argument);
-          call.on('data', function() {
-          });
-          call.on('end', function() {
-            var time_diff = process.hrtime(start_time);
-            self.histogram.add(timeDiffToNanos(time_diff));
-            self.pending_calls--;
-            if ((!self.running) && self.pending_calls == 0) {
-              self.emit('finished');
-            }
-          });
-          call.on('error', function(error) {
-            self.emit('error', new Error('Client error: ' + error.message));
-            self.running = false;
-          });
-        } else {
-          poisson.stop();
-        }
-      };
+      self.emit('error', new Error('Streaming Poisson benchmarks not supported'));
+      return;
     }
 
     var averageIntervalMs = (1 / offered_load) * 1000;

src/python/grpcio_reflection/grpc_reflection/v1alpha/reflection.py
@@ -28,8 +28,6 @@
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 """Reference implementation for reflection in gRPC Python."""
 
-import threading
-
 import grpc
 from google.protobuf import descriptor_pb2
 from google.protobuf import descriptor_pool

@@ -120,6 +118,7 @@ class ReflectionServicer(reflection_pb2_grpc.ServerReflectionServicer):
                 ]))
 
     def ServerReflectionInfo(self, request_iterator, context):
+        # pylint: disable=unused-argument
        for request in request_iterator:
            if request.HasField('file_by_filename'):
                yield self._file_by_filename(request.file_by_filename)

@@ -152,4 +151,4 @@ def enable_server_reflection(service_names, server, pool=None):
       pool: DescriptorPool object to use (descriptor_pool.Default() if None).
     """
     reflection_pb2_grpc.add_ServerReflectionServicer_to_server(
-        ReflectionServicer(service_names, pool), server)
+        ReflectionServicer(service_names, pool=pool), server)

templates/Makefile.template
@@ -427,7 +427,7 @@
 OPENSSL_NPN_CHECK_CMD = $(PKG_CONFIG) --atleast-version=1.0.1 openssl
 ZLIB_CHECK_CMD = $(PKG_CONFIG) --exists zlib
 PROTOBUF_CHECK_CMD = $(PKG_CONFIG) --atleast-version=3.0.0 protobuf
-CARES_CHECK_CMD = $(PKG_CONFIG) --exists libcares
+CARES_CHECK_CMD = $(PKG_CONFIG) --atleast-version=1.11.0 libcares
 else # HAS_PKG_CONFIG
 ifeq ($(SYSTEM),MINGW32)

test/build/c-ares.c
@@ -33,6 +33,10 @@
 #include <ares.h>
 
+#if (ARES_VERSION < 0x010b00)
+ARES_VERSION should not be smaller than 1.11.0
+#endif
+
 int main(void) {
   ares_channel channelptr;
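
The added lines are a compile-time version gate: when the detected c-ares headers report a version older than 1.11.0 (0x010b00), the bare sentence inside the #if branch is not valid C, so this probe file fails to build and the build system treats the library as unusable. A more conventional way to express the same gate (a sketch, not part of this commit) uses #error, which prints an explicit message instead of relying on a parse failure:

#include <ares.h>

#if ARES_VERSION < 0x010b00  /* 0x010b00 encodes version 1.11.0 */
#error "c-ares >= 1.11.0 is required"
#endif

int main(void) {
  ares_channel channel;  /* touch a library type so the header is really used */
  (void)channel;
  return 0;
}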

test/cpp/microbenchmarks/bm_fullstack_trickle.cc
@@ -101,8 +101,6 @@ class TrickledCHTTP2 : public EndpointPairFixture {
   }
 
   void AddToLabel(std::ostream& out, benchmark::State& state) {
-    grpc_chttp2_transport* client =
-        reinterpret_cast<grpc_chttp2_transport*>(client_transport_);
     out << " writes/iter:"
         << ((double)stats_.num_writes / (double)state.iterations())
         << " cli_transport_stalls/iter:"

@@ -118,8 +116,7 @@ class TrickledCHTTP2 : public EndpointPairFixture {
             (double)state.iterations())
         << " svr_stream_stalls/iter:"
         << ((double)server_stats_.streams_stalled_due_to_stream_flow_control /
-            (double)state.iterations())
-        << " cli_bw_est:" << (double)client->bdp_estimator.bw_est;
+            (double)state.iterations());
   }
 
   void Log(int64_t iteration) {

@@ -180,7 +177,6 @@ class TrickledCHTTP2 : public EndpointPairFixture {
     size_t server_backlog =
         grpc_trickle_endpoint_trickle(&exec_ctx, endpoint_pair_.server);
     grpc_exec_ctx_finish(&exec_ctx);
-
     if (update_stats) {
       UpdateStats((grpc_chttp2_transport*)client_transport_, &client_stats_,
                   client_backlog);

@@ -374,7 +370,7 @@ static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) {
         stub->AsyncEcho(&cli_ctx, send_request, fixture->cq()));
     void* t;
     bool ok;
-    TrickleCQNext(fixture.get(), &t, &ok, state.iterations());
+    TrickleCQNext(fixture.get(), &t, &ok, in_warmup ? -1 : state.iterations());
     GPR_ASSERT(ok);
     GPR_ASSERT(t == tag(0) || t == tag(1));
     intptr_t slot = reinterpret_cast<intptr_t>(t);

@@ -382,7 +378,8 @@ static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) {
     senv->response_writer.Finish(send_response, Status::OK, tag(3));
     response_reader->Finish(&recv_response, &recv_status, tag(4));
     for (int i = (1 << 3) | (1 << 4); i != 0;) {
-      TrickleCQNext(fixture.get(), &t, &ok, state.iterations());
+      TrickleCQNext(fixture.get(), &t, &ok,
+                    in_warmup ? -1 : state.iterations());
       GPR_ASSERT(ok);
       int tagnum = (int)reinterpret_cast<intptr_t>(t);
       GPR_ASSERT(i & (1 << tagnum));

@@ -419,17 +416,15 @@ static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) {
 }
 
 static void UnaryTrickleArgs(benchmark::internal::Benchmark* b) {
-  // A selection of interesting numbers
-  const int cli_1024k = 1024 * 1024;
-  const int cli_32M = 32 * 1024 * 1024;
-  const int svr_256k = 256 * 1024;
-  const int svr_4M = 4 * 1024 * 1024;
-  const int svr_64M = 64 * 1024 * 1024;
   for (int bw = 64; bw <= 128 * 1024 * 1024; bw *= 16) {
-    for (auto svr : {svr_256k, svr_4M, svr_64M}) {
-      for (auto cli : {cli_1024k, cli_32M}) {
-        b->Args({cli, svr, bw});
-      }
+    b->Args({1, 1, bw});
+    for (int i = 64; i <= 128 * 1024 * 1024; i *= 64) {
+      double expected_time =
+          static_cast<double>(14 + i) / (125.0 * static_cast<double>(bw));
+      if (expected_time > 2.0) continue;
+      b->Args({i, 1, bw});
+      b->Args({1, i, bw});
+      b->Args({i, i, bw});
     }
   }
 }
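
One way to read the new argument filter (an interpretation, not stated in the diff): if the trickle bandwidth argument bw is in kilobits per second, then 125.0 * bw is bytes per second and 14 + i approximates a message of i bytes plus a small framing allowance, so any configuration whose single message would be estimated to take more than two seconds to transfer is skipped, keeping benchmark run times bounded. A standalone sketch that replicates the filter and prints the surviving combinations:

#include <cstdio>

// Standalone sketch of the filter above (not benchmark code). Assumption:
// bw is kilobits/s, so 125.0 * bw is bytes/s, and 14 + i is the message size
// in bytes plus a small framing allowance.
int main() {
  for (long bw = 64; bw <= 128 * 1024 * 1024; bw *= 16) {
    for (long i = 64; i <= 128 * 1024 * 1024; i *= 64) {
      double expected_time = static_cast<double>(14 + i) / (125.0 * bw);
      if (expected_time > 2.0) continue;  // skip runs estimated to take > 2s
      std::printf("keep: msg=%ld bytes, bw=%ld kbps (~%.3f s per message)\n",
                  i, bw, expected_time);
    }
  }
  return 0;
}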

tools/distrib/pylint_code.sh
@@ -31,18 +31,22 @@
 set -ex
 
 # change to root directory
-cd $(dirname $0)/../..
+cd "$(dirname "$0")/../.."
 
-DIRS=src/python/grpcio/grpc
+DIRS=(
+    'src/python/grpcio/grpc'
+    'src/python/grpcio_reflection/grpc_reflection'
+    'src/python/grpcio_health_checking/grpc_health'
+)
 
 VIRTUALENV=python_pylint_venv
 
 virtualenv $VIRTUALENV
-PYTHON=`realpath $VIRTUALENV/bin/python`
+PYTHON=$(realpath $VIRTUALENV/bin/python)
 $PYTHON -m pip install pylint==1.6.5
 
-for dir in $DIRS; do
-  $PYTHON -m pylint --rcfile=.pylintrc -rn $dir || exit $?
+for dir in "${DIRS[@]}"; do
+  $PYTHON -m pylint --rcfile=.pylintrc -rn "$dir" || exit $?
 done
 
 exit 0

tools/internal_ci/helper_scripts/prepare_build_macos_rc (new file)
@@ -0,0 +1,66 @@
#!/bin/bash
# Copyright 2017, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Source this rc script to prepare the environment for macos builds
# TODO(jtattermusch): remove all deps once installed on MacOS workers
# brew and C++ deps
yes | ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
brew install autoconf automake libtool ccache cmake gflags gpg wget
# TODO(jtattermusch): install rvm & ruby
# TODO(jtattermusch): install cocoapods
# python
wget -q https://bootstrap.pypa.io/get-pip.py
sudo python get-pip.py
sudo pip install virtualenv
# TODO(jtattermusch): install python3
# mono
wget -q https://download.mono-project.com/archive/5.0.1/macos-10-universal/MonoFramework-MDK-5.0.1.1.macos10.xamarin.universal.pkg
sudo installer -pkg MonoFramework-MDK-5.0.1.1.macos10.xamarin.universal.pkg -target /
ln -s /Library/Frameworks/Mono.framework/Versions/Current/bin/mono /usr/local/bin/mono
# dotnet SDK
brew install openssl
wget -q https://go.microsoft.com/fwlink/?linkid=843444 -O dotnet-dev-osx-x64.1.0.1.pkg
sudo installer -pkg dotnet-dev-osx-x64.1.0.1.pkg -target /
ln -s /usr/local/share/dotnet/dotnet /usr/local/bin/dotnet
dotnet --version # bootstrap dotnet SDK
# nvm
wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.30.2/install.sh | bash
# TODO(jtattermusch): install node if needed
git submodule update --init

tools/internal_ci/linux/grpc_build_artifacts.sh
@@ -35,4 +35,8 @@ cd $(dirname $0)/../../..
 
 source tools/internal_ci/helper_scripts/prepare_build_linux_rc
 
+# TODO(jtattermusch): install ruby on the internal_ci worker
+gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3
+curl -sSL https://get.rvm.io | bash -s stable --ruby
+
 tools/run_tests/task_runner.py -f artifact linux

tools/internal_ci/macos/grpc_master.sh
@@ -33,7 +33,7 @@ set -ex
 # change to grpc repo root
 cd $(dirname $0)/../../..
 
-git submodule update --init
+source tools/internal_ci/helper_scripts/prepare_build_macos_rc
 
 tools/run_tests/run_tests_matrix.py -f basictests macos --internal_ci || FAILED="true"

tools/run_tests/performance/scenario_config.py
@@ -523,15 +523,14 @@ class NodeLanguage:
   def scenarios(self):
     # TODO(jtattermusch): make this scenario work
-    #yield _ping_pong_scenario(
-    #  'node_generic_async_streaming_ping_pong', rpc_type='STREAMING',
-    #  client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER',
-    #  use_generic_payload=True)
-
-    # TODO(jtattermusch): make this scenario work
-    #yield _ping_pong_scenario(
-    #  'node_protobuf_async_streaming_ping_pong', rpc_type='STREAMING',
-    #  client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER')
+    yield _ping_pong_scenario(
+        'node_generic_streaming_ping_pong', rpc_type='STREAMING',
+        client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER',
+        use_generic_payload=True)
+
+    yield _ping_pong_scenario(
+        'node_protobuf_streaming_ping_pong', rpc_type='STREAMING',
+        client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER')
 
     yield _ping_pong_scenario(
         'node_protobuf_unary_ping_pong', rpc_type='UNARY',

@@ -564,29 +563,26 @@ class NodeLanguage:
         secure=secure,
         categories=[SCALABLE])
 
-    # TODO(murgatroid99): fix bugs with this scenario and re-enable it
-    # yield _ping_pong_scenario(
-    #     'node_protobuf_async_unary_qps_unconstrained', rpc_type='UNARY',
-    #     client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
-    #     unconstrained_client='async',
-    #     categories=[SCALABLE, SMOKETEST])
-
-    # TODO(jtattermusch): make this scenario work
-    #yield _ping_pong_scenario(
-    #  'node_protobuf_async_streaming_qps_unconstrained', rpc_type='STREAMING',
-    #  client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
-    #  unconstrained_client='async')
+    yield _ping_pong_scenario(
+        'node_protobuf_unary_qps_unconstrained', rpc_type='UNARY',
+        client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
+        unconstrained_client='async',
+        categories=[SCALABLE, SMOKETEST])
+
+    yield _ping_pong_scenario(
+        'node_protobuf_streaming_qps_unconstrained', rpc_type='STREAMING',
+        client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
+        unconstrained_client='async')
 
     yield _ping_pong_scenario(
         'node_to_cpp_protobuf_async_unary_ping_pong', rpc_type='UNARY',
         client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
         server_language='c++', async_server_threads=1)
 
-    # TODO(jtattermusch): make this scenario work
-    #yield _ping_pong_scenario(
-    #  'node_to_cpp_protobuf_async_streaming_ping_pong', rpc_type='STREAMING',
-    #  client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
-    #  server_language='c++', async_server_threads=1)
+    yield _ping_pong_scenario(
+        'node_to_cpp_protobuf_async_streaming_ping_pong', rpc_type='STREAMING',
+        client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
+        server_language='c++', async_server_threads=1)
 
   def __str__(self):
     return 'node'

tools/run_tests/run_tests.py
@@ -672,7 +672,7 @@ class PythonLanguage(object):
     if args.compiler == 'default':
       if os.name == 'nt':
-        return (python27_config,)
+        return (python35_config,)
       else:
         return (python27_config, python34_config,)
     elif args.compiler == 'python2.7':

tools/run_tests/run_tests_matrix.py
@@ -247,15 +247,6 @@ def _create_portability_test_jobs(extra_args=[], inner_jobs=_DEFAULT_INNER_JOBS)
                               extra_args=extra_args + ['--build_only'],
                               inner_jobs=inner_jobs)
 
-  test_jobs += _generate_jobs(languages=['python'],
-                              configs=['dbg'],
-                              platforms=['linux'],
-                              arch='default',
-                              compiler='python3.4',
-                              labels=['portability'],
-                              extra_args=extra_args,
-                              inner_jobs=inner_jobs)
-
   test_jobs += _generate_jobs(languages=['python'],
                               configs=['dbg'],
                               platforms=['linux'],
