From 4616dd0685e954a3c617bdb14e73a19c93a85f89 Mon Sep 17 00:00:00 2001 From: Alexander Polcyn Date: Sat, 3 Jun 2017 13:44:40 -0700 Subject: [PATCH 01/41] dont use clock_gettime in grpc compatibiltiy mode --- third_party/cares/config_linux/ares_config.h | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/third_party/cares/config_linux/ares_config.h b/third_party/cares/config_linux/ares_config.h index 265974cfae4..5be5010ed7e 100644 --- a/third_party/cares/config_linux/ares_config.h +++ b/third_party/cares/config_linux/ares_config.h @@ -70,8 +70,14 @@ /* Define to 1 if bool is an available type. */ #define HAVE_BOOL_T 1 -/* Define to 1 if you have the clock_gettime function and monotonic timer. */ -#define HAVE_CLOCK_GETTIME_MONOTONIC 1 +/* Define HAVE_CLOCK_GETTIME_MONOTONIC to 1 if you have the clock_gettime + * function and monotonic timer. + * + * Note: setting HAVE_CLOCK_GETTIME_MONOTONIC causes use of the clock_gettime + * function from glibc, don't set it to support glibc < 2.17 */ +#ifndef GPR_BACKWARDS_COMPATIBILITY_MODE + #define HAVE_CLOCK_GETTIME_MONOTONIC 1 +#endif /* Define to 1 if you have the closesocket function. */ /* #undef HAVE_CLOSESOCKET */ From c3735cbe90445f2ef71955f9ab48b0ae7e976af3 Mon Sep 17 00:00:00 2001 From: Alexander Polcyn Date: Thu, 1 Jun 2017 08:23:52 -0700 Subject: [PATCH 02/41] hack to reimplement FDS macros --- third_party/cares/config_linux/ares_config.h | 28 ++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/third_party/cares/config_linux/ares_config.h b/third_party/cares/config_linux/ares_config.h index 265974cfae4..d95bbb36abc 100644 --- a/third_party/cares/config_linux/ares_config.h +++ b/third_party/cares/config_linux/ares_config.h @@ -505,6 +505,34 @@ # define _DARWIN_USE_64_BIT_INODE 1 #endif +#ifdef GPR_BACKWARDS_COMPATIBILITY_MODE + /* Redefine the fd_set macros for GLIBC < 2.15 support. + * This is a backwards compatibility hack. At version 2.15, GLIBC introduces + * the __fdelt_chk function, and starts using it within its fd_set macros + * (which c-ares uses). For compatibility with GLIBC < 2.15, we need to redefine + * the fd_set macros to not use __fdelt_chk. */ + #include + #undef FD_SET + #undef FD_CLR + #undef FD_ISSET + /* 'FD_ZERO' doesn't use __fdelt_chk, no need to redefine. */ + + #ifdef __FDS_BITS + #define GRPC_CARES_FDS_BITS(set) __FDS_BITS(set) + #else + #define GRPC_CARES_FDS_BITS(set) ((set)->fds_bits) + #endif + + #define GRPC_CARES_FD_MASK(d) ((long int)(1UL << (d) % NFDBITS)) + + #define FD_SET(d, set) \ + ((void) (GRPC_CARES_FDS_BITS (set)[ (d) / NFDBITS ] |= GRPC_CARES_FD_MASK(d))) + #define FD_CLR(d, set) \ + ((void) (GRPC_CARES_FDS_BITS (set)[ (d) / NFDBITS ] &= ~GRPC_CARES_FD_MASK(d))) + #define FD_ISSET(d, set) \ + ((GRPC_CARES_FDS_BITS (set)[ (d) / NFDBITS ] & GRPC_CARES_FD_MASK(d)) != 0) +#endif /* GPR_BACKWARDS_COMPATIBILITY_MODE */ + /* Number of bits in a file offset, on hosts where this is settable. 
*/ /* #undef _FILE_OFFSET_BITS */ From 17e016869a22b13b1f568ec534444928ff136a8e Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Fri, 9 Jun 2017 16:26:14 -0700 Subject: [PATCH 03/41] Add ARM Linux Node artifacts --- tools/dockerfile/grpc_artifact_linux_x64/Dockerfile | 8 ++++++++ tools/run_tests/artifacts/artifact_targets.py | 7 ++++--- tools/run_tests/artifacts/build_artifact_node.sh | 7 +++++++ 3 files changed, 19 insertions(+), 3 deletions(-) diff --git a/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile b/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile index 34799447171..c3590f5e5db 100644 --- a/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile +++ b/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile @@ -101,6 +101,14 @@ RUN /bin/bash -l -c "gem install bundler --no-ri --no-rdoc" RUN apt-get update && apt-get install -y \ php5 php5-dev php-pear phpunit +################## +# Install cross compiler for ARM + +RUN echo 'deb http://emdebian.org/tools/debian/ jessie main' | tee -a /etc/apt/sources.list.d/crosstools.list && \ + curl http://emdebian.org/tools/debian/emdebian-toolchain-archive.key | apt-key add - + +RUN dpkg --add-architecture armhf && apt-get update && apt-get install -y crossbuild-essential-armhf + RUN mkdir /var/local/jenkins # Define the default command. diff --git a/tools/run_tests/artifacts/artifact_targets.py b/tools/run_tests/artifacts/artifact_targets.py index e3658acbf65..29eca92a34b 100644 --- a/tools/run_tests/artifacts/artifact_targets.py +++ b/tools/run_tests/artifacts/artifact_targets.py @@ -279,11 +279,12 @@ class NodeExtArtifact: return create_docker_jobspec( self.name, 'tools/dockerfile/grpc_artifact_linux_{}'.format(self.arch), - 'tools/run_tests/artifacts/build_artifact_node.sh {}'.format(self.gyp_arch)) + 'tools/run_tests/artifacts/build_artifact_node.sh {} {}'.format( + self.gyp_arch, self.platform)) else: return create_jobspec(self.name, ['tools/run_tests/artifacts/build_artifact_node.sh', - self.gyp_arch], + self.gyp_arch, self.platform], use_workspace=True) class PHPArtifact: @@ -343,7 +344,7 @@ class ProtocArtifact: environ=environ, use_workspace=True) else: - generator = 'Visual Studio 12 Win64' if self.arch == 'x64' else 'Visual Studio 12' + generator = 'Visual Studio 12 Win64' if self.arch == 'x64' else 'Visual Studio 12' vcplatform = 'x64' if self.arch == 'x64' else 'Win32' return create_jobspec(self.name, ['tools\\run_tests\\artifacts\\build_artifact_protoc.bat'], diff --git a/tools/run_tests/artifacts/build_artifact_node.sh b/tools/run_tests/artifacts/build_artifact_node.sh index e5e36903ebd..af0bed1fe23 100755 --- a/tools/run_tests/artifacts/build_artifact_node.sh +++ b/tools/run_tests/artifacts/build_artifact_node.sh @@ -29,6 +29,7 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
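Backing up to the c-ares configuration change in the first two patches: glibc 2.15 started routing the FD_SET/FD_CLR/FD_ISSET macros through __fdelt_chk, a symbol that older glibc does not provide, which is why ares_config.h re-implements those macros when GPR_BACKWARDS_COMPATIBILITY_MODE is set. The following is a rough, self-contained sketch of that idea, assuming the usual <sys/select.h> definitions of fd_set and NFDBITS; it deliberately uses new COMPAT_* names (instead of redefining the libc macros as the patch does) so the two flavors can be compared side by side:

    #include <assert.h>
    #include <sys/select.h>

    #ifndef NFDBITS
    #define NFDBITS (8 * (int)sizeof(long int)) /* fallback; normally from <sys/select.h> */
    #endif

    /* Same construction as the patch, just under different names. */
    #ifdef __FDS_BITS
    #define COMPAT_FDS_BITS(set) __FDS_BITS(set)
    #else
    #define COMPAT_FDS_BITS(set) ((set)->fds_bits)
    #endif
    #define COMPAT_FD_MASK(d) ((long int)(1UL << (d) % NFDBITS))
    #define COMPAT_FD_SET(d, set) \
      ((void)(COMPAT_FDS_BITS(set)[(d) / NFDBITS] |= COMPAT_FD_MASK(d)))
    #define COMPAT_FD_CLR(d, set) \
      ((void)(COMPAT_FDS_BITS(set)[(d) / NFDBITS] &= ~COMPAT_FD_MASK(d)))
    #define COMPAT_FD_ISSET(d, set) \
      ((COMPAT_FDS_BITS(set)[(d) / NFDBITS] & COMPAT_FD_MASK(d)) != 0)

    int main(void) {
      fd_set libc_set, compat_set;
      int fd;
      FD_ZERO(&libc_set);
      FD_ZERO(&compat_set);
      for (fd = 0; fd < FD_SETSIZE; fd += 7) {
        FD_SET(fd, &libc_set);          /* libc macro: may call __fdelt_chk on glibc >= 2.15 */
        COMPAT_FD_SET(fd, &compat_set); /* patch-style macro: plain bit arithmetic only */
      }
      for (fd = 0; fd < FD_SETSIZE; fd++) {
        assert(FD_ISSET(fd, &libc_set) == COMPAT_FD_ISSET(fd, &compat_set));
      }
      return 0;
    }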
NODE_TARGET_ARCH=$1 +NODE_TARGET_OS=$2 source ~/.nvm/nvm.sh nvm use 4 @@ -50,6 +51,12 @@ for version in ${node_versions[@]} do ./node_modules/.bin/node-pre-gyp configure rebuild package --target=$version --target_arch=$NODE_TARGET_ARCH --grpc_alpine=true cp -r build/stage/* "${ARTIFACTS_OUT}"/ + if [ "$NODE_TARGET_ARCH" == 'x64' ] && [ "$NODE_TARGET_OS" == 'linux' ] + then + # Cross compile for ARM on x64 + CC=arm-linux-gnueabihf-gcc CXX=arm-linux-gnueabihf-g++ LD=arm-linux-gnueabihf-g++ ./node_modules/.bin/node-pre-gyp configure rebuild package testpackage --target=$version --target_arch=arm + cp -r build/stage/* "${ARTIFACTS_OUT}"/ + fi done for version in ${electron_versions[@]} From b48ec8b171cbc017c3f820a2d63b30f300e530da Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Wed, 14 Jun 2017 14:47:05 -0700 Subject: [PATCH 04/41] Upgrade Protobuf.js 6 code to work with 6.8 --- src/node/src/protobuf_js_6_common.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/node/src/protobuf_js_6_common.js b/src/node/src/protobuf_js_6_common.js index 91a458aa20e..e2451b4be0a 100644 --- a/src/node/src/protobuf_js_6_common.js +++ b/src/node/src/protobuf_js_6_common.js @@ -64,7 +64,7 @@ exports.deserializeCls = function deserializeCls(cls, options) { * @return {cls} The resulting object */ return function deserialize(arg_buf) { - return cls.decode(arg_buf).toObject(conversion_options); + return cls.toObject(cls.decode(arg_buf), conversion_options); }; }; From d58cfb078c95ea0093aca0265f53e411c680c892 Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Thu, 15 Jun 2017 17:32:32 -0700 Subject: [PATCH 05/41] Fix missing return after callback in a function --- src/node/src/client.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/node/src/client.js b/src/node/src/client.js index f59ac5c94c5..8892aa7c50e 100644 --- a/src/node/src/client.js +++ b/src/node/src/client.js @@ -152,6 +152,7 @@ function _write(chunk, encoding, callback) { /* Once a write fails, just call the callback immediately to let the caller flush any pending writes. */ setImmediate(callback); + return; } try { message = this.serialize(chunk); From 8e7a95d67c3611434e26eb916760e5ab3caea72a Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Mon, 19 Jun 2017 12:36:11 -0700 Subject: [PATCH 06/41] Add another missing return after a callback --- src/node/src/client.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/node/src/client.js b/src/node/src/client.js index 8892aa7c50e..0b44144920f 100644 --- a/src/node/src/client.js +++ b/src/node/src/client.js @@ -165,6 +165,7 @@ function _write(chunk, encoding, callback) { this.call.cancelWithStatus(constants.status.INTERNAL, 'Serialization failure'); callback(e); + return; } if (_.isFinite(encoding)) { /* Attach the encoding if it is a finite number. 
This is the closest we From 33215f0dba2c1d29e877917df0e749358eb6afed Mon Sep 17 00:00:00 2001 From: Ken Payson Date: Mon, 19 Jun 2017 12:41:15 -0700 Subject: [PATCH 07/41] Fix protoc artifact --- tools/dockerfile/grpc_artifact_protoc/Dockerfile | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tools/dockerfile/grpc_artifact_protoc/Dockerfile b/tools/dockerfile/grpc_artifact_protoc/Dockerfile index 2904a8fa51f..44d0eb57fdb 100644 --- a/tools/dockerfile/grpc_artifact_protoc/Dockerfile +++ b/tools/dockerfile/grpc_artifact_protoc/Dockerfile @@ -60,10 +60,9 @@ RUN yum install -y devtoolset-1.1 \ devtoolset-1.1-libstdc++-devel.i686 || true # Update Git to version >1.7 to allow cloning submodules with --reference arg. -RUN yum remove -y git -RUN yum install -y epel-release -RUN yum install -y https://centos6.iuscommunity.org/ius-release.rpm -RUN yum install -y git2u +RUN yum remove -y git && yum clean all +RUN yum install -y https://centos6.iuscommunity.org/ius-release.rpm && yum clean all +RUN yum install -y git2u && yum clean all # Start in devtoolset environment that uses GCC 4.7 CMD ["scl", "enable", "devtoolset-1.1", "bash"] From 1a0e8073a1a5926d30427b4da13a60f91b39c0cb Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Mon, 19 Jun 2017 13:19:49 -0700 Subject: [PATCH 08/41] Fix racy Node reconnect test --- src/node/test/surface_test.js | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/src/node/test/surface_test.js b/src/node/test/surface_test.js index 11577e797d9..71782ae6923 100644 --- a/src/node/test/surface_test.js +++ b/src/node/test/surface_test.js @@ -1413,13 +1413,25 @@ describe('Client reconnect', function() { }); server.bind('localhost:' + port, server_insecure_creds); server.start(); - client.echo(undefined, function(error, response) { - if (error) { - console.log(error); - } + + /* We create a new client, that will not throw an error if the server + * is not immediately available. Instead, it will wait for the server + * to be available, then the call will complete. Once this happens, the + * original client should be able to make a new call and connect to the + * restarted server without having the call fail due to connection + * errors. 
*/ + var client2 = new Client('localhost:' + port, + grpc.credentials.createInsecure()); + client2.echo({value: 'test', value2: 3}, function(error, response) { assert.ifError(error); - assert.deepEqual(response, {value: '', value2: 0}); - done(); + client.echo(undefined, function(error, response) { + if (error) { + console.log(error); + } + assert.ifError(error); + assert.deepEqual(response, {value: '', value2: 0}); + done(); + }); }); }); }); From ebc12e56f25bfb893f3d412cc083de36df5ef558 Mon Sep 17 00:00:00 2001 From: Matt Kwong Date: Fri, 2 Jun 2017 14:02:47 -0700 Subject: [PATCH 09/41] Update Mono to Jessie for C# Dockerfiles --- templates/tools/dockerfile/csharp_deps.include | 4 ++-- tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile | 4 ++-- .../interoptest/grpc_interop_csharpcoreclr/Dockerfile | 4 ++-- tools/dockerfile/test/csharp_coreclr_x64/Dockerfile | 4 ++-- tools/dockerfile/test/csharp_jessie_x64/Dockerfile | 4 ++-- tools/dockerfile/test/multilang_jessie_x64/Dockerfile | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/templates/tools/dockerfile/csharp_deps.include b/templates/tools/dockerfile/csharp_deps.include index 612b119e1c9..3a40711e372 100644 --- a/templates/tools/dockerfile/csharp_deps.include +++ b/templates/tools/dockerfile/csharp_deps.include @@ -2,8 +2,8 @@ # C# dependencies # Update to a newer version of mono -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF -RUN echo "deb http://download.mono-project.com/repo/debian wheezy main" | tee /etc/apt/sources.list.d/mono-xamarin.list +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF +RUN echo "deb http://download.mono-project.com/repo/debian jessie main" | tee /etc/apt/sources.list.d/mono-official.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-apache24-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-libjpeg62-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list diff --git a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile index f9e709dccb1..90624e60105 100644 --- a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile @@ -82,8 +82,8 @@ RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # C# dependencies # Update to a newer version of mono -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF -RUN echo "deb http://download.mono-project.com/repo/debian wheezy main" | tee /etc/apt/sources.list.d/mono-xamarin.list +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF +RUN echo "deb http://download.mono-project.com/repo/debian jessie main" | tee /etc/apt/sources.list.d/mono-official.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-apache24-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-libjpeg62-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list diff --git a/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile index f9e709dccb1..90624e60105 100644 --- 
a/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile @@ -82,8 +82,8 @@ RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # C# dependencies # Update to a newer version of mono -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF -RUN echo "deb http://download.mono-project.com/repo/debian wheezy main" | tee /etc/apt/sources.list.d/mono-xamarin.list +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF +RUN echo "deb http://download.mono-project.com/repo/debian jessie main" | tee /etc/apt/sources.list.d/mono-official.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-apache24-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-libjpeg62-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list diff --git a/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile b/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile index f9e709dccb1..90624e60105 100644 --- a/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile +++ b/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile @@ -82,8 +82,8 @@ RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # C# dependencies # Update to a newer version of mono -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF -RUN echo "deb http://download.mono-project.com/repo/debian wheezy main" | tee /etc/apt/sources.list.d/mono-xamarin.list +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF +RUN echo "deb http://download.mono-project.com/repo/debian jessie main" | tee /etc/apt/sources.list.d/mono-official.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-apache24-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-libjpeg62-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list diff --git a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile index 0b21a222263..7c2a2df30c4 100644 --- a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile @@ -86,8 +86,8 @@ RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # C# dependencies # Update to a newer version of mono -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF -RUN echo "deb http://download.mono-project.com/repo/debian wheezy main" | tee /etc/apt/sources.list.d/mono-xamarin.list +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF +RUN echo "deb http://download.mono-project.com/repo/debian jessie main" | tee /etc/apt/sources.list.d/mono-official.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-apache24-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-libjpeg62-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list diff --git a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile index 9b50d85e2f3..d85411810d7 100644 --- a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile +++ 
b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile @@ -71,8 +71,8 @@ RUN pip install --upgrade google-api-python-client # C# dependencies # Update to a newer version of mono -RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF -RUN echo "deb http://download.mono-project.com/repo/debian wheezy main" | tee /etc/apt/sources.list.d/mono-xamarin.list +RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF +RUN echo "deb http://download.mono-project.com/repo/debian jessie main" | tee /etc/apt/sources.list.d/mono-official.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-apache24-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list RUN echo "deb http://download.mono-project.com/repo/debian wheezy-libjpeg62-compat main" | tee -a /etc/apt/sources.list.d/mono-xamarin.list From e9c17873af0653da9d70ccd68d94f4872f16f1e5 Mon Sep 17 00:00:00 2001 From: Ken Payson Date: Mon, 19 Jun 2017 18:31:15 -0700 Subject: [PATCH 10/41] Bump to version 1.4.0 --- BUILD | 4 ++-- CMakeLists.txt | 2 +- Makefile | 6 +++--- build.yaml | 4 ++-- gRPC-Core.podspec | 2 +- gRPC-ProtoRPC.podspec | 2 +- gRPC-RxLibrary.podspec | 2 +- gRPC.podspec | 2 +- package.json | 2 +- package.xml | 4 ++-- src/core/lib/surface/version.c | 2 +- src/cpp/common/version_cc.cc | 2 +- src/csharp/Grpc.Core/Version.csproj.include | 2 +- src/csharp/Grpc.Core/VersionInfo.cs | 2 +- src/csharp/build_packages_dotnetcli.bat | 2 +- src/csharp/build_packages_dotnetcli.sh | 4 ++-- src/node/health_check/package.json | 4 ++-- src/node/tools/package.json | 2 +- src/objective-c/!ProtoCompiler-gRPCPlugin.podspec | 2 +- src/objective-c/GRPCClient/private/version.h | 2 +- src/php/ext/grpc/version.h | 2 +- src/python/grpcio/grpc/_grpcio_metadata.py | 2 +- src/python/grpcio/grpc_version.py | 2 +- src/python/grpcio_health_checking/grpc_version.py | 2 +- src/python/grpcio_reflection/grpc_version.py | 2 +- src/python/grpcio_tests/grpc_version.py | 2 +- src/ruby/lib/grpc/version.rb | 2 +- src/ruby/tools/version.rb | 2 +- tools/distrib/python/grpcio_tools/grpc_version.py | 2 +- tools/doxygen/Doxyfile.c++ | 2 +- tools/doxygen/Doxyfile.c++.internal | 2 +- tools/doxygen/Doxyfile.core | 2 +- tools/doxygen/Doxyfile.core.internal | 2 +- 33 files changed, 40 insertions(+), 40 deletions(-) diff --git a/BUILD b/BUILD index 2070ce3311d..9b36e3bd17e 100644 --- a/BUILD +++ b/BUILD @@ -51,9 +51,9 @@ load( # This should be updated along with build.yaml g_stands_for = "gregarious" -core_version = "4.0.0-pre1" +core_version = "4.0.0" -version = "1.4.0-pre1" +version = "1.4.0" grpc_cc_library( name = "gpr", diff --git a/CMakeLists.txt b/CMakeLists.txt index acae19ef39f..4b43c5482dd 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -39,7 +39,7 @@ cmake_minimum_required(VERSION 2.8) set(PACKAGE_NAME "grpc") -set(PACKAGE_VERSION "1.4.0-pre1") +set(PACKAGE_VERSION "1.4.0") set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}") set(PACKAGE_TARNAME "${PACKAGE_NAME}-${PACKAGE_VERSION}") set(PACKAGE_BUGREPORT "https://github.com/grpc/grpc/issues/") diff --git a/Makefile b/Makefile index c87fd191445..e3ba47cd036 100644 --- a/Makefile +++ b/Makefile @@ -422,9 +422,9 @@ E = @echo Q = @ endif -CORE_VERSION = 4.0.0-pre1 -CPP_VERSION = 1.4.0-pre1 -CSHARP_VERSION = 1.4.0-pre1 +CORE_VERSION = 4.0.0 +CPP_VERSION = 1.4.0 +CSHARP_VERSION = 1.4.0 CPPFLAGS_NO_ARCH += $(addprefix -I, $(INCLUDES)) $(addprefix -D, $(DEFINES)) CPPFLAGS += $(CPPFLAGS_NO_ARCH) $(ARCH_FLAGS) 
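Most of this commit is a mechanical bump from the 1.4.0 pre-release identifiers to the final 1.4.0 (and core 4.0.0) strings across the build files and language packages. When in doubt about which core actually ended up linked into a binary, the version can be read back at runtime; a minimal sketch, assuming grpc_version_string() and grpc_g_stands_for() (both touched later in this patch) are exported via the public <grpc/grpc.h> header:

    #include <stdio.h>
    #include <grpc/grpc.h>

    int main(void) {
      grpc_init();
      /* After this series: core reports "4.0.0", wrapped languages ship 1.4.0. */
      printf("grpc core version: %s\n", grpc_version_string());
      printf("g stands for:      %s\n", grpc_g_stands_for());
      grpc_shutdown();
      return 0;
    }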
diff --git a/build.yaml b/build.yaml index fd43d64b1a9..627ff5cce28 100644 --- a/build.yaml +++ b/build.yaml @@ -12,9 +12,9 @@ settings: '#08': Use "-preN" suffixes to identify pre-release versions '#09': Per-language overrides are possible with (eg) ruby_version tag here '#10': See the expand_version.py for all the quirks here - core_version: 4.0.0-pre1 + core_version: 4.0.0 g_stands_for: gregarious - version: 1.4.0-pre1 + version: 1.4.0 filegroups: - name: census public_headers: diff --git a/gRPC-Core.podspec b/gRPC-Core.podspec index 44e8931fbca..3b1fd4b534c 100644 --- a/gRPC-Core.podspec +++ b/gRPC-Core.podspec @@ -37,7 +37,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-Core' - version = '1.4.0-pre1' + version = '1.4.0' s.version = version s.summary = 'Core cross-platform gRPC library, written in C' s.homepage = 'http://www.grpc.io' diff --git a/gRPC-ProtoRPC.podspec b/gRPC-ProtoRPC.podspec index 88a64b8d11b..596579e693e 100644 --- a/gRPC-ProtoRPC.podspec +++ b/gRPC-ProtoRPC.podspec @@ -36,7 +36,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-ProtoRPC' - version = '1.4.0-pre1' + version = '1.4.0' s.version = version s.summary = 'RPC library for Protocol Buffers, based on gRPC' s.homepage = 'http://www.grpc.io' diff --git a/gRPC-RxLibrary.podspec b/gRPC-RxLibrary.podspec index cc6ab6c5cf8..0cb7128f598 100644 --- a/gRPC-RxLibrary.podspec +++ b/gRPC-RxLibrary.podspec @@ -36,7 +36,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-RxLibrary' - version = '1.4.0-pre1' + version = '1.4.0' s.version = version s.summary = 'Reactive Extensions library for iOS/OSX.' s.homepage = 'http://www.grpc.io' diff --git a/gRPC.podspec b/gRPC.podspec index cf63bfeac75..cf261a102b9 100644 --- a/gRPC.podspec +++ b/gRPC.podspec @@ -35,7 +35,7 @@ Pod::Spec.new do |s| s.name = 'gRPC' - version = '1.4.0-pre1' + version = '1.4.0' s.version = version s.summary = 'gRPC client library for iOS/OSX' s.homepage = 'http://www.grpc.io' diff --git a/package.json b/package.json index c78040e9fd4..4b1b7d7d6ac 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "grpc", - "version": "1.4.0-pre1", + "version": "1.4.0", "author": "Google Inc.", "description": "gRPC Library for Node", "homepage": "http://www.grpc.io/", diff --git a/package.xml b/package.xml index 5d6c089f04f..ea1b3001eb1 100644 --- a/package.xml +++ b/package.xml @@ -13,8 +13,8 @@ 2017-05-22 - 1.4.0RC1 - 1.4.0RC1 + 1.4.0 + 1.4.0 beta diff --git a/src/core/lib/surface/version.c b/src/core/lib/surface/version.c index 39dfebb9485..59c1fd7fe99 100644 --- a/src/core/lib/surface/version.c +++ b/src/core/lib/surface/version.c @@ -36,6 +36,6 @@ #include -const char *grpc_version_string(void) { return "4.0.0-pre1"; } +const char *grpc_version_string(void) { return "4.0.0"; } const char *grpc_g_stands_for(void) { return "gregarious"; } diff --git a/src/cpp/common/version_cc.cc b/src/cpp/common/version_cc.cc index 8359016c022..4691696a23c 100644 --- a/src/cpp/common/version_cc.cc +++ b/src/cpp/common/version_cc.cc @@ -37,5 +37,5 @@ #include namespace grpc { -grpc::string Version() { return "1.4.0-pre1"; } +grpc::string Version() { return "1.4.0"; } } diff --git a/src/csharp/Grpc.Core/Version.csproj.include b/src/csharp/Grpc.Core/Version.csproj.include index 8a15980f0aa..09fef990c4b 100755 --- a/src/csharp/Grpc.Core/Version.csproj.include +++ b/src/csharp/Grpc.Core/Version.csproj.include @@ -1,7 +1,7 @@ - 1.4.0-pre1 + 1.4.0 3.3.0 diff --git a/src/csharp/Grpc.Core/VersionInfo.cs b/src/csharp/Grpc.Core/VersionInfo.cs index feac0736c37..541323fb3d0 100644 --- 
a/src/csharp/Grpc.Core/VersionInfo.cs +++ b/src/csharp/Grpc.Core/VersionInfo.cs @@ -53,6 +53,6 @@ namespace Grpc.Core /// /// Current version of gRPC C# /// - public const string CurrentVersion = "1.4.0-pre1"; + public const string CurrentVersion = "1.4.0"; } } diff --git a/src/csharp/build_packages_dotnetcli.bat b/src/csharp/build_packages_dotnetcli.bat index cb4ff60d554..4fcf209c078 100755 --- a/src/csharp/build_packages_dotnetcli.bat +++ b/src/csharp/build_packages_dotnetcli.bat @@ -28,7 +28,7 @@ @rem OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. @rem Current package versions -set VERSION=1.4.0-pre1 +set VERSION=1.4.0 @rem Adjust the location of nuget.exe set NUGET=C:\nuget\nuget.exe diff --git a/src/csharp/build_packages_dotnetcli.sh b/src/csharp/build_packages_dotnetcli.sh index f3dc8435677..9f4bf2fa91f 100755 --- a/src/csharp/build_packages_dotnetcli.sh +++ b/src/csharp/build_packages_dotnetcli.sh @@ -54,7 +54,7 @@ dotnet pack --configuration Release Grpc.Auth --output ../../../artifacts dotnet pack --configuration Release Grpc.HealthCheck --output ../../../artifacts dotnet pack --configuration Release Grpc.Reflection --output ../../../artifacts -nuget pack Grpc.nuspec -Version "1.4.0-pre1" -OutputDirectory ../../artifacts -nuget pack Grpc.Tools.nuspec -Version "1.4.0-pre1" -OutputDirectory ../../artifacts +nuget pack Grpc.nuspec -Version "1.4.0" -OutputDirectory ../../artifacts +nuget pack Grpc.Tools.nuspec -Version "1.4.0" -OutputDirectory ../../artifacts (cd ../../artifacts && zip csharp_nugets_dotnetcli.zip *.nupkg) diff --git a/src/node/health_check/package.json b/src/node/health_check/package.json index 238547c1771..f619e3f3e11 100644 --- a/src/node/health_check/package.json +++ b/src/node/health_check/package.json @@ -1,6 +1,6 @@ { "name": "grpc-health-check", - "version": "1.4.0-pre1", + "version": "1.4.0", "author": "Google Inc.", "description": "Health check service for use with gRPC", "repository": { @@ -15,7 +15,7 @@ } ], "dependencies": { - "grpc": "^1.4.0-pre1", + "grpc": "^1.4.0", "lodash": "^3.9.3", "google-protobuf": "^3.0.0" }, diff --git a/src/node/tools/package.json b/src/node/tools/package.json index f4f72a4de5e..777be92f2d1 100644 --- a/src/node/tools/package.json +++ b/src/node/tools/package.json @@ -1,6 +1,6 @@ { "name": "grpc-tools", - "version": "1.4.0-pre1", + "version": "1.4.0", "author": "Google Inc.", "description": "Tools for developing with gRPC on Node.js", "homepage": "http://www.grpc.io/", diff --git a/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec b/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec index 09f3303654e..180b2e9cdef 100644 --- a/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec +++ b/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec @@ -42,7 +42,7 @@ Pod::Spec.new do |s| # exclamation mark ensures that other "regular" pods will be able to find it as it'll be installed # before them. s.name = '!ProtoCompiler-gRPCPlugin' - v = '1.4.0-pre1' + v = '1.4.0' s.version = v s.summary = 'The gRPC ProtoC plugin generates Objective-C files from .proto services.' s.description = <<-DESC diff --git a/src/objective-c/GRPCClient/private/version.h b/src/objective-c/GRPCClient/private/version.h index 25a232a7222..7b8da4db664 100644 --- a/src/objective-c/GRPCClient/private/version.h +++ b/src/objective-c/GRPCClient/private/version.h @@ -38,4 +38,4 @@ // `tools/buildgen/generate_projects.sh`. 
-#define GRPC_OBJC_VERSION_STRING @"1.4.0-pre1" +#define GRPC_OBJC_VERSION_STRING @"1.4.0" diff --git a/src/php/ext/grpc/version.h b/src/php/ext/grpc/version.h index 0d5d36d9cf4..993ef2de274 100644 --- a/src/php/ext/grpc/version.h +++ b/src/php/ext/grpc/version.h @@ -35,6 +35,6 @@ #ifndef VERSION_H #define VERSION_H -#define PHP_GRPC_VERSION "1.4.0RC1" +#define PHP_GRPC_VERSION "1.4.0" #endif /* VERSION_H */ diff --git a/src/python/grpcio/grpc/_grpcio_metadata.py b/src/python/grpcio/grpc/_grpcio_metadata.py index fcfd1976322..4ef40343142 100644 --- a/src/python/grpcio/grpc/_grpcio_metadata.py +++ b/src/python/grpcio/grpc/_grpcio_metadata.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc/_grpcio_metadata.py.template`!!! -__version__ = """1.4.0rc1""" +__version__ = """1.4.0""" diff --git a/src/python/grpcio/grpc_version.py b/src/python/grpcio/grpc_version.py index 31b85e5aee7..044a9cd9f6c 100644 --- a/src/python/grpcio/grpc_version.py +++ b/src/python/grpcio/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_version.py.template`!!! -VERSION='1.4.0rc1' +VERSION='1.4.0' diff --git a/src/python/grpcio_health_checking/grpc_version.py b/src/python/grpcio_health_checking/grpc_version.py index 53d798f603d..15636c7bc0f 100644 --- a/src/python/grpcio_health_checking/grpc_version.py +++ b/src/python/grpcio_health_checking/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_health_checking/grpc_version.py.template`!!! -VERSION='1.4.0rc1' +VERSION='1.4.0' diff --git a/src/python/grpcio_reflection/grpc_version.py b/src/python/grpcio_reflection/grpc_version.py index 25ee2808c6a..7e699db4f61 100644 --- a/src/python/grpcio_reflection/grpc_version.py +++ b/src/python/grpcio_reflection/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_reflection/grpc_version.py.template`!!! -VERSION='1.4.0rc1' +VERSION='1.4.0' diff --git a/src/python/grpcio_tests/grpc_version.py b/src/python/grpcio_tests/grpc_version.py index ea9d30704a6..00b31bcdb98 100644 --- a/src/python/grpcio_tests/grpc_version.py +++ b/src/python/grpcio_tests/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!! -VERSION='1.4.0rc1' +VERSION='1.4.0' diff --git a/src/ruby/lib/grpc/version.rb b/src/ruby/lib/grpc/version.rb index 103e5cbbcda..7801316561d 100644 --- a/src/ruby/lib/grpc/version.rb +++ b/src/ruby/lib/grpc/version.rb @@ -29,5 +29,5 @@ # GRPC contains the General RPC module. module GRPC - VERSION = '1.4.0.pre1' + VERSION = '1.4.0' end diff --git a/src/ruby/tools/version.rb b/src/ruby/tools/version.rb index 9ce13f3f827..74c2fd38a45 100644 --- a/src/ruby/tools/version.rb +++ b/src/ruby/tools/version.rb @@ -29,6 +29,6 @@ module GRPC module Tools - VERSION = '1.4.0.pre1' + VERSION = '1.4.0' end end diff --git a/tools/distrib/python/grpcio_tools/grpc_version.py b/tools/distrib/python/grpcio_tools/grpc_version.py index 09738ea9f30..1bfc4b79d18 100644 --- a/tools/distrib/python/grpcio_tools/grpc_version.py +++ b/tools/distrib/python/grpcio_tools/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!! 
-VERSION='1.4.0rc1' +VERSION='1.4.0' diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++ index d3eb740e296..0d4a85cfc3d 100644 --- a/tools/doxygen/Doxyfile.c++ +++ b/tools/doxygen/Doxyfile.c++ @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.4.0-pre1 +PROJECT_NUMBER = 1.4.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal index c87e52df82b..82f1667a7ff 100644 --- a/tools/doxygen/Doxyfile.c++.internal +++ b/tools/doxygen/Doxyfile.c++.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.4.0-pre1 +PROJECT_NUMBER = 1.4.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.core b/tools/doxygen/Doxyfile.core index 1a8902a4ae0..6c79093142d 100644 --- a/tools/doxygen/Doxyfile.core +++ b/tools/doxygen/Doxyfile.core @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Core" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 4.0.0-pre1 +PROJECT_NUMBER = 4.0.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.core.internal b/tools/doxygen/Doxyfile.core.internal index 49ebc2a9b7e..9ec3078bbd0 100644 --- a/tools/doxygen/Doxyfile.core.internal +++ b/tools/doxygen/Doxyfile.core.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Core" # could be handy for archiving the generated documentation or if some version # control system is used. 
-PROJECT_NUMBER = 4.0.0-pre1 +PROJECT_NUMBER = 4.0.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a From 4b65414d2578dc6c1e0c6b162cb942c05e0d7a6a Mon Sep 17 00:00:00 2001 From: yang-g Date: Tue, 20 Jun 2017 10:38:17 -0700 Subject: [PATCH 11/41] Unref existing error before setting new one --- src/core/ext/transport/chttp2/transport/chttp2_transport.c | 1 + 1 file changed, 1 insertion(+) diff --git a/src/core/ext/transport/chttp2/transport/chttp2_transport.c b/src/core/ext/transport/chttp2/transport/chttp2_transport.c index f3268bcfcac..3e88df9dcae 100644 --- a/src/core/ext/transport/chttp2/transport/chttp2_transport.c +++ b/src/core/ext/transport/chttp2/transport/chttp2_transport.c @@ -2772,6 +2772,7 @@ grpc_chttp2_incoming_byte_stream *grpc_chttp2_incoming_byte_stream_create( gpr_ref_init(&incoming_byte_stream->refs, 2); incoming_byte_stream->transport = t; incoming_byte_stream->stream = s; + GRPC_ERROR_UNREF(s->byte_stream_error); s->byte_stream_error = GRPC_ERROR_NONE; return incoming_byte_stream; } From 1def5a181d30dbeefb1e7c35fe5d50207188eafe Mon Sep 17 00:00:00 2001 From: Yuchen Zeng Date: Mon, 19 Jun 2017 16:53:43 -0700 Subject: [PATCH 12/41] Fix max_message_length --- test/core/end2end/tests/max_message_length.c | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/core/end2end/tests/max_message_length.c b/test/core/end2end/tests/max_message_length.c index f65edab8657..a4262df1201 100644 --- a/test/core/end2end/tests/max_message_length.c +++ b/test/core/end2end/tests/max_message_length.c @@ -83,11 +83,11 @@ static void drain_cq(grpc_completion_queue *cq) { static void shutdown_server(grpc_end2end_test_fixture *f) { if (!f->server) return; - grpc_server_shutdown_and_notify(f->server, f->shutdown_cq, tag(1000)); - GPR_ASSERT(grpc_completion_queue_pluck(f->shutdown_cq, tag(1000), - grpc_timeout_seconds_to_deadline(5), - NULL) - .type == GRPC_OP_COMPLETE); + grpc_server_shutdown_and_notify(f->server, f->cq, tag(1000)); + grpc_event ev = grpc_completion_queue_next( + f->cq, grpc_timeout_seconds_to_deadline(5), NULL); + GPR_ASSERT(ev.type == GRPC_OP_COMPLETE); + GPR_ASSERT(ev.tag == tag(1000)); grpc_server_destroy(f->server); f->server = NULL; } From 9cd1ba1c3ed4a1a5582c000bda6e20763cdc7ef4 Mon Sep 17 00:00:00 2001 From: Muxi Yan Date: Fri, 16 Jun 2017 14:28:34 -0700 Subject: [PATCH 13/41] Release slice no longer owned --- src/objective-c/GRPCClient/private/GRPCChannel.m | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/objective-c/GRPCClient/private/GRPCChannel.m b/src/objective-c/GRPCClient/private/GRPCChannel.m index c533c5ae711..fcfaa4a134d 100644 --- a/src/objective-c/GRPCClient/private/GRPCChannel.m +++ b/src/objective-c/GRPCClient/private/GRPCChannel.m @@ -197,12 +197,15 @@ static grpc_channel_args *BuildChannelArgs(NSDictionary *dictionary) { - (grpc_call *)unmanagedCallWithPath:(NSString *)path completionQueue:(GRPCCompletionQueue *)queue { - return grpc_channel_create_call(_unmanagedChannel, - NULL, GRPC_PROPAGATE_DEFAULTS, - queue.unmanagedQueue, - grpc_slice_from_copied_string(path.UTF8String), - NULL, // Passing NULL for host - gpr_inf_future(GPR_CLOCK_REALTIME), NULL); + grpc_slice path_slice = grpc_slice_from_copied_string(path.UTF8String); + grpc_call *call = grpc_channel_create_call(_unmanagedChannel, + NULL, GRPC_PROPAGATE_DEFAULTS, + queue.unmanagedQueue, + path_slice, + NULL, // Passing NULL for 
host + gpr_inf_future(GPR_CLOCK_REALTIME), NULL); + grpc_slice_unref(path_slice); + return call; } @end From bb8b1c9be4ea443cb7c21ed2760e6e7358ea11bc Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Fri, 23 Jun 2017 15:10:18 -0700 Subject: [PATCH 14/41] Node: fix status memory leak, improve tcp_uv read buffer allocation code --- src/core/lib/iomgr/tcp_uv.c | 14 ++++++++++++-- src/node/ext/call.cc | 5 ++++- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/core/lib/iomgr/tcp_uv.c b/src/core/lib/iomgr/tcp_uv.c index dc23e4f5211..e6c253a3f06 100644 --- a/src/core/lib/iomgr/tcp_uv.c +++ b/src/core/lib/iomgr/tcp_uv.c @@ -80,6 +80,7 @@ typedef struct { } grpc_tcp; static void tcp_free(grpc_exec_ctx *exec_ctx, grpc_tcp *tcp) { + grpc_slice_unref(tcp->read_slice); grpc_resource_user_unref(exec_ctx, tcp->resource_user); gpr_free(tcp); } @@ -125,13 +126,17 @@ static void uv_close_callback(uv_handle_t *handle) { grpc_exec_ctx_finish(&exec_ctx); } +static grpc_slice alloc_read_slice(grpc_exec_ctx *exec_ctx, + grpc_resource_user *resource_user) { + return grpc_resource_user_slice_malloc(exec_ctx, resource_user, + GRPC_TCP_DEFAULT_READ_SLICE_SIZE); +} + static void alloc_uv_buf(uv_handle_t *handle, size_t suggested_size, uv_buf_t *buf) { grpc_exec_ctx exec_ctx = GRPC_EXEC_CTX_INIT; grpc_tcp *tcp = handle->data; (void)suggested_size; - tcp->read_slice = grpc_resource_user_slice_malloc( - &exec_ctx, tcp->resource_user, GRPC_TCP_DEFAULT_READ_SLICE_SIZE); buf->base = (char *)GRPC_SLICE_START_PTR(tcp->read_slice); buf->len = GRPC_SLICE_LENGTH(tcp->read_slice); grpc_exec_ctx_finish(&exec_ctx); @@ -158,6 +163,7 @@ static void read_callback(uv_stream_t *stream, ssize_t nread, // Successful read sub = grpc_slice_sub_no_ref(tcp->read_slice, 0, (size_t)nread); grpc_slice_buffer_add(tcp->read_slices, sub); + tcp->read_slice = alloc_read_slice(&exec_ctx, tcp->resource_user); error = GRPC_ERROR_NONE; if (GRPC_TRACER_ON(grpc_tcp_trace)) { size_t i; @@ -347,6 +353,7 @@ grpc_endpoint *grpc_tcp_create(uv_tcp_t *handle, grpc_resource_quota *resource_quota, char *peer_string) { grpc_tcp *tcp = (grpc_tcp *)gpr_malloc(sizeof(grpc_tcp)); + grpc_exec_ctx exec_ctx = GRPC_EXEC_CTX_INIT; if (GRPC_TRACER_ON(grpc_tcp_trace)) { gpr_log(GPR_DEBUG, "Creating TCP endpoint %p", tcp); @@ -363,6 +370,7 @@ grpc_endpoint *grpc_tcp_create(uv_tcp_t *handle, tcp->peer_string = gpr_strdup(peer_string); tcp->shutting_down = false; tcp->resource_user = grpc_resource_user_create(resource_quota, peer_string); + tcp->read_slice = alloc_read_slice(&exec_ctx, tcp->resource_user); /* Tell network status tracking code about the new endpoint */ grpc_network_status_register_endpoint(&tcp->base); @@ -370,6 +378,8 @@ grpc_endpoint *grpc_tcp_create(uv_tcp_t *handle, uv_unref((uv_handle_t *)handle); #endif + grpc_exec_ctx_finish(&exec_ctx); + return &tcp->base; } diff --git a/src/node/ext/call.cc b/src/node/ext/call.cc index 9453000ad3f..7446c8d03fb 100644 --- a/src/node/ext/call.cc +++ b/src/node/ext/call.cc @@ -398,7 +398,10 @@ class ClientStatusOp : public Op { public: ClientStatusOp() { grpc_metadata_array_init(&metadata_array); } - ~ClientStatusOp() { grpc_metadata_array_destroy(&metadata_array); } + ~ClientStatusOp() { + grpc_metadata_array_destroy(&metadata_array); + grpc_slice_unref(status_details); + } bool ParseOp(Local value, grpc_op *out) { out->data.recv_status_on_client.trailing_metadata = &metadata_array; From 64ea30fe5b9dacc2f2288e20dd0835933e5ca836 Mon Sep 17 00:00:00 2001 From: David Garcia Quintas Date: Fri, 23 Jun 2017 
17:41:20 -0700 Subject: [PATCH 15/41] Fix RR policy connectivity state upon subchannels shutdown --- .../lb_policy/round_robin/round_robin.c | 6 +- test/cpp/end2end/round_robin_end2end_test.cc | 55 ++++++++++++++++--- 2 files changed, 52 insertions(+), 9 deletions(-) diff --git a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c index 7ee6ffb7875..d9bba3598e5 100644 --- a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c +++ b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c @@ -138,6 +138,8 @@ struct round_robin_lb_policy { size_t num_ready; /** how many subchannels are in state TRANSIENT_FAILURE */ size_t num_transient_failures; + /** how many subchannels are in state SHUTDOWN */ + size_t num_shutdown; /** how many subchannels are in state IDLE */ size_t num_idle; @@ -381,6 +383,8 @@ static void update_state_counters_locked(subchannel_data *sd) { ++p->num_ready; } else if (sd->curr_connectivity_state == GRPC_CHANNEL_TRANSIENT_FAILURE) { ++p->num_transient_failures; + } else if (sd->curr_connectivity_state == GRPC_CHANNEL_SHUTDOWN) { + ++p->num_shutdown; } else if (sd->curr_connectivity_state == GRPC_CHANNEL_IDLE) { ++p->num_idle; } @@ -421,7 +425,7 @@ static grpc_connectivity_state update_lb_connectivity_status_locked( GRPC_CHANNEL_CONNECTING, GRPC_ERROR_NONE, "rr_connecting"); return GRPC_CHANNEL_CONNECTING; - } else if (p->num_subchannels == 0) { /* 3) SHUTDOWN */ + } else if (p->num_shutdown == p->num_subchannels) { /* 3) SHUTDOWN */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, GRPC_CHANNEL_SHUTDOWN, GRPC_ERROR_REF(error), "rr_shutdown"); diff --git a/test/cpp/end2end/round_robin_end2end_test.cc b/test/cpp/end2end/round_robin_end2end_test.cc index ea7639bc8f0..164740cace3 100644 --- a/test/cpp/end2end/round_robin_end2end_test.cc +++ b/test/cpp/end2end/round_robin_end2end_test.cc @@ -88,9 +88,12 @@ class RoundRobinEnd2endTest : public ::testing::Test { protected: RoundRobinEnd2endTest() : server_host_("localhost") {} - void StartServers(int num_servers) { - for (int i = 0; i < num_servers; ++i) { - servers_.emplace_back(new ServerData(server_host_)); + void StartServers(size_t num_servers, + std::vector ports = std::vector()) { + for (size_t i = 0; i < num_servers; ++i) { + int port = 0; + if (ports.size() == num_servers) port = ports[i]; + servers_.emplace_back(new ServerData(server_host_, port)); } } @@ -114,15 +117,19 @@ class RoundRobinEnd2endTest : public ::testing::Test { stub_ = grpc::testing::EchoTestService::NewStub(channel_); } - void SendRpc(int num_rpcs) { + void SendRpc(int num_rpcs, bool expect_ok = true) { EchoRequest request; EchoResponse response; request.set_message("Live long and prosper."); for (int i = 0; i < num_rpcs; i++) { ClientContext context; Status status = stub_->Echo(&context, request, &response); - EXPECT_TRUE(status.ok()); - EXPECT_EQ(response.message(), request.message()); + if (expect_ok) { + EXPECT_TRUE(status.ok()); + EXPECT_EQ(response.message(), request.message()); + } else { + EXPECT_FALSE(status.ok()); + } } } @@ -131,8 +138,8 @@ class RoundRobinEnd2endTest : public ::testing::Test { std::unique_ptr server_; MyTestServiceImpl service_; - explicit ServerData(const grpc::string& server_host) { - port_ = grpc_pick_unused_port_or_die(); + explicit ServerData(const grpc::string& server_host, int port = 0) { + port_ = port > 0 ? 
port : grpc_pick_unused_port_or_die(); gpr_log(GPR_INFO, "starting server on port %d", port_); std::ostringstream server_address; server_address << server_host << ":" << port_; @@ -191,6 +198,38 @@ TEST_F(RoundRobinEnd2endTest, RoundRobin) { EXPECT_EQ("round_robin", channel_->GetLoadBalancingPolicyName()); } +TEST_F(RoundRobinEnd2endTest, RoundRobinReconnect) { + // Start servers and send one RPC per server. + const int kNumServers = 1; + std::vector ports; + ports.push_back(grpc_pick_unused_port_or_die()); + StartServers(kNumServers, ports); + ResetStub(true /* round_robin */); + // Send one RPC per backend and make sure they are used in order. + // Note: This relies on the fact that the subchannels are reported in + // state READY in the order in which the addresses are specified, + // which is only true because the backends are all local. + for (size_t i = 0; i < servers_.size(); ++i) { + SendRpc(1); + EXPECT_EQ(1, servers_[i]->service_.request_count()) << "for backend #" << i; + } + // Check LB policy name for the channel. + EXPECT_EQ("round_robin", channel_->GetLoadBalancingPolicyName()); + + // Kill all servers + for (size_t i = 0; i < servers_.size(); ++i) { + servers_[i]->Shutdown(); + } + // Client request should fail. + SendRpc(1, false); + + // Bring servers back up on the same port (we aren't recreating the channel). + StartServers(kNumServers, ports); + + // Client request should succeed. + SendRpc(1); +} + } // namespace } // namespace testing } // namespace grpc From bc6bc090f471ac60c28a00152c637522e3bc72aa Mon Sep 17 00:00:00 2001 From: David Garcia Quintas Date: Sat, 24 Jun 2017 21:47:26 -0700 Subject: [PATCH 16/41] fixed leak and outdated comment --- .../client_channel/lb_policy/round_robin/round_robin.c | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c index d9bba3598e5..330516a2ac5 100644 --- a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c +++ b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c @@ -405,7 +405,7 @@ static grpc_connectivity_state update_lb_connectivity_status_locked( * CHECK: sd->curr_connectivity_state == CONNECTING. * * 3) RULE: ALL subchannels are SHUTDOWN => policy is SHUTDOWN. - * CHECK: p->num_subchannels = 0. + * CHECK: p->num_shutdown == p->num_subchannels. * * 4) RULE: ALL subchannels are TRANSIENT_FAILURE => policy is * TRANSIENT_FAILURE. 
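Together with the previous commit, the behavioral change here is when the round_robin policy may report SHUTDOWN: instead of keying off num_subchannels == 0, the policy now counts subchannels that have reached SHUTDOWN and reports SHUTDOWN only once all of them have. A simplified, self-contained sketch of the aggregation rules spelled out in the surrounding comment; the enum, struct, and function names below are illustrative stand-ins, not the real grpc_connectivity_state machinery:

    #include <assert.h>
    #include <stddef.h>

    /* Illustrative stand-ins for the real gRPC types (names are hypothetical). */
    typedef enum {
      CHANNEL_IDLE,
      CHANNEL_CONNECTING,
      CHANNEL_READY,
      CHANNEL_TRANSIENT_FAILURE,
      CHANNEL_SHUTDOWN
    } channel_state;

    typedef struct {
      size_t num_subchannels;
      size_t num_ready;
      size_t num_transient_failures;
      size_t num_shutdown; /* counter introduced by the previous commit */
      size_t num_idle;
    } rr_counters;

    /* Mirrors the rule ordering in update_lb_connectivity_status_locked():
     * READY if any subchannel is READY; CONNECTING if the subchannel that just
     * changed is CONNECTING; SHUTDOWN only once *all* subchannels are SHUTDOWN;
     * TRANSIENT_FAILURE / IDLE only once all subchannels are; otherwise keep
     * the current policy state. */
    static channel_state aggregate_rr_state(const rr_counters *c,
                                            channel_state changed_sd_state,
                                            channel_state current_policy_state) {
      if (c->num_ready > 0) return CHANNEL_READY;
      if (changed_sd_state == CHANNEL_CONNECTING) return CHANNEL_CONNECTING;
      if (c->num_shutdown == c->num_subchannels) return CHANNEL_SHUTDOWN;
      if (c->num_transient_failures == c->num_subchannels)
        return CHANNEL_TRANSIENT_FAILURE;
      if (c->num_idle == c->num_subchannels) return CHANNEL_IDLE;
      return current_policy_state;
    }

    int main(void) {
      /* Three subchannels, all of them shut down: the policy itself is SHUTDOWN. */
      rr_counters c = {3, 0, 0, 3, 0};
      assert(aggregate_rr_state(&c, CHANNEL_SHUTDOWN, CHANNEL_TRANSIENT_FAILURE) ==
             CHANNEL_SHUTDOWN);
      return 0;
    }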
@@ -427,14 +427,13 @@ static grpc_connectivity_state update_lb_connectivity_status_locked( return GRPC_CHANNEL_CONNECTING; } else if (p->num_shutdown == p->num_subchannels) { /* 3) SHUTDOWN */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, - GRPC_CHANNEL_SHUTDOWN, GRPC_ERROR_REF(error), - "rr_shutdown"); + GRPC_CHANNEL_SHUTDOWN, error, "rr_shutdown"); return GRPC_CHANNEL_SHUTDOWN; } else if (p->num_transient_failures == p->num_subchannels) { /* 4) TRANSIENT_FAILURE */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, - GRPC_CHANNEL_TRANSIENT_FAILURE, - GRPC_ERROR_REF(error), "rr_transient_failure"); + GRPC_CHANNEL_TRANSIENT_FAILURE, error, + "rr_transient_failure"); return GRPC_CHANNEL_TRANSIENT_FAILURE; } else if (p->num_idle == p->num_subchannels) { /* 5) IDLE */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, GRPC_CHANNEL_IDLE, From eb74c92f90aea23c9cfcc633f6711a7a567e3781 Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Mon, 26 Jun 2017 09:42:04 -0700 Subject: [PATCH 17/41] Fix unref of read slice in tcp_uv.c --- src/core/lib/iomgr/tcp_uv.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/lib/iomgr/tcp_uv.c b/src/core/lib/iomgr/tcp_uv.c index e6c253a3f06..31d1a5e84dd 100644 --- a/src/core/lib/iomgr/tcp_uv.c +++ b/src/core/lib/iomgr/tcp_uv.c @@ -80,7 +80,7 @@ typedef struct { } grpc_tcp; static void tcp_free(grpc_exec_ctx *exec_ctx, grpc_tcp *tcp) { - grpc_slice_unref(tcp->read_slice); + grpc_slice_unref_internal(exec_ctx, tcp->read_slice); grpc_resource_user_unref(exec_ctx, tcp->resource_user); gpr_free(tcp); } From cebc1d73690f66127600b9c705bb8827ad413a1c Mon Sep 17 00:00:00 2001 From: David Garcia Quintas Date: Mon, 26 Jun 2017 09:59:04 -0700 Subject: [PATCH 18/41] Deal with errors refs --- .../lb_policy/round_robin/round_robin.c | 27 ++++++++++--------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c index 330516a2ac5..5b3476e5b4f 100644 --- a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c +++ b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c @@ -415,33 +415,35 @@ static grpc_connectivity_state update_lb_connectivity_status_locked( * CHECK: p->num_idle == p->num_subchannels. 
*/ round_robin_lb_policy *p = sd->policy; + grpc_connectivity_state new_state = sd->curr_connectivity_state; if (p->num_ready > 0) { /* 1) READY */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, GRPC_CHANNEL_READY, GRPC_ERROR_NONE, "rr_ready"); - return GRPC_CHANNEL_READY; + new_state = GRPC_CHANNEL_READY; } else if (sd->curr_connectivity_state == GRPC_CHANNEL_CONNECTING) { /* 2) CONNECTING */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, GRPC_CHANNEL_CONNECTING, GRPC_ERROR_NONE, "rr_connecting"); - return GRPC_CHANNEL_CONNECTING; + new_state = GRPC_CHANNEL_CONNECTING; } else if (p->num_shutdown == p->num_subchannels) { /* 3) SHUTDOWN */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, - GRPC_CHANNEL_SHUTDOWN, error, "rr_shutdown"); - return GRPC_CHANNEL_SHUTDOWN; + GRPC_CHANNEL_SHUTDOWN, GRPC_ERROR_REF(error), + "rr_shutdown"); + new_state = GRPC_CHANNEL_SHUTDOWN; } else if (p->num_transient_failures == p->num_subchannels) { /* 4) TRANSIENT_FAILURE */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, - GRPC_CHANNEL_TRANSIENT_FAILURE, error, - "rr_transient_failure"); - return GRPC_CHANNEL_TRANSIENT_FAILURE; + GRPC_CHANNEL_TRANSIENT_FAILURE, + GRPC_ERROR_REF(error), "rr_transient_failure"); + new_state = GRPC_CHANNEL_TRANSIENT_FAILURE; } else if (p->num_idle == p->num_subchannels) { /* 5) IDLE */ grpc_connectivity_state_set(exec_ctx, &p->state_tracker, GRPC_CHANNEL_IDLE, GRPC_ERROR_NONE, "rr_idle"); - return GRPC_CHANNEL_IDLE; + new_state = GRPC_CHANNEL_IDLE; } - /* no change */ - return sd->curr_connectivity_state; + GRPC_ERROR_UNREF(error); + return new_state; } static void rr_connectivity_changed_locked(grpc_exec_ctx *exec_ctx, void *arg, @@ -557,8 +559,9 @@ static void rr_ping_one_locked(grpc_exec_ctx *exec_ctx, grpc_lb_policy *pol, grpc_connected_subchannel_ping(exec_ctx, target, closure); GRPC_CONNECTED_SUBCHANNEL_UNREF(exec_ctx, target, "rr_picked"); } else { - grpc_closure_sched(exec_ctx, closure, GRPC_ERROR_CREATE_FROM_STATIC_STRING( - "Round Robin not connected")); + grpc_closure_sched( + exec_ctx, closure, + GRPC_ERROR_CREATE_FROM_STATIC_STRING("Round Robin not connected")); } } From 8ba7b04759ea1aa7d7b75fd341f6cb95408e42a0 Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Mon, 26 Jun 2017 16:04:49 -0700 Subject: [PATCH 19/41] Remove extra blank line --- src/core/lib/iomgr/tcp_uv.c | 1 - 1 file changed, 1 deletion(-) diff --git a/src/core/lib/iomgr/tcp_uv.c b/src/core/lib/iomgr/tcp_uv.c index 31d1a5e84dd..2965337a4a8 100644 --- a/src/core/lib/iomgr/tcp_uv.c +++ b/src/core/lib/iomgr/tcp_uv.c @@ -379,7 +379,6 @@ grpc_endpoint *grpc_tcp_create(uv_tcp_t *handle, #endif grpc_exec_ctx_finish(&exec_ctx); - return &tcp->base; } From 3d995970322902a9554a285ef181fcdf06b33f7e Mon Sep 17 00:00:00 2001 From: Makarand Dharmapurikar Date: Tue, 30 May 2017 14:03:01 -0700 Subject: [PATCH 20/41] s/inline/__inline/. Visual studio incompatiblity. MS Visual studio '13 and before don't understand inline and throw Error C2054. 
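The commit message explains the constraint: in C mode, Visual Studio 2013 and earlier do not accept the `inline` keyword and fail with error C2054, while `__inline` is understood by MSVC as well as by GCC and Clang. This patch simply rewrites `inline` as `__inline` at each site; an alternative, shown below as a hedged sketch, is a small portability macro. GRPC_COMPAT_INLINE is a hypothetical name, and lower_32_bits mirrors the shape of the chunked_vector_hasher helper touched by the patch:

    #include <stdint.h>
    #include <stdio.h>

    /* Pick an inline spelling the compiler actually accepts in C code.
     * (Hypothetical macro name; the patch itself just writes __inline.) */
    #if defined(_MSC_VER) && !defined(__cplusplus)
    #define GRPC_COMPAT_INLINE __inline
    #else
    #define GRPC_COMPAT_INLINE inline
    #endif

    /* A tiny static helper of the kind old MSVC rejects when it is spelled
     * `static inline`. */
    static GRPC_COMPAT_INLINE uint32_t lower_32_bits(uint64_t key) {
      return (uint32_t)key;
    }

    int main(void) {
      printf("%u\n", (unsigned)lower_32_bits(0x1234567890abcdefULL));
      return 0;
    }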
Reference: https://msdn.microsoft.com/en-us/library/bw1hbe6y.aspx --- src/core/ext/census/intrusive_hash_map.c | 17 +++++++++-------- src/core/ext/census/intrusive_hash_map.h | 2 +- test/core/census/intrusive_hash_map_test.c | 4 ++-- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/src/core/ext/census/intrusive_hash_map.c b/src/core/ext/census/intrusive_hash_map.c index 77512a3aac8..9f56b765e12 100644 --- a/src/core/ext/census/intrusive_hash_map.c +++ b/src/core/ext/census/intrusive_hash_map.c @@ -37,7 +37,7 @@ extern bool hm_index_compare(const hm_index *A, const hm_index *B); /* Simple hashing function that takes lower 32 bits. */ -static inline uint32_t chunked_vector_hasher(uint64_t key) { +static __inline uint32_t chunked_vector_hasher(uint64_t key) { return (uint32_t)key; } @@ -45,8 +45,8 @@ static inline uint32_t chunked_vector_hasher(uint64_t key) { static const size_t VECTOR_CHUNK_SIZE = (1 << 20) / sizeof(void *); /* Helper functions which return buckets from the chunked vector. */ -static inline void **get_mutable_bucket(const chunked_vector *buckets, - uint32_t index) { +static __inline void **get_mutable_bucket(const chunked_vector *buckets, + uint32_t index) { if (index < VECTOR_CHUNK_SIZE) { return &buckets->first_[index]; } @@ -54,7 +54,8 @@ static inline void **get_mutable_bucket(const chunked_vector *buckets, return &buckets->rest_[rest_index][index % VECTOR_CHUNK_SIZE]; } -static inline void *get_bucket(const chunked_vector *buckets, uint32_t index) { +static __inline void *get_bucket(const chunked_vector *buckets, + uint32_t index) { if (index < VECTOR_CHUNK_SIZE) { return buckets->first_[index]; } @@ -63,7 +64,7 @@ static inline void *get_bucket(const chunked_vector *buckets, uint32_t index) { } /* Helper function. */ -static inline size_t RestSize(const chunked_vector *vec) { +static __inline size_t RestSize(const chunked_vector *vec) { return (vec->size_ <= VECTOR_CHUNK_SIZE) ? 0 : (vec->size_ - VECTOR_CHUNK_SIZE - 1) / VECTOR_CHUNK_SIZE + 1; @@ -222,9 +223,9 @@ hm_item *intrusive_hash_map_erase(intrusive_hash_map *hash_map, uint64_t key) { * array_size-1. Returns true if it is a new hm_item and false if the hm_item * already existed. */ -static inline bool intrusive_hash_map_internal_insert(chunked_vector *buckets, - uint32_t hash_mask, - hm_item *item) { +static __inline bool intrusive_hash_map_internal_insert(chunked_vector *buckets, + uint32_t hash_mask, + hm_item *item) { const uint64_t key = item->key; uint32_t index = chunked_vector_hasher(key) & hash_mask; hm_item **slot = (hm_item **)get_mutable_bucket(buckets, index); diff --git a/src/core/ext/census/intrusive_hash_map.h b/src/core/ext/census/intrusive_hash_map.h index a8405517b89..e316bf4b161 100644 --- a/src/core/ext/census/intrusive_hash_map.h +++ b/src/core/ext/census/intrusive_hash_map.h @@ -101,7 +101,7 @@ typedef struct hm_index { /* Returns true if two hm_indices point to the same object within the hash map * and false otherwise. 
*/ -inline bool hm_index_compare(const hm_index *A, const hm_index *B) { +__inline bool hm_index_compare(const hm_index *A, const hm_index *B) { return (A->item == B->item && A->bucket_index == B->bucket_index); } diff --git a/test/core/census/intrusive_hash_map_test.c b/test/core/census/intrusive_hash_map_test.c index fe8d3a1675a..552546f9a31 100644 --- a/test/core/census/intrusive_hash_map_test.c +++ b/test/core/census/intrusive_hash_map_test.c @@ -49,7 +49,7 @@ static const uint32_t kInitialLog2Size = 4; typedef struct object { uint64_t val; } object; /* Helper function to allocate and initialize object. */ -static inline object *make_new_object(uint64_t val) { +static __inline object *make_new_object(uint64_t val) { object *obj = (object *)gpr_malloc(sizeof(object)); obj->val = val; return obj; @@ -63,7 +63,7 @@ typedef struct ptr_item { /* Helper function that creates a new hash map item. It is up to the user to * free the item that was allocated. */ -static inline ptr_item *make_ptr_item(uint64_t key, uint64_t value) { +static __inline ptr_item *make_ptr_item(uint64_t key, uint64_t value) { ptr_item *new_item = (ptr_item *)gpr_malloc(sizeof(ptr_item)); new_item->IHM_key = key; new_item->IHM_hash_link = NULL; From d6cc5303770379a4588cdefba0ce436e4e4004ab Mon Sep 17 00:00:00 2001 From: David Garcia Quintas Date: Mon, 26 Jun 2017 21:00:28 -0700 Subject: [PATCH 21/41] clang-format --- .../client_channel/lb_policy/round_robin/round_robin.c | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c index 5b3476e5b4f..218f0d754ae 100644 --- a/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c +++ b/src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c @@ -559,9 +559,8 @@ static void rr_ping_one_locked(grpc_exec_ctx *exec_ctx, grpc_lb_policy *pol, grpc_connected_subchannel_ping(exec_ctx, target, closure); GRPC_CONNECTED_SUBCHANNEL_UNREF(exec_ctx, target, "rr_picked"); } else { - grpc_closure_sched( - exec_ctx, closure, - GRPC_ERROR_CREATE_FROM_STATIC_STRING("Round Robin not connected")); + grpc_closure_sched(exec_ctx, closure, GRPC_ERROR_CREATE_FROM_STATIC_STRING( + "Round Robin not connected")); } } From 0911b1612afa2155940cb2bcaf8ea4e1533e1b88 Mon Sep 17 00:00:00 2001 From: ncteisen Date: Mon, 22 May 2017 10:04:01 -0700 Subject: [PATCH 22/41] Fix bm_diff --- .../microbenchmarks/bm_fullstack_trickle.cc | 19 ++++++++++++------- tools/profiling/microbenchmarks/bm_json.py | 2 +- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/test/cpp/microbenchmarks/bm_fullstack_trickle.cc b/test/cpp/microbenchmarks/bm_fullstack_trickle.cc index d7e3a9cf47d..41be66d4ea7 100644 --- a/test/cpp/microbenchmarks/bm_fullstack_trickle.cc +++ b/test/cpp/microbenchmarks/bm_fullstack_trickle.cc @@ -382,7 +382,8 @@ static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) { senv->response_writer.Finish(send_response, Status::OK, tag(3)); response_reader->Finish(&recv_response, &recv_status, tag(4)); for (int i = (1 << 3) | (1 << 4); i != 0;) { - TrickleCQNext(fixture.get(), &t, &ok, state.iterations()); + TrickleCQNext(fixture.get(), &t, &ok, + in_warmup ? 
-1 : state.iterations()); GPR_ASSERT(ok); int tagnum = (int)reinterpret_cast(t); GPR_ASSERT(i & (1 << tagnum)); @@ -419,18 +420,22 @@ static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) { } static void UnaryTrickleArgs(benchmark::internal::Benchmark* b) { + // A selection of interesting numbers const int cli_1024k = 1024 * 1024; const int cli_32M = 32 * 1024 * 1024; const int svr_256k = 256 * 1024; const int svr_4M = 4 * 1024 * 1024; const int svr_64M = 64 * 1024 * 1024; for (int bw = 64; bw <= 128 * 1024 * 1024; bw *= 16) { - b->Args({bw, cli_1024k, svr_256k}); - b->Args({bw, cli_1024k, svr_4M}); - b->Args({bw, cli_1024k, svr_64M}); - b->Args({bw, cli_32M, svr_256k}); - b->Args({bw, cli_32M, svr_4M}); - b->Args({bw, cli_32M, svr_64M}); + b->Args({1, 1, bw}); + for (int i = 64; i <= 128 * 1024 * 1024; i *= 64) { + double expected_time = + static_cast(14 + i) / (125.0 * static_cast(bw)); + if (expected_time > 2.0) continue; + b->Args({i, 1, bw}); + b->Args({1, i, bw}); + b->Args({i, i, bw}); + } } } BENCHMARK(BM_PumpUnbalancedUnary_Trickle)->Apply(UnaryTrickleArgs); diff --git a/tools/profiling/microbenchmarks/bm_json.py b/tools/profiling/microbenchmarks/bm_json.py index f4d628e11f0..49a37072208 100644 --- a/tools/profiling/microbenchmarks/bm_json.py +++ b/tools/profiling/microbenchmarks/bm_json.py @@ -56,7 +56,7 @@ _BM_SPECS = { }, 'BM_PumpUnbalancedUnary_Trickle': { 'tpl': [], - 'dyn': ['request_size', 'bandwidth_kilobits'], + 'dyn': ['cli_req_size', 'svr_req_size', 'bandwidth_kilobits'], }, 'BM_ErrorStringOnNewError': { 'tpl': ['fixture'], From dd1ccc140833e6d4117be2fc774f982bfcb757c3 Mon Sep 17 00:00:00 2001 From: ncteisen Date: Mon, 26 Jun 2017 23:07:24 -0700 Subject: [PATCH 23/41] Modularize mmicrobenchmarks --- tools/jenkins/run_performance.sh | 2 +- tools/profiling/microbenchmarks/README.md | 4 + tools/profiling/microbenchmarks/bm_diff.py | 259 ------------------ .../microbenchmarks/bm_diff/README.md | 115 ++++++++ .../microbenchmarks/bm_diff/bm_build.py | 75 +++++ .../microbenchmarks/bm_diff/bm_constants.py | 29 ++ .../microbenchmarks/bm_diff/bm_diff.py | 206 ++++++++++++++ .../microbenchmarks/bm_diff/bm_main.py | 137 +++++++++ .../microbenchmarks/bm_diff/bm_run.py | 113 ++++++++ .../microbenchmarks/bm_diff/bm_speedup.py | 59 ++++ tools/profiling/microbenchmarks/speedup.py | 67 ----- tools/run_tests/run_microbenchmark.py | 18 +- 12 files changed, 743 insertions(+), 341 deletions(-) create mode 100644 tools/profiling/microbenchmarks/README.md delete mode 100755 tools/profiling/microbenchmarks/bm_diff.py create mode 100644 tools/profiling/microbenchmarks/bm_diff/README.md create mode 100644 tools/profiling/microbenchmarks/bm_diff/bm_build.py create mode 100644 tools/profiling/microbenchmarks/bm_diff/bm_constants.py create mode 100644 tools/profiling/microbenchmarks/bm_diff/bm_diff.py create mode 100644 tools/profiling/microbenchmarks/bm_diff/bm_main.py create mode 100644 tools/profiling/microbenchmarks/bm_diff/bm_run.py create mode 100644 tools/profiling/microbenchmarks/bm_diff/bm_speedup.py delete mode 100644 tools/profiling/microbenchmarks/speedup.py diff --git a/tools/jenkins/run_performance.sh b/tools/jenkins/run_performance.sh index f530fb46b86..99214ab0b1f 100755 --- a/tools/jenkins/run_performance.sh +++ b/tools/jenkins/run_performance.sh @@ -38,4 +38,4 @@ BENCHMARKS_TO_RUN="bm_fullstack_unary_ping_pong bm_fullstack_streaming_ping_pong cd $(dirname $0)/../.. 
tools/run_tests/start_port_server.py -tools/profiling/microbenchmarks/bm_diff.py -d origin/$ghprbTargetBranch -b $BENCHMARKS_TO_RUN +tools/profiling/microbenchmarks/bm_diff/bm_main.py -d origin/$ghprbTargetBranch -b $BENCHMARKS_TO_RUN diff --git a/tools/profiling/microbenchmarks/README.md b/tools/profiling/microbenchmarks/README.md new file mode 100644 index 00000000000..035888ee188 --- /dev/null +++ b/tools/profiling/microbenchmarks/README.md @@ -0,0 +1,4 @@ +Microbenchmarks +==== + +This directory contains helper scripts for the microbenchmark suites. diff --git a/tools/profiling/microbenchmarks/bm_diff.py b/tools/profiling/microbenchmarks/bm_diff.py deleted file mode 100755 index 299abb5fdb7..00000000000 --- a/tools/profiling/microbenchmarks/bm_diff.py +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env python2.7 -# Copyright 2017, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -import sys -import json -import bm_json -import tabulate -import argparse -from scipy import stats -import subprocess -import multiprocessing -import collections -import pipes -import os -sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '..', '..', 'run_tests', 'python_utils')) -import comment_on_pr -import jobset -import itertools -import speedup -import random -import shutil -import errno - -_INTERESTING = ( - 'cpu_time', - 'real_time', - 'locks_per_iteration', - 'allocs_per_iteration', - 'writes_per_iteration', - 'atm_cas_per_iteration', - 'atm_add_per_iteration', - 'cli_transport_stalls_per_iteration', - 'cli_stream_stalls_per_iteration', - 'svr_transport_stalls_per_iteration', - 'svr_stream_stalls_per_iteration' - 'nows_per_iteration', -) - -def changed_ratio(n, o): - if float(o) <= .0001: o = 0 - if float(n) <= .0001: n = 0 - if o == 0 and n == 0: return 0 - if o == 0: return 100 - return (float(n)-float(o))/float(o) - -def median(ary): - ary = sorted(ary) - n = len(ary) - if n%2 == 0: - return (ary[n/2] + ary[n/2+1]) / 2.0 - else: - return ary[n/2] - -def min_change(pct): - return lambda n, o: abs(changed_ratio(n,o)) > pct/100.0 - -_AVAILABLE_BENCHMARK_TESTS = ['bm_fullstack_unary_ping_pong', - 'bm_fullstack_streaming_ping_pong', - 'bm_fullstack_streaming_pump', - 'bm_closure', - 'bm_cq', - 'bm_call_create', - 'bm_error', - 'bm_chttp2_hpack', - 'bm_chttp2_transport', - 'bm_pollset', - 'bm_metadata', - 'bm_fullstack_trickle'] - -argp = argparse.ArgumentParser(description='Perform diff on microbenchmarks') -argp.add_argument('-t', '--track', - choices=sorted(_INTERESTING), - nargs='+', - default=sorted(_INTERESTING), - help='Which metrics to track') -argp.add_argument('-b', '--benchmarks', nargs='+', choices=_AVAILABLE_BENCHMARK_TESTS, default=['bm_cq']) -argp.add_argument('-d', '--diff_base', type=str) -argp.add_argument('-r', '--repetitions', type=int, default=1) -argp.add_argument('-l', '--loops', type=int, default=20) -argp.add_argument('-j', '--jobs', type=int, default=multiprocessing.cpu_count()) -args = argp.parse_args() - -assert args.diff_base - -def avg(lst): - sum = 0.0 - n = 0.0 - for el in lst: - sum += el - n += 1 - return sum / n - -def make_cmd(cfg): - return ['make'] + args.benchmarks + [ - 'CONFIG=%s' % cfg, '-j', '%d' % args.jobs] - -def build(dest): - shutil.rmtree('bm_diff_%s' % dest, ignore_errors=True) - subprocess.check_call(['git', 'submodule', 'update']) - try: - subprocess.check_call(make_cmd('opt')) - subprocess.check_call(make_cmd('counters')) - except subprocess.CalledProcessError, e: - subprocess.check_call(['make', 'clean']) - subprocess.check_call(make_cmd('opt')) - subprocess.check_call(make_cmd('counters')) - os.rename('bins', 'bm_diff_%s' % dest) - -def collect1(bm, cfg, ver, idx): - cmd = ['bm_diff_%s/%s/%s' % (ver, cfg, bm), - '--benchmark_out=%s.%s.%s.%d.json' % (bm, cfg, ver, idx), - '--benchmark_out_format=json', - '--benchmark_repetitions=%d' % (args.repetitions) - ] - return jobset.JobSpec(cmd, shortname='%s %s %s %d/%d' % (bm, cfg, ver, idx+1, args.loops), - verbose_success=True, timeout_seconds=None) - -build('new') - -where_am_i = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip() -subprocess.check_call(['git', 'checkout', args.diff_base]) -try: - build('old') -finally: - subprocess.check_call(['git', 'checkout', where_am_i]) - subprocess.check_call(['git', 'submodule', 'update']) - -jobs = [] -for loop in range(0, args.loops): - jobs.extend(x for x in itertools.chain( - (collect1(bm, 'opt', 'new', 
loop) for bm in args.benchmarks), - (collect1(bm, 'counters', 'new', loop) for bm in args.benchmarks), - (collect1(bm, 'opt', 'old', loop) for bm in args.benchmarks), - (collect1(bm, 'counters', 'old', loop) for bm in args.benchmarks), - )) -random.shuffle(jobs, random.SystemRandom().random) - -jobset.run(jobs, maxjobs=args.jobs) - -class Benchmark: - - def __init__(self): - self.samples = { - True: collections.defaultdict(list), - False: collections.defaultdict(list) - } - self.final = {} - - def add_sample(self, data, new): - for f in args.track: - if f in data: - self.samples[new][f].append(float(data[f])) - - def process(self): - for f in sorted(args.track): - new = self.samples[True][f] - old = self.samples[False][f] - if not new or not old: continue - mdn_diff = abs(median(new) - median(old)) - print '%s: new=%r old=%r mdn_diff=%r' % (f, new, old, mdn_diff) - s = speedup.speedup(new, old) - if abs(s) > 3 and mdn_diff > 0.5: - self.final[f] = '%+d%%' % s - return self.final.keys() - - def skip(self): - return not self.final - - def row(self, flds): - return [self.final[f] if f in self.final else '' for f in flds] - - -def eintr_be_gone(fn): - """Run fn until it doesn't stop because of EINTR""" - while True: - try: - return fn() - except IOError, e: - if e.errno != errno.EINTR: - raise - - -def read_json(filename): - try: - with open(filename) as f: return json.loads(f.read()) - except ValueError, e: - return None - - -def finalize(): - benchmarks = collections.defaultdict(Benchmark) - - for bm in args.benchmarks: - for loop in range(0, args.loops): - js_new_ctr = read_json('%s.counters.new.%d.json' % (bm, loop)) - js_new_opt = read_json('%s.opt.new.%d.json' % (bm, loop)) - js_old_ctr = read_json('%s.counters.old.%d.json' % (bm, loop)) - js_old_opt = read_json('%s.opt.old.%d.json' % (bm, loop)) - - if js_new_ctr: - for row in bm_json.expand_json(js_new_ctr, js_new_opt): - print row - name = row['cpp_name'] - if name.endswith('_mean') or name.endswith('_stddev'): continue - benchmarks[name].add_sample(row, True) - if js_old_ctr: - for row in bm_json.expand_json(js_old_ctr, js_old_opt): - print row - name = row['cpp_name'] - if name.endswith('_mean') or name.endswith('_stddev'): continue - benchmarks[name].add_sample(row, False) - - really_interesting = set() - for name, bm in benchmarks.items(): - print name - really_interesting.update(bm.process()) - fields = [f for f in args.track if f in really_interesting] - - headers = ['Benchmark'] + fields - rows = [] - for name in sorted(benchmarks.keys()): - if benchmarks[name].skip(): continue - rows.append([name] + benchmarks[name].row(fields)) - if rows: - text = 'Performance differences noted:\n' + tabulate.tabulate(rows, headers=headers, floatfmt='+.2f') - else: - text = 'No significant performance differences' - print text - comment_on_pr.comment_on_pr('```\n%s\n```' % text) - - -eintr_be_gone(finalize) diff --git a/tools/profiling/microbenchmarks/bm_diff/README.md b/tools/profiling/microbenchmarks/bm_diff/README.md new file mode 100644 index 00000000000..4911523cc17 --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff/README.md @@ -0,0 +1,115 @@ +The bm_diff Family +==== + +This family of python scripts can be incredibly useful for fast iteration over +different performance tweaks. The tools allow you to save performance data from +a baseline commit, then quickly compare data from your working branch to that +baseline data to see if you have made any performance wins. 
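+
+As a quick preview, a full comparison cycle with the individual scripts looks
+roughly like this (the `baseline` and `current` build names are only examples,
+and bm_main.py automates the same flow). From the commit you want to measure
+against, build and run a baseline:
+
+`tools/profiling/microbenchmarks/bm_diff/bm_build.py -b bm_error -n baseline`
+
+`tools/profiling/microbenchmarks/bm_diff/bm_run.py -b bm_error -n baseline -l 5`
+
+Then check out your working branch, repeat the build and run steps with
+`-n current`, and finally compare the two runs:
+
+`tools/profiling/microbenchmarks/bm_diff/bm_diff.py -b bm_error -o baseline -n current -l 5`
+
+Each of these steps is described in its own section below.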
+
+The tools operate with three concrete steps, which can be invoked separately,
+or all together via the driver script, bm_main.py. This readme first describes
+the typical workflow for these scripts, and then covers each script in detail
+for more advanced usage.
+
+## Normal Workflow
+
+Let's say you are working on a performance optimization for grpc_error. You
+have made some significant changes and want to see some data. From your branch,
+run (ensure everything is committed first):
+
+`tools/profiling/microbenchmarks/bm_diff/bm_main.py -b bm_error -l 5 -d master`
+
+This will build the `bm_error` binary on your branch, and then it will check
+out master and build it there too. It will then run these benchmarks 5 times
+each. Lastly, it will compute the statistically significant performance
+differences between the two branches. This should show the performance wins
+your changes have made.
+
+If you have already invoked bm_main with `-d master`, you should instead use
+`-o` for subsequent runs, passing the name of the saved baseline (`old` by
+default). This allows the script to skip re-building and re-running the
+unchanged master branch. For example:
+
+`tools/profiling/microbenchmarks/bm_diff/bm_main.py -b bm_error -l 5 -o old`
+
+This will only build and run `bm_error` on your branch. It will then compare
+the output to the saved runs from master.
+
+## Advanced Workflow
+
+If you have a deeper knowledge of these scripts, you can use them to do more
+fine-tuned benchmark comparisons. For example, you could build, run, and save
+the benchmark output from two different base branches. Then you could diff both
+of these baselines against your working branch to see how the different metrics
+change. The rest of this doc goes over the details of what each of the
+individual modules accomplishes.
+
+## bm_build.py
+
+This script builds the benchmarks. It takes in a name parameter, and will
+store the binaries based on that name. Both the `opt` and `counters`
+configurations will be built. The `opt` build is used to get cpu_time and
+real_time, and the `counters` build is used to track other metrics like
+allocs, atomic adds, etc.
+
+For example, if you were to invoke (we assume everything is run from the
+root of the repo):
+
+`tools/profiling/microbenchmarks/bm_diff/bm_build.py -b bm_error -n baseline`
+
+then the microbenchmark binaries will show up under
+`bm_diff_baseline/{opt,counters}/bm_error`
+
+## bm_run.py
+
+This script runs the benchmarks. It takes a name parameter that must match the
+name that was passed to `bm_build.py`. The script then runs the benchmarks
+multiple times (the default is 20, and can be toggled via the loops parameter).
+The output is saved as `<benchmark name>.<config>.<name>.<loop index>.json`.
+
+For example, if you were to run:
+
+`tools/profiling/microbenchmarks/bm_diff/bm_run.py -b bm_error -n baseline -l 5`
+
+Then an example output file would be `bm_error.opt.baseline.0.json`
+
+## bm_diff.py
+
+This script takes in the output from two benchmark runs, computes the diff
+between them, and prints any significant improvements or regressions. It takes
+in two name parameters, old and new. These must have previously been built and
+run.
+
+For example, assuming you had already built and run a 'baseline' microbenchmark
+from master, and then you also built and ran a 'current' microbenchmark from
+the branch you were working on, you could invoke:
+
+`tools/profiling/microbenchmarks/bm_diff/bm_diff.py -b bm_error -o baseline -n current -l 5`
+
+This would output the percent difference between your branch and master.
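+
+For reference, the gate bm_diff.py applies before it reports a metric is
+roughly the following (an illustrative, simplified sketch of the logic in its
+`Benchmark.process()`; the function names here are only for illustration, and
+`new` and `old` are the lists of samples collected for one tracked field):
+
+```python
+import bm_speedup
+
+
+def _median(vals):
+    # simplified: the real implementation averages the two middle samples
+    # for even-length lists
+    return sorted(vals)[len(vals) / 2]
+
+
+def significant(new, old):
+    # percent change backed by a t-test (see bm_speedup.py)
+    s = bm_speedup.speedup(new, old)
+    mdn_diff = abs(_median(new) - _median(old))
+    # only changes of more than 3% with a non-trivial median shift get reported
+    return abs(s) > 3 and mdn_diff > 0.5
+```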
+ +## bm_main.py + +This is the driver script. It uses the previous three modules and does +everything for you. You pass in the benchmarks to be run, the number of loops, +number of CPUs to use, and the commit to compare to. Then the script will: +* Build the benchmarks at head, then checkout the branch to compare to and + build the benchmarks there +* Run both sets of microbenchmarks +* Run bm_diff.py to compare the two, outputs the difference. + +For example, one might run: + +`tools/profiling/microbenchmarks/bm_diff/bm_main.py -b bm_error -l 5 -d master` + +This would compare the current branch's error benchmarks to master. + +This script is invoked by our infrastructure on every PR to protect against +regressions and demonstrate performance wins. + +However, if you are iterating over different performance tweaks quickly, it is +unnecessary to build and run the baseline commit every time. That is why we +provide a different flag in case you are sure that the baseline benchmark has +already been built and run. In that case use the --old flag to pass in the name +of the baseline. This will only build and run the current branch. For example: + +`tools/profiling/microbenchmarks/bm_diff/bm_main.py -b bm_error -l 5 -o old` diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_build.py b/tools/profiling/microbenchmarks/bm_diff/bm_build.py new file mode 100644 index 00000000000..650ccdc2b21 --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff/bm_build.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python2.7 +# +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" Python utility to build opt and counters benchmarks """ + +import bm_constants + +import argparse +import subprocess +import multiprocessing +import os +import shutil + + +def _args(): + argp = argparse.ArgumentParser(description='Builds microbenchmarks') + argp.add_argument( + '-b', + '--benchmarks', + nargs='+', + choices=bm_constants._AVAILABLE_BENCHMARK_TESTS, + default=bm_constants._AVAILABLE_BENCHMARK_TESTS, + help='Which benchmarks to build') + argp.add_argument( + '-j', + '--jobs', + type=int, + default=multiprocessing.cpu_count(), + help='How many CPUs to dedicate to this task') + argp.add_argument( + '-n', + '--name', + type=str, + help='Unique name of this build. 
To be used as a handle to pass to the other bm* scripts' + ) + args = argp.parse_args() + assert args.name + return args + + +def _make_cmd(cfg, benchmarks, jobs): + return ['make'] + benchmarks + ['CONFIG=%s' % cfg, '-j', '%d' % jobs] + + +def build(name, benchmarks, jobs): + shutil.rmtree('bm_diff_%s' % name, ignore_errors=True) + subprocess.check_call(['git', 'submodule', 'update']) + try: + subprocess.check_call(_make_cmd('opt', benchmarks, jobs)) + subprocess.check_call(_make_cmd('counters', benchmarks, jobs)) + except subprocess.CalledProcessError, e: + subprocess.check_call(['make', 'clean']) + subprocess.check_call(_make_cmd('opt', benchmarks, jobs)) + subprocess.check_call(_make_cmd('counters', benchmarks, jobs)) + os.rename( + 'bins', + 'bm_diff_%s' % name,) + + +if __name__ == '__main__': + args = _args() + build(args.name, args.benchmarks, args.jobs) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py new file mode 100644 index 00000000000..2bbd987b20f --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python2.7 +# +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" Configurable constants for the bm_*.py family """ + +_AVAILABLE_BENCHMARK_TESTS = [ + 'bm_fullstack_unary_ping_pong', 'bm_fullstack_streaming_ping_pong', + 'bm_fullstack_streaming_pump', 'bm_closure', 'bm_cq', 'bm_call_create', + 'bm_error', 'bm_chttp2_hpack', 'bm_chttp2_transport', 'bm_pollset', + 'bm_metadata', 'bm_fullstack_trickle' +] + +_INTERESTING = ('cpu_time', 'real_time', 'locks_per_iteration', + 'allocs_per_iteration', 'writes_per_iteration', + 'atm_cas_per_iteration', 'atm_add_per_iteration', + 'nows_per_iteration',) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_diff.py b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py new file mode 100644 index 00000000000..b8e803749a2 --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python2.7 +# +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+""" Computes the diff between two bm runs and outputs significant results """ + +import bm_constants +import bm_speedup + +import sys +import os + +sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '..')) +import bm_json + +import json +import tabulate +import argparse +import collections +import subprocess + +verbose = False + + +def _median(ary): + assert (len(ary)) + ary = sorted(ary) + n = len(ary) + if n % 2 == 0: + return (ary[(n - 1) / 2] + ary[(n - 1) / 2 + 1]) / 2.0 + else: + return ary[n / 2] + + +def _args(): + argp = argparse.ArgumentParser( + description='Perform diff on microbenchmarks') + argp.add_argument( + '-t', + '--track', + choices=sorted(bm_constants._INTERESTING), + nargs='+', + default=sorted(bm_constants._INTERESTING), + help='Which metrics to track') + argp.add_argument( + '-b', + '--benchmarks', + nargs='+', + choices=bm_constants._AVAILABLE_BENCHMARK_TESTS, + default=bm_constants._AVAILABLE_BENCHMARK_TESTS, + help='Which benchmarks to run') + argp.add_argument( + '-l', + '--loops', + type=int, + default=20, + help='Number of times to loops the benchmarks. Must match what was passed to bm_run.py' + ) + argp.add_argument('-n', '--new', type=str, help='New benchmark name') + argp.add_argument('-o', '--old', type=str, help='Old benchmark name') + argp.add_argument( + '-v', '--verbose', type=bool, help='Print details of before/after') + args = argp.parse_args() + global verbose + if args.verbose: verbose = True + assert args.new + assert args.old + return args + + +def _maybe_print(str): + if verbose: print str + + +class Benchmark: + + def __init__(self): + self.samples = { + True: collections.defaultdict(list), + False: collections.defaultdict(list) + } + self.final = {} + + def add_sample(self, track, data, new): + for f in track: + if f in data: + self.samples[new][f].append(float(data[f])) + + def process(self, track, new_name, old_name): + for f in sorted(track): + new = self.samples[True][f] + old = self.samples[False][f] + if not new or not old: continue + mdn_diff = abs(_median(new) - _median(old)) + _maybe_print('%s: %s=%r %s=%r mdn_diff=%r' % + (f, new_name, new, old_name, old, mdn_diff)) + s = bm_speedup.speedup(new, old) + if abs(s) > 3 and mdn_diff > 0.5: + self.final[f] = '%+d%%' % s + return self.final.keys() + + def skip(self): + return not self.final + + def row(self, flds): + return [self.final[f] if f in self.final else '' for f in flds] + + +def _read_json(filename, badjson_files, nonexistant_files): + stripped = ".".join(filename.split(".")[:-2]) + try: + with open(filename) as f: + return json.loads(f.read()) + except IOError, e: + if stripped in nonexistant_files: + nonexistant_files[stripped] += 1 + else: + nonexistant_files[stripped] = 1 + return None + except ValueError, e: + if stripped in badjson_files: + badjson_files[stripped] += 1 + else: + badjson_files[stripped] = 1 + return None + + +def diff(bms, loops, track, old, new): + benchmarks = collections.defaultdict(Benchmark) + + badjson_files = {} + nonexistant_files = {} + for bm in bms: + for loop in range(0, loops): + for line in subprocess.check_output( + ['bm_diff_%s/opt/%s' % (old, bm), + '--benchmark_list_tests']).splitlines(): + stripped_line = line.strip().replace("/", "_").replace( + "<", "_").replace(">", "_").replace(", ", "_") + js_new_ctr = _read_json('%s.%s.counters.%s.%d.json' % + (bm, stripped_line, new, loop), + badjson_files, nonexistant_files) + js_new_opt = _read_json('%s.%s.opt.%s.%d.json' % + (bm, stripped_line, new, loop), + badjson_files, 
nonexistant_files) + js_old_ctr = _read_json('%s.%s.counters.%s.%d.json' % + (bm, stripped_line, old, loop), + badjson_files, nonexistant_files) + js_old_opt = _read_json('%s.%s.opt.%s.%d.json' % + (bm, stripped_line, old, loop), + badjson_files, nonexistant_files) + + if js_new_ctr: + for row in bm_json.expand_json(js_new_ctr, js_new_opt): + name = row['cpp_name'] + if name.endswith('_mean') or name.endswith('_stddev'): + continue + benchmarks[name].add_sample(track, row, True) + if js_old_ctr: + for row in bm_json.expand_json(js_old_ctr, js_old_opt): + name = row['cpp_name'] + if name.endswith('_mean') or name.endswith('_stddev'): + continue + benchmarks[name].add_sample(track, row, False) + + really_interesting = set() + for name, bm in benchmarks.items(): + _maybe_print(name) + really_interesting.update(bm.process(track, new, old)) + fields = [f for f in track if f in really_interesting] + + headers = ['Benchmark'] + fields + rows = [] + for name in sorted(benchmarks.keys()): + if benchmarks[name].skip(): continue + rows.append([name] + benchmarks[name].row(fields)) + note = None + if len(badjson_files): + note = 'Corrupt JSON data (indicates timeout or crash) = %s' % str(badjson_files) + if len(nonexistant_files): + if note: + note += '\n\nMissing files (indicates new benchmark) = %s' % str(nonexistant_files) + else: + note = '\n\nMissing files (indicates new benchmark) = %s' % str(nonexistant_files) + if rows: + return tabulate.tabulate(rows, headers=headers, floatfmt='+.2f'), note + else: + return None, note + + +if __name__ == '__main__': + args = _args() + diff, note = diff(args.benchmarks, args.loops, args.track, args.old, + args.new) + print('%s\n%s' % (note, diff if diff else "No performance differences")) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_main.py b/tools/profiling/microbenchmarks/bm_diff/bm_main.py new file mode 100644 index 00000000000..8a54f198ab2 --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff/bm_main.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python2.7 +# +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" Runs the entire bm_*.py pipeline, and possible comments on the PR """ + +import bm_constants +import bm_build +import bm_run +import bm_diff + +import sys +import os +import argparse +import multiprocessing +import subprocess + +sys.path.append( + os.path.join( + os.path.dirname(sys.argv[0]), '..', '..', 'run_tests', 'python_utils')) +import comment_on_pr + + +def _args(): + argp = argparse.ArgumentParser( + description='Perform diff on microbenchmarks') + argp.add_argument( + '-t', + '--track', + choices=sorted(bm_constants._INTERESTING), + nargs='+', + default=sorted(bm_constants._INTERESTING), + help='Which metrics to track') + argp.add_argument( + '-b', + '--benchmarks', + nargs='+', + choices=bm_constants._AVAILABLE_BENCHMARK_TESTS, + default=bm_constants._AVAILABLE_BENCHMARK_TESTS, + help='Which benchmarks to run') + argp.add_argument( + '-d', + '--diff_base', + type=str, + help='Commit or branch to compare the current one to') + argp.add_argument( + '-o', + '--old', + default='old', + type=str, + help='Name of baseline run to compare to. Ususally just called "old"') + argp.add_argument( + '-r', + '--repetitions', + type=int, + default=1, + help='Number of repetitions to pass to the benchmarks') + argp.add_argument( + '-l', + '--loops', + type=int, + default=20, + help='Number of times to loops the benchmarks. More loops cuts down on noise' + ) + argp.add_argument( + '-j', + '--jobs', + type=int, + default=multiprocessing.cpu_count(), + help='Number of CPUs to use') + args = argp.parse_args() + assert args.diff_base or args.old, "One of diff_base or old must be set!" + if args.loops < 3: + print "WARNING: This run will likely be noisy. Increase loops." + return args + + +def eintr_be_gone(fn): + """Run fn until it doesn't stop because of EINTR""" + + def inner(*args): + while True: + try: + return fn(*args) + except IOError, e: + if e.errno != errno.EINTR: + raise + + return inner + + +def main(args): + + bm_build.build('new', args.benchmarks, args.jobs) + + old = args.old + if args.diff_base: + old = 'old' + where_am_i = subprocess.check_output( + ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip() + subprocess.check_call(['git', 'checkout', args.diff_base]) + try: + bm_build.build('old', args.benchmarks, args.jobs) + finally: + subprocess.check_call(['git', 'checkout', where_am_i]) + subprocess.check_call(['git', 'submodule', 'update']) + + bm_run.run('new', args.benchmarks, args.jobs, args.loops, args.repetitions) + bm_run.run(old, args.benchmarks, args.jobs, args.loops, args.repetitions) + + diff, note = bm_diff.diff(args.benchmarks, args.loops, args.track, old, + 'new') + if diff: + text = 'Performance differences noted:\n' + diff + else: + text = 'No significant performance differences' + if note: + text = note + '\n\n' + text + print('%s' % text) + comment_on_pr.comment_on_pr('```\n%s\n```' % text) + + +if __name__ == '__main__': + args = _args() + main(args) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_run.py b/tools/profiling/microbenchmarks/bm_diff/bm_run.py new file mode 100644 index 00000000000..3457af916b4 --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff/bm_run.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python2.7 +# +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" Python utility to run opt and counters benchmarks and save json output """ + +import bm_constants + +import argparse +import subprocess +import multiprocessing +import random +import itertools +import sys +import os + +sys.path.append( + os.path.join( + os.path.dirname(sys.argv[0]), '..', '..', '..', 'run_tests', + 'python_utils')) +import jobset + + +def _args(): + argp = argparse.ArgumentParser(description='Runs microbenchmarks') + argp.add_argument( + '-b', + '--benchmarks', + nargs='+', + choices=bm_constants._AVAILABLE_BENCHMARK_TESTS, + default=bm_constants._AVAILABLE_BENCHMARK_TESTS, + help='Benchmarks to run') + argp.add_argument( + '-j', + '--jobs', + type=int, + default=multiprocessing.cpu_count(), + help='Number of CPUs to use') + argp.add_argument( + '-n', + '--name', + type=str, + help='Unique name of the build to run. Needs to match the handle passed to bm_build.py' + ) + argp.add_argument( + '-r', + '--repetitions', + type=int, + default=1, + help='Number of repetitions to pass to the benchmarks') + argp.add_argument( + '-l', + '--loops', + type=int, + default=20, + help='Number of times to loops the benchmarks. More loops cuts down on noise' + ) + args = argp.parse_args() + assert args.name + if args.loops < 3: + print "WARNING: This run will likely be noisy. Increase loops to at least 3." + return args + + +def _collect_bm_data(bm, cfg, name, reps, idx, loops): + jobs_list = [] + for line in subprocess.check_output( + ['bm_diff_%s/%s/%s' % (name, cfg, bm), + '--benchmark_list_tests']).splitlines(): + stripped_line = line.strip().replace("/", "_").replace( + "<", "_").replace(">", "_").replace(", ", "_") + cmd = [ + 'bm_diff_%s/%s/%s' % (name, cfg, bm), '--benchmark_filter=^%s$' % + line, '--benchmark_out=%s.%s.%s.%s.%d.json' % + (bm, stripped_line, cfg, name, idx), '--benchmark_out_format=json', + '--benchmark_repetitions=%d' % (reps) + ] + jobs_list.append( + jobset.JobSpec( + cmd, + shortname='%s %s %s %s %d/%d' % (bm, line, cfg, name, idx + 1, + loops), + verbose_success=True, + timeout_seconds=60 * 2)) + return jobs_list + + +def run(name, benchmarks, jobs, loops, reps): + jobs_list = [] + for loop in range(0, loops): + for bm in benchmarks: + jobs_list += _collect_bm_data(bm, 'opt', name, reps, loop, loops) + jobs_list += _collect_bm_data(bm, 'counters', name, reps, loop, + loops) + random.shuffle(jobs_list, random.SystemRandom().random) + jobset.run(jobs_list, maxjobs=jobs) + + +if __name__ == '__main__': + args = _args() + run(args.name, args.benchmarks, args.jobs, args.loops, args.repetitions) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py new file mode 100644 index 00000000000..3d126efa62b --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python2.7 +# +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from scipy import stats +import math + +_THRESHOLD = 1e-10 + + +def scale(a, mul): + return [x * mul for x in a] + + +def cmp(a, b): + return stats.ttest_ind(a, b) + + +def speedup(new, old): + if (len(set(new))) == 1 and new == old: return 0 + s0, p0 = cmp(new, old) + if math.isnan(p0): return 0 + if s0 == 0: return 0 + if p0 > _THRESHOLD: return 0 + if s0 < 0: + pct = 1 + while pct < 101: + sp, pp = cmp(new, scale(old, 1 - pct / 100.0)) + if sp > 0: break + if pp > _THRESHOLD: break + pct += 1 + return -(pct - 1) + else: + pct = 1 + while pct < 100000: + sp, pp = cmp(new, scale(old, 1 + pct / 100.0)) + if sp < 0: break + if pp > _THRESHOLD: break + pct += 1 + return pct - 1 + + +if __name__ == "__main__": + new = [1.0, 1.0, 1.0, 1.0] + old = [2.0, 2.0, 2.0, 2.0] + print speedup(new, old) + print speedup(old, new) diff --git a/tools/profiling/microbenchmarks/speedup.py b/tools/profiling/microbenchmarks/speedup.py deleted file mode 100644 index 8af0066c9df..00000000000 --- a/tools/profiling/microbenchmarks/speedup.py +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2017, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -from scipy import stats -import math - -_THRESHOLD = 1e-10 - -def scale(a, mul): - return [x*mul for x in a] - -def cmp(a, b): - return stats.ttest_ind(a, b) - -def speedup(new, old): - s0, p0 = cmp(new, old) - if math.isnan(p0): return 0 - if s0 == 0: return 0 - if p0 > _THRESHOLD: return 0 - if s0 < 0: - pct = 1 - while pct < 101: - sp, pp = cmp(new, scale(old, 1 - pct/100.0)) - if sp > 0: break - if pp > _THRESHOLD: break - pct += 1 - return -(pct - 1) - else: - pct = 1 - while pct < 100000: - sp, pp = cmp(new, scale(old, 1 + pct/100.0)) - if sp < 0: break - if pp > _THRESHOLD: break - pct += 1 - return pct - 1 - -if __name__ == "__main__": - new=[66034560.0, 126765693.0, 99074674.0, 98588433.0, 96731372.0, 110179725.0, 103802110.0, 101139800.0, 102357205.0, 99016353.0, 98840824.0, 99585632.0, 98791720.0, 96171521.0, 95327098.0, 95629704.0, 98209772.0, 99779411.0, 100182488.0, 98354192.0, 99644781.0, 98546709.0, 99019176.0, 99543014.0, 99077269.0, 98046601.0, 99319039.0, 98542572.0, 98886614.0, 72560968.0] - old=[60423464.0, 71249570.0, 73213089.0, 73200055.0, 72911768.0, 72347798.0, 72494672.0, 72756976.0, 72116565.0, 71541342.0, 73442538.0, 74817383.0, 73007780.0, 72499062.0, 72404945.0, 71843504.0, 73245405.0, 72778304.0, 74004519.0, 73694464.0, 72919931.0, 72955481.0, 71583857.0, 71350467.0, 71836817.0, 70064115.0, 70355345.0, 72516202.0, 71716777.0, 71532266.0] - print speedup(new, old) - print speedup(old, new) diff --git a/tools/run_tests/run_microbenchmark.py b/tools/run_tests/run_microbenchmark.py index 17b156c78f9..dadebb1b54b 100755 --- a/tools/run_tests/run_microbenchmark.py +++ b/tools/run_tests/run_microbenchmark.py @@ -38,18 +38,8 @@ import argparse import python_utils.jobset as jobset import python_utils.start_port_server as start_port_server -_AVAILABLE_BENCHMARK_TESTS = ['bm_fullstack_unary_ping_pong', - 'bm_fullstack_streaming_ping_pong', - 'bm_fullstack_streaming_pump', - 'bm_closure', - 'bm_cq', - 'bm_call_create', - 'bm_error', - 'bm_chttp2_hpack', - 'bm_chttp2_transport', - 'bm_pollset', - 'bm_metadata', - 'bm_fullstack_trickle'] +sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '..', 'profiling', 'microbenchmarks', 'bm_diff')) +import bm_constants flamegraph_dir = os.path.join(os.path.expanduser('~'), 'FlameGraph') @@ -214,8 +204,8 @@ argp.add_argument('-c', '--collect', default=sorted(collectors.keys()), help='Which collectors should be run against each benchmark') argp.add_argument('-b', '--benchmarks', - choices=_AVAILABLE_BENCHMARK_TESTS, - default=_AVAILABLE_BENCHMARK_TESTS, + choices=bm_constants._AVAILABLE_BENCHMARK_TESTS, + default=bm_constants._AVAILABLE_BENCHMARK_TESTS, nargs='+', type=str, help='Which microbenchmarks should be run') From e726e324e13af3de1381340354115f8292f8e597 Mon Sep 17 00:00:00 2001 From: ncteisen Date: Sun, 11 Jun 2017 22:44:00 -0700 Subject: [PATCH 24/41] Add trickle diff job --- tools/jenkins/run_performance.sh | 2 +- tools/jenkins/run_trickle_diff.sh | 23 ++++++++++++ .../microbenchmarks/bm_diff/bm_build.py | 13 ++++--- .../microbenchmarks/bm_diff/bm_constants.py | 3 +- .../microbenchmarks/bm_diff/bm_diff.py | 35 ++++++++++++------- .../microbenchmarks/bm_diff/bm_main.py | 22 ++++++++---- .../microbenchmarks/bm_diff/bm_run.py | 14 +++++--- 7 files changed, 82 insertions(+), 30 deletions(-) create mode 100755 tools/jenkins/run_trickle_diff.sh diff --git a/tools/jenkins/run_performance.sh b/tools/jenkins/run_performance.sh index 99214ab0b1f..4bd3defe091 100755 --- a/tools/jenkins/run_performance.sh +++ 
b/tools/jenkins/run_performance.sh @@ -28,7 +28,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # -# This script is invoked by Jenkins and runs performance smoke test. +# This script is invoked by Jenkins and runs a diff on the microbenchmarks set -ex # List of benchmarks that provide good signal for analyzing performance changes in pull requests diff --git a/tools/jenkins/run_trickle_diff.sh b/tools/jenkins/run_trickle_diff.sh new file mode 100755 index 00000000000..da905d02490 --- /dev/null +++ b/tools/jenkins/run_trickle_diff.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# This script is invoked by Jenkins and runs a diff on bm_fullstack_trickle +set -ex + +# Enter the gRPC repo root +cd $(dirname $0)/../.. + +tools/run_tests/start_port_server.py +tools/profiling/microbenchmarks/bm_diff/bm_main.py -d origin/$ghprbTargetBranch -b bm_fullstack_trickle -l 4 -t cli_transport_stalls cli_stream_stalls svr_transport_stalls svr_stream_stalls --no-counters --pr_comment_name trickle diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_build.py b/tools/profiling/microbenchmarks/bm_diff/bm_build.py index 650ccdc2b21..ce62c09d72f 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_build.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_build.py @@ -46,6 +46,9 @@ def _args(): type=str, help='Unique name of this build. 
To be used as a handle to pass to the other bm* scripts' ) + argp.add_argument('--counters', dest='counters', action='store_true') + argp.add_argument('--no-counters', dest='counters', action='store_false') + argp.set_defaults(counters=True) args = argp.parse_args() assert args.name return args @@ -55,16 +58,18 @@ def _make_cmd(cfg, benchmarks, jobs): return ['make'] + benchmarks + ['CONFIG=%s' % cfg, '-j', '%d' % jobs] -def build(name, benchmarks, jobs): +def build(name, benchmarks, jobs, counters): shutil.rmtree('bm_diff_%s' % name, ignore_errors=True) subprocess.check_call(['git', 'submodule', 'update']) try: subprocess.check_call(_make_cmd('opt', benchmarks, jobs)) - subprocess.check_call(_make_cmd('counters', benchmarks, jobs)) + if counters: + subprocess.check_call(_make_cmd('counters', benchmarks, jobs)) except subprocess.CalledProcessError, e: subprocess.check_call(['make', 'clean']) subprocess.check_call(_make_cmd('opt', benchmarks, jobs)) - subprocess.check_call(_make_cmd('counters', benchmarks, jobs)) + if counters: + subprocess.check_call(_make_cmd('counters', benchmarks, jobs)) os.rename( 'bins', 'bm_diff_%s' % name,) @@ -72,4 +77,4 @@ def build(name, benchmarks, jobs): if __name__ == '__main__': args = _args() - build(args.name, args.benchmarks, args.jobs) + build(args.name, args.benchmarks, args.jobs, args.counters) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py index 2bbd987b20f..4cd65867c37 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py @@ -26,4 +26,5 @@ _AVAILABLE_BENCHMARK_TESTS = [ _INTERESTING = ('cpu_time', 'real_time', 'locks_per_iteration', 'allocs_per_iteration', 'writes_per_iteration', 'atm_cas_per_iteration', 'atm_add_per_iteration', - 'nows_per_iteration',) + 'nows_per_iteration', 'cli_transport_stalls', 'cli_stream_stalls', + 'svr_transport_stalls', 'svr_stream_stalls',) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_diff.py b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py index b8e803749a2..73abf90ff52 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_diff.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py @@ -67,6 +67,9 @@ def _args(): default=20, help='Number of times to loops the benchmarks. 
Must match what was passed to bm_run.py' ) + argp.add_argument('--counters', dest='counters', action='store_true') + argp.add_argument('--no-counters', dest='counters', action='store_false') + argp.set_defaults(counters=True) argp.add_argument('-n', '--new', type=str, help='New benchmark name') argp.add_argument('-o', '--old', type=str, help='Old benchmark name') argp.add_argument( @@ -121,7 +124,8 @@ def _read_json(filename, badjson_files, nonexistant_files): stripped = ".".join(filename.split(".")[:-2]) try: with open(filename) as f: - return json.loads(f.read()) + r = f.read(); + return json.loads(r) except IOError, e: if stripped in nonexistant_files: nonexistant_files[stripped] += 1 @@ -129,14 +133,17 @@ def _read_json(filename, badjson_files, nonexistant_files): nonexistant_files[stripped] = 1 return None except ValueError, e: + print r if stripped in badjson_files: badjson_files[stripped] += 1 else: badjson_files[stripped] = 1 return None +def fmt_dict(d): + return ''.join([" " + k + ": " + str(d[k]) + "\n" for k in d]) -def diff(bms, loops, track, old, new): +def diff(bms, loops, track, old, new, counters): benchmarks = collections.defaultdict(Benchmark) badjson_files = {} @@ -148,18 +155,22 @@ def diff(bms, loops, track, old, new): '--benchmark_list_tests']).splitlines(): stripped_line = line.strip().replace("/", "_").replace( "<", "_").replace(">", "_").replace(", ", "_") - js_new_ctr = _read_json('%s.%s.counters.%s.%d.json' % - (bm, stripped_line, new, loop), - badjson_files, nonexistant_files) js_new_opt = _read_json('%s.%s.opt.%s.%d.json' % (bm, stripped_line, new, loop), badjson_files, nonexistant_files) - js_old_ctr = _read_json('%s.%s.counters.%s.%d.json' % - (bm, stripped_line, old, loop), - badjson_files, nonexistant_files) js_old_opt = _read_json('%s.%s.opt.%s.%d.json' % (bm, stripped_line, old, loop), badjson_files, nonexistant_files) + if counters: + js_new_ctr = _read_json('%s.%s.counters.%s.%d.json' % + (bm, stripped_line, new, loop), + badjson_files, nonexistant_files) + js_old_ctr = _read_json('%s.%s.counters.%s.%d.json' % + (bm, stripped_line, old, loop), + badjson_files, nonexistant_files) + else: + js_new_ctr = None + js_old_ctr = None if js_new_ctr: for row in bm_json.expand_json(js_new_ctr, js_new_opt): @@ -187,12 +198,12 @@ def diff(bms, loops, track, old, new): rows.append([name] + benchmarks[name].row(fields)) note = None if len(badjson_files): - note = 'Corrupt JSON data (indicates timeout or crash) = %s' % str(badjson_files) + note = 'Corrupt JSON data (indicates timeout or crash): \n%s' % fmt_dict(badjson_files) if len(nonexistant_files): if note: - note += '\n\nMissing files (indicates new benchmark) = %s' % str(nonexistant_files) + note += '\n\nMissing files (indicates new benchmark): \n%s' % fmt_dict(nonexistant_files) else: - note = '\n\nMissing files (indicates new benchmark) = %s' % str(nonexistant_files) + note = '\n\nMissing files (indicates new benchmark): \n%s' % fmt_dict(nonexistant_files) if rows: return tabulate.tabulate(rows, headers=headers, floatfmt='+.2f'), note else: @@ -202,5 +213,5 @@ def diff(bms, loops, track, old, new): if __name__ == '__main__': args = _args() diff, note = diff(args.benchmarks, args.loops, args.track, args.old, - args.new) + args.new, args.counters) print('%s\n%s' % (note, diff if diff else "No performance differences")) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_main.py b/tools/profiling/microbenchmarks/bm_diff/bm_main.py index 8a54f198ab2..8b4e0cb69a4 100644 --- 
a/tools/profiling/microbenchmarks/bm_diff/bm_main.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_main.py @@ -80,6 +80,14 @@ def _args(): type=int, default=multiprocessing.cpu_count(), help='Number of CPUs to use') + argp.add_argument( + '--pr_comment_name', + type=str, + default="microbenchmarks", + help='Name that Jenkins will use to commen on the PR') + argp.add_argument('--counters', dest='counters', action='store_true') + argp.add_argument('--no-counters', dest='counters', action='store_false') + argp.set_defaults(counters=True) args = argp.parse_args() assert args.diff_base or args.old, "One of diff_base or old must be set!" if args.loops < 3: @@ -103,7 +111,7 @@ def eintr_be_gone(fn): def main(args): - bm_build.build('new', args.benchmarks, args.jobs) + bm_build.build('new', args.benchmarks, args.jobs, args.counters) old = args.old if args.diff_base: @@ -112,20 +120,20 @@ def main(args): ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip() subprocess.check_call(['git', 'checkout', args.diff_base]) try: - bm_build.build('old', args.benchmarks, args.jobs) + bm_build.build(old, args.benchmarks, args.jobs, args.counters) finally: subprocess.check_call(['git', 'checkout', where_am_i]) subprocess.check_call(['git', 'submodule', 'update']) - bm_run.run('new', args.benchmarks, args.jobs, args.loops, args.repetitions) - bm_run.run(old, args.benchmarks, args.jobs, args.loops, args.repetitions) + bm_run.run('new', args.benchmarks, args.jobs, args.loops, args.repetitions, args.counters) + bm_run.run(old, args.benchmarks, args.jobs, args.loops, args.repetitions, args.counters) diff, note = bm_diff.diff(args.benchmarks, args.loops, args.track, old, - 'new') + 'new', args.counters) if diff: - text = 'Performance differences noted:\n' + diff + text = '[%s] Performance differences noted:\n%s' % (args.pr_comment_name, diff) else: - text = 'No significant performance differences' + text = '[%s] No significant performance differences' % args.pr_comment_name if note: text = note + '\n\n' + text print('%s' % text) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_run.py b/tools/profiling/microbenchmarks/bm_diff/bm_run.py index 3457af916b4..72b3d3cf106 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_run.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_run.py @@ -67,6 +67,9 @@ def _args(): default=20, help='Number of times to loops the benchmarks. 
More loops cuts down on noise' ) + argp.add_argument('--counters', dest='counters', action='store_true') + argp.add_argument('--no-counters', dest='counters', action='store_false') + argp.set_defaults(counters=True) args = argp.parse_args() assert args.name if args.loops < 3: @@ -93,21 +96,22 @@ def _collect_bm_data(bm, cfg, name, reps, idx, loops): shortname='%s %s %s %s %d/%d' % (bm, line, cfg, name, idx + 1, loops), verbose_success=True, - timeout_seconds=60 * 2)) + timeout_seconds=60 * 60)) # one hour return jobs_list -def run(name, benchmarks, jobs, loops, reps): +def run(name, benchmarks, jobs, loops, reps, counters): jobs_list = [] for loop in range(0, loops): for bm in benchmarks: jobs_list += _collect_bm_data(bm, 'opt', name, reps, loop, loops) - jobs_list += _collect_bm_data(bm, 'counters', name, reps, loop, - loops) + if counters: + jobs_list += _collect_bm_data(bm, 'counters', name, reps, loop, + loops) random.shuffle(jobs_list, random.SystemRandom().random) jobset.run(jobs_list, maxjobs=jobs) if __name__ == '__main__': args = _args() - run(args.name, args.benchmarks, args.jobs, args.loops, args.repetitions) + run(args.name, args.benchmarks, args.jobs, args.loops, args.repetitions, args.counters) From 8f9e01242867b5e2f23224d7efa371dd63840b6e Mon Sep 17 00:00:00 2001 From: ncteisen Date: Wed, 14 Jun 2017 19:56:29 -0700 Subject: [PATCH 25/41] Add json out flag to qps driver --- test/cpp/qps/qps_json_driver.cc | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/test/cpp/qps/qps_json_driver.cc b/test/cpp/qps/qps_json_driver.cc index f00f771ea02..762b2e0485f 100644 --- a/test/cpp/qps/qps_json_driver.cc +++ b/test/cpp/qps/qps_json_driver.cc @@ -32,6 +32,7 @@ */ #include +#include #include #include @@ -72,6 +73,9 @@ DEFINE_string(qps_server_target_override, "", "Override QPS server target to configure in client configs." "Only applicable if there is a single benchmark server."); +DEFINE_string(json_file_out, "", + "File to write the JSON output to."); + namespace grpc { namespace testing { @@ -103,6 +107,13 @@ static std::unique_ptr RunAndReport(const Scenario& scenario, *success = result->server_success(i); } + if (FLAGS_json_file_out != "") { + std::ofstream json_outfile; + json_outfile.open(FLAGS_json_file_out); + json_outfile << "{\"qps\": " << result->summary().qps() << "}\n"; + json_outfile.close(); + } + return result; } From 025e5522e3a4bc8ec993021d1f2eea72f1afc1f5 Mon Sep 17 00:00:00 2001 From: ncteisen Date: Wed, 14 Jun 2017 16:58:09 -0700 Subject: [PATCH 26/41] Add QPS Diff --- tools/jenkins/run_qps_diff.sh | 23 +++ tools/profiling/microbenchmarks/bm_json.py | 2 + tools/profiling/qps/qps_diff.py | 169 +++++++++++++++++++++ tools/profiling/qps/qps_scenarios.py | 19 +++ 4 files changed, 213 insertions(+) create mode 100755 tools/jenkins/run_qps_diff.sh create mode 100755 tools/profiling/qps/qps_diff.py create mode 100644 tools/profiling/qps/qps_scenarios.py diff --git a/tools/jenkins/run_qps_diff.sh b/tools/jenkins/run_qps_diff.sh new file mode 100755 index 00000000000..9529b0126fc --- /dev/null +++ b/tools/jenkins/run_qps_diff.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +# Copyright 2015 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# This script is invoked by Jenkins and runs a diff on the qps drivers +set -ex + +# Enter the gRPC repo root +cd $(dirname $0)/../.. + +tools/run_tests/start_port_server.py +tools/profiling/qps/qps_diff.py -d origin/$ghprbTargetBranch diff --git a/tools/profiling/microbenchmarks/bm_json.py b/tools/profiling/microbenchmarks/bm_json.py index 49a37072208..fa4e44987bd 100644 --- a/tools/profiling/microbenchmarks/bm_json.py +++ b/tools/profiling/microbenchmarks/bm_json.py @@ -182,6 +182,8 @@ def parse_name(name): return out def expand_json(js, js2 = None): + if not js and not js2: raise StopIteration() + if not js: js = js2 for bm in js['benchmarks']: if bm['name'].endswith('_stddev') or bm['name'].endswith('_mean'): continue context = js['context'] diff --git a/tools/profiling/qps/qps_diff.py b/tools/profiling/qps/qps_diff.py new file mode 100755 index 00000000000..0654f45666f --- /dev/null +++ b/tools/profiling/qps/qps_diff.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python2.7 +# +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" Computes the diff between two qps runs and outputs significant results """ + +import argparse +import json +import multiprocessing +import os +import qps_scenarios +import shutil +import subprocess +import sys +import tabulate + +sys.path.append( + os.path.join( + os.path.dirname(sys.argv[0]), '..', 'microbenchmarks', 'bm_diff')) +import bm_speedup + +sys.path.append( + os.path.join( + os.path.dirname(sys.argv[0]), '..', '..', 'run_tests', 'python_utils')) +import comment_on_pr + + +def _args(): + argp = argparse.ArgumentParser( + description='Perform diff on QPS Driver') + argp.add_argument( + '-d', + '--diff_base', + type=str, + help='Commit or branch to compare the current one to') + argp.add_argument( + '-l', + '--loops', + type=int, + default=4, + help='Number of loops for each benchmark. 
More loops cuts down on noise' + ) + argp.add_argument( + '-j', + '--jobs', + type=int, + default=multiprocessing.cpu_count(), + help='Number of CPUs to use') + args = argp.parse_args() + assert args.diff_base, "diff_base must be set" + return args + + +def _make_cmd(jobs): + return ['make', '-j', '%d' % jobs, 'qps_json_driver', 'qps_worker'] + + +def build(name, jobs): + shutil.rmtree('qps_diff_%s' % name, ignore_errors=True) + subprocess.check_call(['git', 'submodule', 'update']) + try: + subprocess.check_call(_make_cmd(jobs)) + except subprocess.CalledProcessError, e: + subprocess.check_call(['make', 'clean']) + subprocess.check_call(_make_cmd(jobs)) + os.rename('bins', 'qps_diff_%s' % name) + + +def _run_cmd(name, scenario, fname): + return ['qps_diff_%s/opt/qps_json_driver' % name, '--scenarios_json', scenario, '--json_file_out', fname] + + +def run(name, scenarios, loops): + for sn in scenarios: + for i in range(0, loops): + fname = "%s.%s.%d.json" % (sn, name, i) + subprocess.check_call(_run_cmd(name, scenarios[sn], fname)) + + +def _load_qps(fname): + try: + with open(fname) as f: + return json.loads(f.read())['qps'] + except IOError, e: + print("IOError occurred reading file: %s" % fname) + return None + except ValueError, e: + print("ValueError occurred reading file: %s" % fname) + return None + + +def _median(ary): + assert (len(ary)) + ary = sorted(ary) + n = len(ary) + if n % 2 == 0: + return (ary[(n - 1) / 2] + ary[(n - 1) / 2 + 1]) / 2.0 + else: + return ary[n / 2] + + +def diff(scenarios, loops, old, new): + old_data = {} + new_data = {} + + # collect data + for sn in scenarios: + old_data[sn] = [] + new_data[sn] = [] + for i in range(loops): + old_data[sn].append(_load_qps("%s.%s.%d.json" % (sn, old, i))) + new_data[sn].append(_load_qps("%s.%s.%d.json" % (sn, new, i))) + + # crunch data + headers = ['Benchmark', 'qps'] + rows = [] + for sn in scenarios: + mdn_diff = abs(_median(new_data[sn]) - _median(old_data[sn])) + print('%s: %s=%r %s=%r mdn_diff=%r' % (sn, new, new_data[sn], old, old_data[sn], mdn_diff)) + s = bm_speedup.speedup(new_data[sn], old_data[sn], 10e-5) + if abs(s) > 3 and mdn_diff > 0.5: + rows.append([sn, '%+d%%' % s]) + + if rows: + return tabulate.tabulate(rows, headers=headers, floatfmt='+.2f') + else: + return None + + +def main(args): + build('new', args.jobs) + + if args.diff_base: + where_am_i = subprocess.check_output( + ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip() + subprocess.check_call(['git', 'checkout', args.diff_base]) + try: + build('old', args.jobs) + finally: + subprocess.check_call(['git', 'checkout', where_am_i]) + subprocess.check_call(['git', 'submodule', 'update']) + + run('new', qps_scenarios._SCENARIOS, args.loops) + run('old', qps_scenarios._SCENARIOS, args.loops) + + diff_output = diff(qps_scenarios._SCENARIOS, args.loops, 'old', 'new') + + if diff_output: + text = '[qps] Performance differences noted:\n%s' % diff_output + else: + text = '[qps] No significant performance differences' + print('%s' % text) + comment_on_pr.comment_on_pr('```\n%s\n```' % text) + + +if __name__ == '__main__': + args = _args() + main(args) diff --git a/tools/profiling/qps/qps_scenarios.py b/tools/profiling/qps/qps_scenarios.py new file mode 100644 index 00000000000..4fbbdefc4db --- /dev/null +++ b/tools/profiling/qps/qps_scenarios.py @@ -0,0 +1,19 @@ +# Copyright 2017 gRPC authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
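The reporting rule in diff() above, which only reports a scenario when the absolute bm_speedup estimate exceeds 3 (percent) and the medians differ by more than 0.5, can be read as a small pure function. A sketch under the assumption that the speedup percentage has already been computed by bm_speedup.speedup(); the sample values below are invented:

def _median(ary):
    # Same midpoint scheme as qps_diff: average the two middle samples
    # when the count is even, otherwise take the middle sample.
    ary = sorted(ary)
    n = len(ary)
    if n % 2 == 0:
        return (ary[n // 2 - 1] + ary[n // 2]) / 2.0
    return ary[n // 2]

def is_significant(new_samples, old_samples, speedup_pct):
    mdn_diff = abs(_median(new_samples) - _median(old_samples))
    return abs(speedup_pct) > 3 and mdn_diff > 0.5

print(is_significant([105000.0, 104000.0], [100000.0, 99000.0], 4))  # True
print(is_significant([100100.0, 100000.0], [100000.0, 99900.0], 1))  # False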
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" QPS Scenarios to run """ + +_SCENARIOS = { + 'large-message-throughput': '{"scenarios":[{"name":"large-message-throughput", "spawn_local_worker_count": -2, "warmup_seconds": 30, "benchmark_seconds": 270, "num_servers": 1, "server_config": {"async_server_threads": 1, "security_params": null, "server_type": "ASYNC_SERVER"}, "num_clients": 1, "client_config": {"client_type": "ASYNC_CLIENT", "security_params": null, "payload_config": {"simple_params": {"resp_size": 1048576, "req_size": 1048576}}, "client_channels": 1, "async_client_threads": 1, "outstanding_rpcs_per_channel": 1, "rpc_type": "UNARY", "load_params": {"closed_loop": {}}, "histogram_params": {"max_possible": 60000000000.0, "resolution": 0.01}}}]}', + 'multi-channel-64-KiB': '{"scenarios":[{"name":"multi-channel-64-KiB", "spawn_local_worker_count": -3, "warmup_seconds": 30, "benchmark_seconds": 270, "num_servers": 1, "server_config": {"async_server_threads": 31, "security_params": null, "server_type": "ASYNC_SERVER"}, "num_clients": 2, "client_config": {"client_type": "ASYNC_CLIENT", "security_params": null, "payload_config": {"simple_params": {"resp_size": 65536, "req_size": 65536}}, "client_channels": 32, "async_client_threads": 31, "outstanding_rpcs_per_channel": 100, "rpc_type": "UNARY", "load_params": {"closed_loop": {}}, "histogram_params": {"max_possible": 60000000000.0, "resolution": 0.01}}}]}' +} From f7fd97c5fe0669a1b2834a7472ef0a4b38592332 Mon Sep 17 00:00:00 2001 From: ncteisen Date: Mon, 19 Jun 2017 09:28:22 -0700 Subject: [PATCH 27/41] Actually enable trickle diff --- tools/jenkins/run_trickle_diff.sh | 2 +- .../microbenchmarks/bm_diff/bm_constants.py | 5 ++-- .../microbenchmarks/bm_diff/bm_diff.py | 29 +++++++++---------- .../microbenchmarks/bm_diff/bm_speedup.py | 12 ++++---- 4 files changed, 24 insertions(+), 24 deletions(-) diff --git a/tools/jenkins/run_trickle_diff.sh b/tools/jenkins/run_trickle_diff.sh index da905d02490..47dd8b44d64 100755 --- a/tools/jenkins/run_trickle_diff.sh +++ b/tools/jenkins/run_trickle_diff.sh @@ -20,4 +20,4 @@ set -ex cd $(dirname $0)/../.. 
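Each value in the _SCENARIOS map above is itself a complete --scenarios_json payload, so it can be inspected with nothing but the json module before being handed to qps_json_driver. A small sketch, assuming tools/profiling/qps is on sys.path so the module imports directly:

import json
from qps_scenarios import _SCENARIOS

scenario = json.loads(_SCENARIOS['large-message-throughput'])['scenarios'][0]
payload = scenario['client_config']['payload_config']['simple_params']
print('%s: req_size=%d resp_size=%d benchmark_seconds=%d' % (
    scenario['name'], payload['req_size'], payload['resp_size'],
    scenario['benchmark_seconds']))
# -> large-message-throughput: req_size=1048576 resp_size=1048576 benchmark_seconds=270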
tools/run_tests/start_port_server.py -tools/profiling/microbenchmarks/bm_diff/bm_main.py -d origin/$ghprbTargetBranch -b bm_fullstack_trickle -l 4 -t cli_transport_stalls cli_stream_stalls svr_transport_stalls svr_stream_stalls --no-counters --pr_comment_name trickle +tools/profiling/microbenchmarks/bm_diff/bm_main.py -d origin/$ghprbTargetBranch -b bm_fullstack_trickle -l 4 -t cli_transport_stalls_per_iteration cli_stream_stalls_per_iteration svr_transport_stalls_per_iteration svr_stream_stalls_per_iteration --no-counters --pr_comment_name trickle diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py index 4cd65867c37..ad79a0a1972 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py @@ -26,5 +26,6 @@ _AVAILABLE_BENCHMARK_TESTS = [ _INTERESTING = ('cpu_time', 'real_time', 'locks_per_iteration', 'allocs_per_iteration', 'writes_per_iteration', 'atm_cas_per_iteration', 'atm_add_per_iteration', - 'nows_per_iteration', 'cli_transport_stalls', 'cli_stream_stalls', - 'svr_transport_stalls', 'svr_stream_stalls',) + 'nows_per_iteration', 'cli_transport_stalls_per_iteration', + 'cli_stream_stalls_per_iteration', 'svr_transport_stalls_per_iteration', + 'svr_stream_stalls_per_iteration',) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_diff.py b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py index 73abf90ff52..809817a1a8c 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_diff.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py @@ -108,9 +108,10 @@ class Benchmark: mdn_diff = abs(_median(new) - _median(old)) _maybe_print('%s: %s=%r %s=%r mdn_diff=%r' % (f, new_name, new, old_name, old, mdn_diff)) - s = bm_speedup.speedup(new, old) - if abs(s) > 3 and mdn_diff > 0.5: - self.final[f] = '%+d%%' % s + s = bm_speedup.speedup(new, old, 1e-5) + if abs(s) > 3: + if mdn_diff > 0.5 or 'trickle' in f: + self.final[f] = '%+d%%' % s return self.final.keys() def skip(self): @@ -172,18 +173,16 @@ def diff(bms, loops, track, old, new, counters): js_new_ctr = None js_old_ctr = None - if js_new_ctr: - for row in bm_json.expand_json(js_new_ctr, js_new_opt): - name = row['cpp_name'] - if name.endswith('_mean') or name.endswith('_stddev'): - continue - benchmarks[name].add_sample(track, row, True) - if js_old_ctr: - for row in bm_json.expand_json(js_old_ctr, js_old_opt): - name = row['cpp_name'] - if name.endswith('_mean') or name.endswith('_stddev'): - continue - benchmarks[name].add_sample(track, row, False) + for row in bm_json.expand_json(js_new_ctr, js_new_opt): + name = row['cpp_name'] + if name.endswith('_mean') or name.endswith('_stddev'): + continue + benchmarks[name].add_sample(track, row, True) + for row in bm_json.expand_json(js_old_ctr, js_old_opt): + name = row['cpp_name'] + if name.endswith('_mean') or name.endswith('_stddev'): + continue + benchmarks[name].add_sample(track, row, False) really_interesting = set() for name, bm in benchmarks.items(): diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py index 3d126efa62b..d1f27c41da3 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py @@ -36,7 +36,7 @@ def speedup(new, old): if p0 > _THRESHOLD: return 0 if s0 < 0: pct = 1 - while pct < 101: + while pct < 100: sp, pp = cmp(new, scale(old, 1 - pct / 100.0)) if sp > 0: break if pp > _THRESHOLD: 
break @@ -44,7 +44,7 @@ def speedup(new, old): return -(pct - 1) else: pct = 1 - while pct < 100000: + while pct < 10000: sp, pp = cmp(new, scale(old, 1 + pct / 100.0)) if sp < 0: break if pp > _THRESHOLD: break @@ -53,7 +53,7 @@ def speedup(new, old): if __name__ == "__main__": - new = [1.0, 1.0, 1.0, 1.0] - old = [2.0, 2.0, 2.0, 2.0] - print speedup(new, old) - print speedup(old, new) + new = [0.0, 0.0, 0.0, 0.0] + old=[2.96608e-06, 3.35076e-06, 3.45384e-06, 3.34407e-06] + print speedup(new, old, 1e-5) + print speedup(old, new, 1e-5) From 81db061d2a0ea1ceb87126456929cb85cb2c8dba Mon Sep 17 00:00:00 2001 From: ncteisen Date: Mon, 26 Jun 2017 23:24:04 -0700 Subject: [PATCH 28/41] Clang fmt and copyright sanity --- test/cpp/qps/qps_json_driver.cc | 5 +-- tools/jenkins/run_qps_diff.sh | 36 ++++++++++++------ tools/jenkins/run_trickle_diff.sh | 36 ++++++++++++------ .../microbenchmarks/bm_diff/bm_build.py | 37 +++++++++++++------ .../microbenchmarks/bm_diff/bm_constants.py | 37 +++++++++++++------ .../microbenchmarks/bm_diff/bm_diff.py | 37 +++++++++++++------ .../microbenchmarks/bm_diff/bm_main.py | 37 +++++++++++++------ .../microbenchmarks/bm_diff/bm_run.py | 37 +++++++++++++------ .../microbenchmarks/bm_diff/bm_speedup.py | 37 +++++++++++++------ tools/profiling/qps/qps_diff.py | 37 +++++++++++++------ tools/profiling/qps/qps_scenarios.py | 35 +++++++++++++----- 11 files changed, 252 insertions(+), 119 deletions(-) diff --git a/test/cpp/qps/qps_json_driver.cc b/test/cpp/qps/qps_json_driver.cc index 762b2e0485f..6c277e2b0dc 100644 --- a/test/cpp/qps/qps_json_driver.cc +++ b/test/cpp/qps/qps_json_driver.cc @@ -31,8 +31,8 @@ * */ -#include #include +#include #include #include @@ -73,8 +73,7 @@ DEFINE_string(qps_server_target_override, "", "Override QPS server target to configure in client configs." "Only applicable if there is a single benchmark server."); -DEFINE_string(json_file_out, "", - "File to write the JSON output to."); +DEFINE_string(json_file_out, "", "File to write the JSON output to."); namespace grpc { namespace testing { diff --git a/tools/jenkins/run_qps_diff.sh b/tools/jenkins/run_qps_diff.sh index 9529b0126fc..693bab0223a 100755 --- a/tools/jenkins/run_qps_diff.sh +++ b/tools/jenkins/run_qps_diff.sh @@ -1,17 +1,31 @@ -#!/usr/bin/env bash -# Copyright 2015 gRPC authors. +# Copyright 2017, Google Inc. +# All rights reserved. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# http://www.apache.org/licenses/LICENSE-2.0 +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
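The bm_speedup edits above keep the same basic search: treat the old samples as a baseline, scale them by an increasing percentage, and stop when a two-sample t-test can no longer tell the scaled baseline apart from the new samples (or the sign flips). A stripped-down sketch of the upward branch only, assuming scipy is available as the real script requires; the 100% cap and the sample data are illustrative, not the script's actual bounds:

from scipy import stats

def rough_speedup_pct(new, old, threshold=1e-5):
    # Scale `old` up 1% at a time; the last percentage at which the t-test
    # still separates the two sample sets is the reported speedup.
    for pct in range(1, 100):
        scaled_old = [x * (1 + pct / 100.0) for x in old]
        score, p_value = stats.ttest_ind(new, scaled_old)
        if score < 0 or p_value > threshold:
            return pct - 1
    return 99

print(rough_speedup_pct([12.0, 12.1, 11.9, 12.0], [10.0, 10.1, 9.9, 10.0]))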
-# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # This script is invoked by Jenkins and runs a diff on the qps drivers set -ex diff --git a/tools/jenkins/run_trickle_diff.sh b/tools/jenkins/run_trickle_diff.sh index 47dd8b44d64..774be860909 100755 --- a/tools/jenkins/run_trickle_diff.sh +++ b/tools/jenkins/run_trickle_diff.sh @@ -1,17 +1,31 @@ -#!/usr/bin/env bash -# Copyright 2015 gRPC authors. +# Copyright 2017, Google Inc. +# All rights reserved. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# http://www.apache.org/licenses/LICENSE-2.0 +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # This script is invoked by Jenkins and runs a diff on bm_fullstack_trickle set -ex diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_build.py b/tools/profiling/microbenchmarks/bm_diff/bm_build.py index ce62c09d72f..794573d5548 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_build.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_build.py @@ -1,18 +1,31 @@ -#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. # -# Copyright 2017 gRPC authors. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ Python utility to build opt and counters benchmarks """ diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py index ad79a0a1972..5e1ec1fd406 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py @@ -1,18 +1,31 @@ -#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. # -# Copyright 2017 gRPC authors. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ Configurable constants for the bm_*.py family """ diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_diff.py b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py index 809817a1a8c..1e5329e9be5 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_diff.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_diff.py @@ -1,18 +1,31 @@ -#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. # -# Copyright 2017 gRPC authors. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ Computes the diff between two bm runs and outputs significant results """ import bm_constants diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_main.py b/tools/profiling/microbenchmarks/bm_diff/bm_main.py index 8b4e0cb69a4..f2422f60b00 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_main.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_main.py @@ -1,18 +1,31 @@ -#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. # -# Copyright 2017 gRPC authors. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ Runs the entire bm_*.py pipeline, and possible comments on the PR """ diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_run.py b/tools/profiling/microbenchmarks/bm_diff/bm_run.py index 72b3d3cf106..d8101a2c440 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_run.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_run.py @@ -1,18 +1,31 @@ -#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. # -# Copyright 2017 gRPC authors. 
+# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ Python utility to run opt and counters benchmarks and save json output """ diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py index d1f27c41da3..41e10a6cbe0 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py @@ -1,18 +1,31 @@ -#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. # -# Copyright 2017 gRPC authors. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. from scipy import stats import math diff --git a/tools/profiling/qps/qps_diff.py b/tools/profiling/qps/qps_diff.py index 0654f45666f..c7a0afc4049 100755 --- a/tools/profiling/qps/qps_diff.py +++ b/tools/profiling/qps/qps_diff.py @@ -1,18 +1,31 @@ -#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. # -# Copyright 2017 gRPC authors. +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" Computes the diff between two qps runs and outputs significant results """ import argparse diff --git a/tools/profiling/qps/qps_scenarios.py b/tools/profiling/qps/qps_scenarios.py index 4fbbdefc4db..2d725233311 100644 --- a/tools/profiling/qps/qps_scenarios.py +++ b/tools/profiling/qps/qps_scenarios.py @@ -1,16 +1,31 @@ -# Copyright 2017 gRPC authors. +# Copyright 2017, Google Inc. +# All rights reserved. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: # -# http://www.apache.org/licenses/LICENSE-2.0 +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ QPS Scenarios to run """ _SCENARIOS = { From 1da91b3afe069d87753a0fb921f656647dddbd72 Mon Sep 17 00:00:00 2001 From: ncteisen Date: Mon, 26 Jun 2017 23:34:16 -0700 Subject: [PATCH 29/41] Make scripts executable --- tools/profiling/microbenchmarks/bm_diff/bm_main.py | 2 ++ tools/profiling/qps/qps_diff.py | 3 +++ 2 files changed, 5 insertions(+) mode change 100644 => 100755 tools/profiling/microbenchmarks/bm_diff/bm_main.py diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_main.py b/tools/profiling/microbenchmarks/bm_diff/bm_main.py old mode 100644 new mode 100755 index f2422f60b00..e171bc31d0e --- a/tools/profiling/microbenchmarks/bm_diff/bm_main.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_main.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python2.7 +# # Copyright 2017, Google Inc. # All rights reserved. # diff --git a/tools/profiling/qps/qps_diff.py b/tools/profiling/qps/qps_diff.py index c7a0afc4049..7773a0451b5 100755 --- a/tools/profiling/qps/qps_diff.py +++ b/tools/profiling/qps/qps_diff.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python2.7 +# # Copyright 2017, Google Inc. 
# All rights reserved. # @@ -26,6 +28,7 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + """ Computes the diff between two qps runs and outputs significant results """ import argparse From 131d2f19a1e9dfc3e98916726ed4cc32aafcf05a Mon Sep 17 00:00:00 2001 From: ncteisen Date: Mon, 26 Jun 2017 23:45:16 -0700 Subject: [PATCH 30/41] Few more bm trickle fixes --- test/cpp/microbenchmarks/bm_fullstack_trickle.cc | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/test/cpp/microbenchmarks/bm_fullstack_trickle.cc b/test/cpp/microbenchmarks/bm_fullstack_trickle.cc index 41be66d4ea7..f4c3396969a 100644 --- a/test/cpp/microbenchmarks/bm_fullstack_trickle.cc +++ b/test/cpp/microbenchmarks/bm_fullstack_trickle.cc @@ -334,7 +334,7 @@ BENCHMARK(BM_PumpStreamServerToClient_Trickle)->Apply(StreamingTrickleArgs); static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) { EchoTestService::AsyncService service; std::unique_ptr fixture(new TrickledCHTTP2( - &service, true, state.range(0) /* req_size */, + &service, false, state.range(0) /* req_size */, state.range(1) /* resp_size */, state.range(2) /* bw in kbit/s */)); EchoRequest send_request; EchoResponse send_response; @@ -374,7 +374,7 @@ static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) { stub->AsyncEcho(&cli_ctx, send_request, fixture->cq())); void* t; bool ok; - TrickleCQNext(fixture.get(), &t, &ok, state.iterations()); + TrickleCQNext(fixture.get(), &t, &ok, in_warmup ? -1 : state.iterations()); GPR_ASSERT(ok); GPR_ASSERT(t == tag(0) || t == tag(1)); intptr_t slot = reinterpret_cast(t); @@ -420,12 +420,6 @@ static void BM_PumpUnbalancedUnary_Trickle(benchmark::State& state) { } static void UnaryTrickleArgs(benchmark::internal::Benchmark* b) { - // A selection of interesting numbers - const int cli_1024k = 1024 * 1024; - const int cli_32M = 32 * 1024 * 1024; - const int svr_256k = 256 * 1024; - const int svr_4M = 4 * 1024 * 1024; - const int svr_64M = 64 * 1024 * 1024; for (int bw = 64; bw <= 128 * 1024 * 1024; bw *= 16) { b->Args({1, 1, bw}); for (int i = 64; i <= 128 * 1024 * 1024; i *= 64) { From 027a02733c13ece1091c3f88755e3340e22593dc Mon Sep 17 00:00:00 2001 From: ncteisen Date: Mon, 19 Jun 2017 14:30:33 -0700 Subject: [PATCH 31/41] Address github comments --- .../microbenchmarks/bm_diff/bm_speedup.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py index 41e10a6cbe0..4b57d24ca3a 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py @@ -30,8 +30,7 @@ from scipy import stats import math -_THRESHOLD = 1e-10 - +_DEFAULT_THRESHOLD = 1e-10 def scale(a, mul): return [x * mul for x in a] @@ -40,19 +39,18 @@ def scale(a, mul): def cmp(a, b): return stats.ttest_ind(a, b) - -def speedup(new, old): +def speedup(new, old, threshold = _DEFAULT_THRESHOLD): if (len(set(new))) == 1 and new == old: return 0 s0, p0 = cmp(new, old) if math.isnan(p0): return 0 if s0 == 0: return 0 - if p0 > _THRESHOLD: return 0 + if p0 > _DEFAULT_THRESHOLD: return 0 if s0 < 0: pct = 1 while pct < 100: sp, pp = cmp(new, scale(old, 1 - pct / 100.0)) if sp > 0: break - if pp > _THRESHOLD: break + if pp > _DEFAULT_THRESHOLD: break pct += 1 return -(pct - 1) 
else: @@ -60,13 +58,13 @@ def speedup(new, old): while pct < 10000: sp, pp = cmp(new, scale(old, 1 + pct / 100.0)) if sp < 0: break - if pp > _THRESHOLD: break + if pp > _DEFAULT_THRESHOLD: break pct += 1 return pct - 1 if __name__ == "__main__": new = [0.0, 0.0, 0.0, 0.0] - old=[2.96608e-06, 3.35076e-06, 3.45384e-06, 3.34407e-06] + old = [2.96608e-06, 3.35076e-06, 3.45384e-06, 3.34407e-06] print speedup(new, old, 1e-5) print speedup(old, new, 1e-5) From ab14a0b4c839a3960d9ca3a8c1e044a28a696f82 Mon Sep 17 00:00:00 2001 From: murgatroid99 Date: Tue, 27 Jun 2017 10:40:48 -0700 Subject: [PATCH 32/41] Bump version to 1.4.1 --- BUILD | 2 +- CMakeLists.txt | 2 +- Makefile | 4 ++-- build.yaml | 2 +- gRPC-Core.podspec | 2 +- gRPC-ProtoRPC.podspec | 2 +- gRPC-RxLibrary.podspec | 2 +- gRPC.podspec | 2 +- package.json | 2 +- package.xml | 4 ++-- src/cpp/common/version_cc.cc | 2 +- src/csharp/Grpc.Core/Version.csproj.include | 2 +- src/csharp/Grpc.Core/VersionInfo.cs | 4 ++-- src/csharp/build_packages_dotnetcli.bat | 2 +- src/csharp/build_packages_dotnetcli.sh | 4 ++-- src/node/health_check/package.json | 4 ++-- src/node/tools/package.json | 2 +- src/objective-c/!ProtoCompiler-gRPCPlugin.podspec | 2 +- src/objective-c/GRPCClient/private/version.h | 2 +- src/php/composer.json | 2 +- src/php/ext/grpc/version.h | 2 +- src/python/grpcio/grpc/_grpcio_metadata.py | 2 +- src/python/grpcio/grpc_version.py | 2 +- src/python/grpcio_health_checking/grpc_version.py | 2 +- src/python/grpcio_reflection/grpc_version.py | 2 +- src/python/grpcio_tests/grpc_version.py | 2 +- src/ruby/lib/grpc/version.rb | 2 +- src/ruby/tools/version.rb | 2 +- tools/distrib/python/grpcio_tools/grpc_version.py | 2 +- tools/doxygen/Doxyfile.c++ | 2 +- tools/doxygen/Doxyfile.c++.internal | 2 +- 31 files changed, 36 insertions(+), 36 deletions(-) diff --git a/BUILD b/BUILD index 9b36e3bd17e..db5a9da7925 100644 --- a/BUILD +++ b/BUILD @@ -53,7 +53,7 @@ g_stands_for = "gregarious" core_version = "4.0.0" -version = "1.4.0" +version = "1.4.1" grpc_cc_library( name = "gpr", diff --git a/CMakeLists.txt b/CMakeLists.txt index 4b43c5482dd..b7555b1bc39 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -39,7 +39,7 @@ cmake_minimum_required(VERSION 2.8) set(PACKAGE_NAME "grpc") -set(PACKAGE_VERSION "1.4.0") +set(PACKAGE_VERSION "1.4.1") set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}") set(PACKAGE_TARNAME "${PACKAGE_NAME}-${PACKAGE_VERSION}") set(PACKAGE_BUGREPORT "https://github.com/grpc/grpc/issues/") diff --git a/Makefile b/Makefile index e3ba47cd036..86941b71055 100644 --- a/Makefile +++ b/Makefile @@ -423,8 +423,8 @@ Q = @ endif CORE_VERSION = 4.0.0 -CPP_VERSION = 1.4.0 -CSHARP_VERSION = 1.4.0 +CPP_VERSION = 1.4.1 +CSHARP_VERSION = 1.4.1 CPPFLAGS_NO_ARCH += $(addprefix -I, $(INCLUDES)) $(addprefix -D, $(DEFINES)) CPPFLAGS += $(CPPFLAGS_NO_ARCH) $(ARCH_FLAGS) diff --git a/build.yaml b/build.yaml index 627ff5cce28..c137bc25f54 100644 --- a/build.yaml +++ b/build.yaml @@ -14,7 +14,7 @@ settings: '#10': See the expand_version.py for all the quirks here core_version: 4.0.0 g_stands_for: gregarious - version: 1.4.0 + version: 1.4.1 filegroups: - name: census public_headers: diff --git a/gRPC-Core.podspec b/gRPC-Core.podspec index 3b1fd4b534c..5c7486b461a 100644 --- a/gRPC-Core.podspec +++ b/gRPC-Core.podspec @@ -37,7 +37,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-Core' - version = '1.4.0' + version = '1.4.1' s.version = version s.summary = 'Core cross-platform gRPC library, written in C' s.homepage = 'http://www.grpc.io' diff --git 
a/gRPC-ProtoRPC.podspec b/gRPC-ProtoRPC.podspec index 596579e693e..f4210d4a3c9 100644 --- a/gRPC-ProtoRPC.podspec +++ b/gRPC-ProtoRPC.podspec @@ -36,7 +36,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-ProtoRPC' - version = '1.4.0' + version = '1.4.1' s.version = version s.summary = 'RPC library for Protocol Buffers, based on gRPC' s.homepage = 'http://www.grpc.io' diff --git a/gRPC-RxLibrary.podspec b/gRPC-RxLibrary.podspec index 0cb7128f598..07144815ea1 100644 --- a/gRPC-RxLibrary.podspec +++ b/gRPC-RxLibrary.podspec @@ -36,7 +36,7 @@ Pod::Spec.new do |s| s.name = 'gRPC-RxLibrary' - version = '1.4.0' + version = '1.4.1' s.version = version s.summary = 'Reactive Extensions library for iOS/OSX.' s.homepage = 'http://www.grpc.io' diff --git a/gRPC.podspec b/gRPC.podspec index cf261a102b9..6f1ef12ce76 100644 --- a/gRPC.podspec +++ b/gRPC.podspec @@ -35,7 +35,7 @@ Pod::Spec.new do |s| s.name = 'gRPC' - version = '1.4.0' + version = '1.4.1' s.version = version s.summary = 'gRPC client library for iOS/OSX' s.homepage = 'http://www.grpc.io' diff --git a/package.json b/package.json index 4b1b7d7d6ac..1f6da78107a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "grpc", - "version": "1.4.0", + "version": "1.4.1", "author": "Google Inc.", "description": "gRPC Library for Node", "homepage": "http://www.grpc.io/", diff --git a/package.xml b/package.xml index ea1b3001eb1..b52d1891e26 100644 --- a/package.xml +++ b/package.xml @@ -13,8 +13,8 @@ 2017-05-22 - 1.4.0 - 1.4.0 + 1.4.1 + 1.4.1 beta diff --git a/src/cpp/common/version_cc.cc b/src/cpp/common/version_cc.cc index 4691696a23c..75763fa6a29 100644 --- a/src/cpp/common/version_cc.cc +++ b/src/cpp/common/version_cc.cc @@ -37,5 +37,5 @@ #include namespace grpc { -grpc::string Version() { return "1.4.0"; } +grpc::string Version() { return "1.4.1"; } } diff --git a/src/csharp/Grpc.Core/Version.csproj.include b/src/csharp/Grpc.Core/Version.csproj.include index 09fef990c4b..4b4f7f18d01 100755 --- a/src/csharp/Grpc.Core/Version.csproj.include +++ b/src/csharp/Grpc.Core/Version.csproj.include @@ -1,7 +1,7 @@ - 1.4.0 + 1.4.1 3.3.0 diff --git a/src/csharp/Grpc.Core/VersionInfo.cs b/src/csharp/Grpc.Core/VersionInfo.cs index 541323fb3d0..e8f2f2a68df 100644 --- a/src/csharp/Grpc.Core/VersionInfo.cs +++ b/src/csharp/Grpc.Core/VersionInfo.cs @@ -48,11 +48,11 @@ namespace Grpc.Core /// /// Current AssemblyFileVersion of gRPC C# assemblies /// - public const string CurrentAssemblyFileVersion = "1.4.0.0"; + public const string CurrentAssemblyFileVersion = "1.4.1.0"; /// /// Current version of gRPC C# /// - public const string CurrentVersion = "1.4.0"; + public const string CurrentVersion = "1.4.1"; } } diff --git a/src/csharp/build_packages_dotnetcli.bat b/src/csharp/build_packages_dotnetcli.bat index 4fcf209c078..46a0d4320a6 100755 --- a/src/csharp/build_packages_dotnetcli.bat +++ b/src/csharp/build_packages_dotnetcli.bat @@ -28,7 +28,7 @@ @rem OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
@rem Current package versions -set VERSION=1.4.0 +set VERSION=1.4.1 @rem Adjust the location of nuget.exe set NUGET=C:\nuget\nuget.exe diff --git a/src/csharp/build_packages_dotnetcli.sh b/src/csharp/build_packages_dotnetcli.sh index 9f4bf2fa91f..7ffd178dce4 100755 --- a/src/csharp/build_packages_dotnetcli.sh +++ b/src/csharp/build_packages_dotnetcli.sh @@ -54,7 +54,7 @@ dotnet pack --configuration Release Grpc.Auth --output ../../../artifacts dotnet pack --configuration Release Grpc.HealthCheck --output ../../../artifacts dotnet pack --configuration Release Grpc.Reflection --output ../../../artifacts -nuget pack Grpc.nuspec -Version "1.4.0" -OutputDirectory ../../artifacts -nuget pack Grpc.Tools.nuspec -Version "1.4.0" -OutputDirectory ../../artifacts +nuget pack Grpc.nuspec -Version "1.4.1" -OutputDirectory ../../artifacts +nuget pack Grpc.Tools.nuspec -Version "1.4.1" -OutputDirectory ../../artifacts (cd ../../artifacts && zip csharp_nugets_dotnetcli.zip *.nupkg) diff --git a/src/node/health_check/package.json b/src/node/health_check/package.json index f619e3f3e11..3cdcb41c11b 100644 --- a/src/node/health_check/package.json +++ b/src/node/health_check/package.json @@ -1,6 +1,6 @@ { "name": "grpc-health-check", - "version": "1.4.0", + "version": "1.4.1", "author": "Google Inc.", "description": "Health check service for use with gRPC", "repository": { @@ -15,7 +15,7 @@ } ], "dependencies": { - "grpc": "^1.4.0", + "grpc": "^1.4.1", "lodash": "^3.9.3", "google-protobuf": "^3.0.0" }, diff --git a/src/node/tools/package.json b/src/node/tools/package.json index 777be92f2d1..03cd90f9503 100644 --- a/src/node/tools/package.json +++ b/src/node/tools/package.json @@ -1,6 +1,6 @@ { "name": "grpc-tools", - "version": "1.4.0", + "version": "1.4.1", "author": "Google Inc.", "description": "Tools for developing with gRPC on Node.js", "homepage": "http://www.grpc.io/", diff --git a/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec b/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec index 180b2e9cdef..5ad1eeb82f6 100644 --- a/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec +++ b/src/objective-c/!ProtoCompiler-gRPCPlugin.podspec @@ -42,7 +42,7 @@ Pod::Spec.new do |s| # exclamation mark ensures that other "regular" pods will be able to find it as it'll be installed # before them. s.name = '!ProtoCompiler-gRPCPlugin' - v = '1.4.0' + v = '1.4.1' s.version = v s.summary = 'The gRPC ProtoC plugin generates Objective-C files from .proto services.' s.description = <<-DESC diff --git a/src/objective-c/GRPCClient/private/version.h b/src/objective-c/GRPCClient/private/version.h index 7b8da4db664..09130fd0859 100644 --- a/src/objective-c/GRPCClient/private/version.h +++ b/src/objective-c/GRPCClient/private/version.h @@ -38,4 +38,4 @@ // `tools/buildgen/generate_projects.sh`. 
-#define GRPC_OBJC_VERSION_STRING @"1.4.0" +#define GRPC_OBJC_VERSION_STRING @"1.4.1" diff --git a/src/php/composer.json b/src/php/composer.json index a4fba7e4f6a..56561b7b48b 100644 --- a/src/php/composer.json +++ b/src/php/composer.json @@ -2,7 +2,7 @@ "name": "grpc/grpc-dev", "description": "gRPC library for PHP - for Developement use only", "license": "BSD-3-Clause", - "version": "1.4.0", + "version": "1.4.1", "require": { "php": ">=5.5.0", "google/protobuf": "^v3.3.0" diff --git a/src/php/ext/grpc/version.h b/src/php/ext/grpc/version.h index 993ef2de274..eab6ffa34c7 100644 --- a/src/php/ext/grpc/version.h +++ b/src/php/ext/grpc/version.h @@ -35,6 +35,6 @@ #ifndef VERSION_H #define VERSION_H -#define PHP_GRPC_VERSION "1.4.0" +#define PHP_GRPC_VERSION "1.4.1" #endif /* VERSION_H */ diff --git a/src/python/grpcio/grpc/_grpcio_metadata.py b/src/python/grpcio/grpc/_grpcio_metadata.py index 4ef40343142..40a30092673 100644 --- a/src/python/grpcio/grpc/_grpcio_metadata.py +++ b/src/python/grpcio/grpc/_grpcio_metadata.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc/_grpcio_metadata.py.template`!!! -__version__ = """1.4.0""" +__version__ = """1.4.1""" diff --git a/src/python/grpcio/grpc_version.py b/src/python/grpcio/grpc_version.py index 044a9cd9f6c..78f7e6ce311 100644 --- a/src/python/grpcio/grpc_version.py +++ b/src/python/grpcio/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_version.py.template`!!! -VERSION='1.4.0' +VERSION='1.4.1' diff --git a/src/python/grpcio_health_checking/grpc_version.py b/src/python/grpcio_health_checking/grpc_version.py index 15636c7bc0f..b323b5c93c6 100644 --- a/src/python/grpcio_health_checking/grpc_version.py +++ b/src/python/grpcio_health_checking/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_health_checking/grpc_version.py.template`!!! -VERSION='1.4.0' +VERSION='1.4.1' diff --git a/src/python/grpcio_reflection/grpc_version.py b/src/python/grpcio_reflection/grpc_version.py index 7e699db4f61..cc54e601961 100644 --- a/src/python/grpcio_reflection/grpc_version.py +++ b/src/python/grpcio_reflection/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_reflection/grpc_version.py.template`!!! -VERSION='1.4.0' +VERSION='1.4.1' diff --git a/src/python/grpcio_tests/grpc_version.py b/src/python/grpcio_tests/grpc_version.py index 00b31bcdb98..f26860ae831 100644 --- a/src/python/grpcio_tests/grpc_version.py +++ b/src/python/grpcio_tests/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio_tests/grpc_version.py.template`!!! -VERSION='1.4.0' +VERSION='1.4.1' diff --git a/src/ruby/lib/grpc/version.rb b/src/ruby/lib/grpc/version.rb index 7801316561d..164ca177a5f 100644 --- a/src/ruby/lib/grpc/version.rb +++ b/src/ruby/lib/grpc/version.rb @@ -29,5 +29,5 @@ # GRPC contains the General RPC module. 
module GRPC - VERSION = '1.4.0' + VERSION = '1.4.1' end diff --git a/src/ruby/tools/version.rb b/src/ruby/tools/version.rb index 74c2fd38a45..9d70c0c23a6 100644 --- a/src/ruby/tools/version.rb +++ b/src/ruby/tools/version.rb @@ -29,6 +29,6 @@ module GRPC module Tools - VERSION = '1.4.0' + VERSION = '1.4.1' end end diff --git a/tools/distrib/python/grpcio_tools/grpc_version.py b/tools/distrib/python/grpcio_tools/grpc_version.py index 1bfc4b79d18..dd2194a2fb5 100644 --- a/tools/distrib/python/grpcio_tools/grpc_version.py +++ b/tools/distrib/python/grpcio_tools/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!! -VERSION='1.4.0' +VERSION='1.4.1' diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++ index 0d4a85cfc3d..4be278f527b 100644 --- a/tools/doxygen/Doxyfile.c++ +++ b/tools/doxygen/Doxyfile.c++ @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.4.0 +PROJECT_NUMBER = 1.4.1 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal index 82f1667a7ff..a259fb22f7c 100644 --- a/tools/doxygen/Doxyfile.c++.internal +++ b/tools/doxygen/Doxyfile.c++.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.4.0 +PROJECT_NUMBER = 1.4.1 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a From 9516b10c6624df6829d42c20ace1c0a97e1c847e Mon Sep 17 00:00:00 2001 From: Muxi Yan Date: Tue, 27 Jun 2017 11:52:52 -0700 Subject: [PATCH 33/41] Fix the memory leak in metadata --- src/objective-c/GRPCClient/private/GRPCWrappedCall.m | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/objective-c/GRPCClient/private/GRPCWrappedCall.m b/src/objective-c/GRPCClient/private/GRPCWrappedCall.m index 1faba3e20b9..06570c5ea22 100644 --- a/src/objective-c/GRPCClient/private/GRPCWrappedCall.m +++ b/src/objective-c/GRPCClient/private/GRPCWrappedCall.m @@ -90,6 +90,10 @@ } - (void)dealloc { + for (int i = 0; i < _op.data.send_initial_metadata.count; i++) { + grpc_slice_unref(_op.data.send_initial_metadata.metadata[i].key); + grpc_slice_unref(_op.data.send_initial_metadata.metadata[i].value); + } gpr_free(_op.data.send_initial_metadata.metadata); } From d9edaa400eba7d387623db31338edcd9a10af653 Mon Sep 17 00:00:00 2001 From: ncteisen Date: Wed, 28 Jun 2017 15:12:53 -0700 Subject: [PATCH 34/41] Fix bm_speedup --- tools/profiling/microbenchmarks/bm_diff/bm_speedup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py index 4b57d24ca3a..61ef3de62f7 100644 --- a/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py +++ b/tools/profiling/microbenchmarks/bm_diff/bm_speedup.py @@ -44,13 +44,13 @@ def speedup(new, old, threshold = _DEFAULT_THRESHOLD): s0, p0 = cmp(new, old) if math.isnan(p0): return 0 if s0 == 0: return 0 - if p0 > _DEFAULT_THRESHOLD: return 0 + if p0 > threshold: return 0 if s0 < 0: pct = 1 while pct < 100: sp, pp = cmp(new, scale(old, 1 - pct / 100.0)) if sp > 0: 
break - if pp > _DEFAULT_THRESHOLD: break + if pp > threshold: break pct += 1 return -(pct - 1) else: @@ -58,7 +58,7 @@ def speedup(new, old, threshold = _DEFAULT_THRESHOLD): while pct < 10000: sp, pp = cmp(new, scale(old, 1 + pct / 100.0)) if sp < 0: break - if pp > _DEFAULT_THRESHOLD: break + if pp > threshold: break pct += 1 return pct - 1 From 7b774544f16c1685b3aa007ed5d4317ff038355d Mon Sep 17 00:00:00 2001 From: Muxi Yan Date: Thu, 29 Jun 2017 14:31:59 -0700 Subject: [PATCH 35/41] Use Cocoapods to manage nanopb --- gRPC-Core.podspec | 12 +----------- templates/gRPC-Core.podspec.template | 5 +++-- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/gRPC-Core.podspec b/gRPC-Core.podspec index 5c7486b461a..513b31d39aa 100644 --- a/gRPC-Core.podspec +++ b/gRPC-Core.podspec @@ -194,6 +194,7 @@ Pod::Spec.new do |s| ss.libraries = 'z' ss.dependency "#{s.name}/Interface", version ss.dependency 'BoringSSL', '~> 8.0' + ss.dependency 'nanopb' # To save you from scrolling, this is the last part of the podspec. ss.source_files = 'src/core/lib/profiling/timers.h', @@ -448,10 +449,6 @@ Pod::Spec.new do |s| 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h', 'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h', 'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h', - 'third_party/nanopb/pb.h', - 'third_party/nanopb/pb_common.h', - 'third_party/nanopb/pb_decode.h', - 'third_party/nanopb/pb_encode.h', 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.h', 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.h', 'src/core/ext/filters/load_reporting/load_reporting.h', @@ -695,9 +692,6 @@ Pod::Spec.new do |s| 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.c', 'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.c', 'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c', - 'third_party/nanopb/pb_common.c', - 'third_party/nanopb/pb_decode.c', - 'third_party/nanopb/pb_encode.c', 'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.c', 'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.c', 'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.c', @@ -934,10 +928,6 @@ Pod::Spec.new do |s| 'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h', 'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h', 'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h', - 'third_party/nanopb/pb.h', - 'third_party/nanopb/pb_common.h', - 'third_party/nanopb/pb_decode.h', - 'third_party/nanopb/pb_encode.h', 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver.h', 'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.h', 'src/core/ext/filters/load_reporting/load_reporting.h', diff --git a/templates/gRPC-Core.podspec.template b/templates/gRPC-Core.podspec.template index cbebc5d005c..15fbb701062 100644 --- a/templates/gRPC-Core.podspec.template +++ b/templates/gRPC-Core.podspec.template @@ -43,7 +43,7 @@ if lib.name in ("grpc", "gpr"): out += lib.get('headers', []) out += lib.get('src', []) - return out; + return [f for f in out if not f.startswith("third_party/nanopb/")] def grpc_public_headers(libs): out = [] @@ -57,7 +57,7 @@ for lib in libs: if lib.name in ("grpc", "gpr"): out += lib.get('headers', []) - return out + return [f 
for f in out if not f.startswith("third_party/nanopb/")] def ruby_multiline_list(files, indent): return (',\n' + indent*' ').join('\'%s\'' % f for f in files) @@ -153,6 +153,7 @@ ss.libraries = 'z' ss.dependency "#{s.name}/Interface", version ss.dependency 'BoringSSL', '~> 8.0' + ss.dependency 'nanopb' # To save you from scrolling, this is the last part of the podspec. ss.source_files = ${ruby_multiline_list(grpc_private_files(libs), 22)} From 636eba9889b672eaf2641b351c69334e0043bf32 Mon Sep 17 00:00:00 2001 From: Muxi Yan Date: Thu, 29 Jun 2017 16:18:43 -0700 Subject: [PATCH 36/41] Polish gRPC-Core.podspec --- gRPC-Core.podspec | 4 +--- templates/gRPC-Core.podspec.template | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/gRPC-Core.podspec b/gRPC-Core.podspec index 513b31d39aa..319f14bac8e 100644 --- a/gRPC-Core.podspec +++ b/gRPC-Core.podspec @@ -47,8 +47,6 @@ Pod::Spec.new do |s| s.source = { :git => 'https://github.com/grpc/grpc.git', :tag => "v#{version}", - # TODO(jcanizales): Depend explicitly on the nanopb pod, and disable submodules. - :submodules => true, } s.ios.deployment_target = '7.0' @@ -194,7 +192,7 @@ Pod::Spec.new do |s| ss.libraries = 'z' ss.dependency "#{s.name}/Interface", version ss.dependency 'BoringSSL', '~> 8.0' - ss.dependency 'nanopb' + ss.dependency 'nanopb', '~> 0.3' # To save you from scrolling, this is the last part of the podspec. ss.source_files = 'src/core/lib/profiling/timers.h', diff --git a/templates/gRPC-Core.podspec.template b/templates/gRPC-Core.podspec.template index 15fbb701062..d7a7c7cc644 100644 --- a/templates/gRPC-Core.podspec.template +++ b/templates/gRPC-Core.podspec.template @@ -74,8 +74,6 @@ s.source = { :git => 'https://github.com/grpc/grpc.git', :tag => "v#{version}", - # TODO(jcanizales): Depend explicitly on the nanopb pod, and disable submodules. - :submodules => true, } s.ios.deployment_target = '7.0' @@ -153,7 +151,7 @@ ss.libraries = 'z' ss.dependency "#{s.name}/Interface", version ss.dependency 'BoringSSL', '~> 8.0' - ss.dependency 'nanopb' + ss.dependency 'nanopb', '~> 0.3' # To save you from scrolling, this is the last part of the podspec. ss.source_files = ${ruby_multiline_list(grpc_private_files(libs), 22)} From 69d7bd67f47aa34f631300ac2933f48d391bcbf8 Mon Sep 17 00:00:00 2001 From: thassss Date: Mon, 26 Jun 2017 13:34:40 -0700 Subject: [PATCH 37/41] Expose :authority pseudo-header to Obj-C API. --- src/objective-c/GRPCClient/GRPCCall.h | 5 +++++ src/objective-c/GRPCClient/GRPCCall.m | 2 +- src/objective-c/GRPCClient/private/GRPCChannel.h | 1 + src/objective-c/GRPCClient/private/GRPCChannel.m | 5 ++++- src/objective-c/GRPCClient/private/GRPCHost.h | 1 + src/objective-c/GRPCClient/private/GRPCHost.m | 4 +++- src/objective-c/GRPCClient/private/GRPCWrappedCall.h | 1 + src/objective-c/GRPCClient/private/GRPCWrappedCall.m | 5 +++-- 8 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/objective-c/GRPCClient/GRPCCall.h b/src/objective-c/GRPCClient/GRPCCall.h index 5e9324c4456..b832d70d1a2 100644 --- a/src/objective-c/GRPCClient/GRPCCall.h +++ b/src/objective-c/GRPCClient/GRPCCall.h @@ -178,6 +178,11 @@ extern id const kGRPCTrailersKey; /** Represents a single gRPC remote call. */ @interface GRPCCall : GRXWriter +/** + * The server name for the RPC. If nil, the host name of the service object will be used instead. 
+ */ +@property (atomic, readwrite) NSString *serverName; + /** * The container of the request headers of an RPC conforms to this protocol, which is a subset of * NSMutableDictionary's interface. It will become a NSMutableDictionary later on. diff --git a/src/objective-c/GRPCClient/GRPCCall.m b/src/objective-c/GRPCClient/GRPCCall.m index f9d13fea578..cb505bd82df 100644 --- a/src/objective-c/GRPCClient/GRPCCall.m +++ b/src/objective-c/GRPCClient/GRPCCall.m @@ -440,7 +440,7 @@ static NSMutableDictionary *callFlags; _responseWriteable = [[GRXConcurrentWriteable alloc] initWithWriteable:writeable dispatchQueue:_responseQueue]; - _wrappedCall = [[GRPCWrappedCall alloc] initWithHost:_host path:_path]; + _wrappedCall = [[GRPCWrappedCall alloc] initWithHost:_host serverName:_serverName path:_path]; NSAssert(_wrappedCall, @"Error allocating RPC objects. Low memory?"); [self sendHeaders:_requestHeaders]; diff --git a/src/objective-c/GRPCClient/private/GRPCChannel.h b/src/objective-c/GRPCClient/private/GRPCChannel.h index 5bada2dd50c..a7db8664736 100644 --- a/src/objective-c/GRPCClient/private/GRPCChannel.h +++ b/src/objective-c/GRPCClient/private/GRPCChannel.h @@ -77,5 +77,6 @@ struct grpc_channel_credentials; channelArgs:(nullable NSDictionary *)channelArgs; - (nullable grpc_call *)unmanagedCallWithPath:(nonnull NSString *)path + serverName:(nonnull NSString *)serverName completionQueue:(nonnull GRPCCompletionQueue *)queue; @end diff --git a/src/objective-c/GRPCClient/private/GRPCChannel.m b/src/objective-c/GRPCClient/private/GRPCChannel.m index fcfaa4a134d..845197126bc 100644 --- a/src/objective-c/GRPCClient/private/GRPCChannel.m +++ b/src/objective-c/GRPCClient/private/GRPCChannel.m @@ -196,14 +196,17 @@ static grpc_channel_args *BuildChannelArgs(NSDictionary *dictionary) { } - (grpc_call *)unmanagedCallWithPath:(NSString *)path + serverName:(NSString *)serverName completionQueue:(GRPCCompletionQueue *)queue { + grpc_slice host_slice = grpc_slice_from_copied_string(serverName.UTF8String); grpc_slice path_slice = grpc_slice_from_copied_string(path.UTF8String); grpc_call *call = grpc_channel_create_call(_unmanagedChannel, NULL, GRPC_PROPAGATE_DEFAULTS, queue.unmanagedQueue, path_slice, - NULL, // Passing NULL for host + &host_slice, gpr_inf_future(GPR_CLOCK_REALTIME), NULL); + grpc_slice_unref(host_slice); grpc_slice_unref(path_slice); return call; } diff --git a/src/objective-c/GRPCClient/private/GRPCHost.h b/src/objective-c/GRPCClient/private/GRPCHost.h index c8b5dd315b0..a157f69ec24 100644 --- a/src/objective-c/GRPCClient/private/GRPCHost.h +++ b/src/objective-c/GRPCClient/private/GRPCHost.h @@ -69,6 +69,7 @@ struct grpc_channel_credentials; /** Create a grpc_call object to the provided path on this host. */ - (nullable struct grpc_call *)unmanagedCallWithPath:(NSString *)path + serverName:(nullable NSString *)serverName completionQueue:(GRPCCompletionQueue *)queue; // TODO: There's a race when a new RPC is coming through just as an existing one is getting diff --git a/src/objective-c/GRPCClient/private/GRPCHost.m b/src/objective-c/GRPCClient/private/GRPCHost.m index 246af560cdb..da0b34e58a5 100644 --- a/src/objective-c/GRPCClient/private/GRPCHost.m +++ b/src/objective-c/GRPCClient/private/GRPCHost.m @@ -135,6 +135,7 @@ static GRPCConnectivityMonitor *connectivityMonitor = nil; } - (nullable grpc_call *)unmanagedCallWithPath:(NSString *)path + serverName:(nullable NSString *)serverName completionQueue:(GRPCCompletionQueue *)queue { GRPCChannel *channel; // This is racing -[GRPCHost disconnect]. 
@@ -144,7 +145,8 @@ static GRPCConnectivityMonitor *connectivityMonitor = nil; } channel = _channel; } - return [channel unmanagedCallWithPath:path completionQueue:queue]; + NSString *name = serverName ? serverName : _address; + return [channel unmanagedCallWithPath:path serverName:name completionQueue:queue]; } - (BOOL)setTLSPEMRootCerts:(nullable NSString *)pemRootCerts diff --git a/src/objective-c/GRPCClient/private/GRPCWrappedCall.h b/src/objective-c/GRPCClient/private/GRPCWrappedCall.h index 52233c82420..a7d7cd639df 100644 --- a/src/objective-c/GRPCClient/private/GRPCWrappedCall.h +++ b/src/objective-c/GRPCClient/private/GRPCWrappedCall.h @@ -90,6 +90,7 @@ @interface GRPCWrappedCall : NSObject - (instancetype)initWithHost:(NSString *)host + serverName:(nullable NSString *)serverName path:(NSString *)path NS_DESIGNATED_INITIALIZER; - (void)startBatchWithOperations:(NSArray *)ops errorHandler:(void(^)())errorHandler; diff --git a/src/objective-c/GRPCClient/private/GRPCWrappedCall.m b/src/objective-c/GRPCClient/private/GRPCWrappedCall.m index 06570c5ea22..645bbfc18d2 100644 --- a/src/objective-c/GRPCClient/private/GRPCWrappedCall.m +++ b/src/objective-c/GRPCClient/private/GRPCWrappedCall.m @@ -251,10 +251,11 @@ } - (instancetype)init { - return [self initWithHost:nil path:nil]; + return [self initWithHost:nil serverName:nil path:nil]; } - (instancetype)initWithHost:(NSString *)host + serverName:(nullable NSString *)serverName path:(NSString *)path { if (!path || !host) { [NSException raise:NSInvalidArgumentException @@ -267,7 +268,7 @@ // queue. Currently we use a singleton queue. _queue = [GRPCCompletionQueue completionQueue]; - _call = [[GRPCHost hostWithAddress:host] unmanagedCallWithPath:path completionQueue:_queue]; + _call = [[GRPCHost hostWithAddress:host] unmanagedCallWithPath:path serverName:serverName completionQueue:_queue]; if (_call == NULL) { return nil; } From 69776d4b3f5d65827633a706ae97690afc0bce41 Mon Sep 17 00:00:00 2001 From: thassss Date: Thu, 29 Jun 2017 09:23:23 -0700 Subject: [PATCH 38/41] Warn against using serverName property with Cronet --- src/objective-c/GRPCClient/GRPCCall.h | 1 + 1 file changed, 1 insertion(+) diff --git a/src/objective-c/GRPCClient/GRPCCall.h b/src/objective-c/GRPCClient/GRPCCall.h index b832d70d1a2..6056d4b629b 100644 --- a/src/objective-c/GRPCClient/GRPCCall.h +++ b/src/objective-c/GRPCClient/GRPCCall.h @@ -180,6 +180,7 @@ extern id const kGRPCTrailersKey; /** * The server name for the RPC. If nil, the host name of the service object will be used instead. + * This property must be nil when Cronet transport is enabled. */ @property (atomic, readwrite) NSString *serverName; From 0acc9e6b684807ce6c6fec98cb078f47ede2e421 Mon Sep 17 00:00:00 2001 From: thassss Date: Thu, 29 Jun 2017 09:24:16 -0700 Subject: [PATCH 39/41] Remove nullability specifiers --- src/objective-c/GRPCClient/private/GRPCHost.h | 2 +- src/objective-c/GRPCClient/private/GRPCHost.m | 2 +- src/objective-c/GRPCClient/private/GRPCWrappedCall.h | 2 +- src/objective-c/GRPCClient/private/GRPCWrappedCall.m | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/objective-c/GRPCClient/private/GRPCHost.h b/src/objective-c/GRPCClient/private/GRPCHost.h index a157f69ec24..1f74d5551ff 100644 --- a/src/objective-c/GRPCClient/private/GRPCHost.h +++ b/src/objective-c/GRPCClient/private/GRPCHost.h @@ -69,7 +69,7 @@ struct grpc_channel_credentials; /** Create a grpc_call object to the provided path on this host. 
*/ - (nullable struct grpc_call *)unmanagedCallWithPath:(NSString *)path - serverName:(nullable NSString *)serverName + serverName:(NSString *)serverName completionQueue:(GRPCCompletionQueue *)queue; // TODO: There's a race when a new RPC is coming through just as an existing one is getting diff --git a/src/objective-c/GRPCClient/private/GRPCHost.m b/src/objective-c/GRPCClient/private/GRPCHost.m index da0b34e58a5..2d88de18c19 100644 --- a/src/objective-c/GRPCClient/private/GRPCHost.m +++ b/src/objective-c/GRPCClient/private/GRPCHost.m @@ -135,7 +135,7 @@ static GRPCConnectivityMonitor *connectivityMonitor = nil; } - (nullable grpc_call *)unmanagedCallWithPath:(NSString *)path - serverName:(nullable NSString *)serverName + serverName:(NSString *)serverName completionQueue:(GRPCCompletionQueue *)queue { GRPCChannel *channel; // This is racing -[GRPCHost disconnect]. diff --git a/src/objective-c/GRPCClient/private/GRPCWrappedCall.h b/src/objective-c/GRPCClient/private/GRPCWrappedCall.h index a7d7cd639df..171d2ec6127 100644 --- a/src/objective-c/GRPCClient/private/GRPCWrappedCall.h +++ b/src/objective-c/GRPCClient/private/GRPCWrappedCall.h @@ -90,7 +90,7 @@ @interface GRPCWrappedCall : NSObject - (instancetype)initWithHost:(NSString *)host - serverName:(nullable NSString *)serverName + serverName:(NSString *)serverName path:(NSString *)path NS_DESIGNATED_INITIALIZER; - (void)startBatchWithOperations:(NSArray *)ops errorHandler:(void(^)())errorHandler; diff --git a/src/objective-c/GRPCClient/private/GRPCWrappedCall.m b/src/objective-c/GRPCClient/private/GRPCWrappedCall.m index 645bbfc18d2..dcdc5c55b13 100644 --- a/src/objective-c/GRPCClient/private/GRPCWrappedCall.m +++ b/src/objective-c/GRPCClient/private/GRPCWrappedCall.m @@ -255,7 +255,7 @@ } - (instancetype)initWithHost:(NSString *)host - serverName:(nullable NSString *)serverName + serverName:(NSString *)serverName path:(NSString *)path { if (!path || !host) { [NSException raise:NSInvalidArgumentException From 97efa02a1269abf346addf50eb9b9eb360d90685 Mon Sep 17 00:00:00 2001 From: thassss Date: Thu, 29 Jun 2017 09:26:20 -0700 Subject: [PATCH 40/41] Pass NULL to grpc_channel_create_call for nil serverName --- src/objective-c/GRPCClient/private/GRPCChannel.m | 11 ++++++++--- src/objective-c/GRPCClient/private/GRPCHost.m | 3 +-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/objective-c/GRPCClient/private/GRPCChannel.m b/src/objective-c/GRPCClient/private/GRPCChannel.m index 845197126bc..d37c2b2a4e3 100644 --- a/src/objective-c/GRPCClient/private/GRPCChannel.m +++ b/src/objective-c/GRPCClient/private/GRPCChannel.m @@ -198,15 +198,20 @@ static grpc_channel_args *BuildChannelArgs(NSDictionary *dictionary) { - (grpc_call *)unmanagedCallWithPath:(NSString *)path serverName:(NSString *)serverName completionQueue:(GRPCCompletionQueue *)queue { - grpc_slice host_slice = grpc_slice_from_copied_string(serverName.UTF8String); + grpc_slice host_slice; + if (serverName) { + host_slice = grpc_slice_from_copied_string(serverName.UTF8String); + } grpc_slice path_slice = grpc_slice_from_copied_string(path.UTF8String); grpc_call *call = grpc_channel_create_call(_unmanagedChannel, NULL, GRPC_PROPAGATE_DEFAULTS, queue.unmanagedQueue, path_slice, - &host_slice, + serverName ? 
&host_slice : NULL, gpr_inf_future(GPR_CLOCK_REALTIME), NULL); - grpc_slice_unref(host_slice); + if (serverName) { + grpc_slice_unref(host_slice); + } grpc_slice_unref(path_slice); return call; } diff --git a/src/objective-c/GRPCClient/private/GRPCHost.m b/src/objective-c/GRPCClient/private/GRPCHost.m index 2d88de18c19..13a9af9af5b 100644 --- a/src/objective-c/GRPCClient/private/GRPCHost.m +++ b/src/objective-c/GRPCClient/private/GRPCHost.m @@ -145,8 +145,7 @@ static GRPCConnectivityMonitor *connectivityMonitor = nil; } channel = _channel; } - NSString *name = serverName ? serverName : _address; - return [channel unmanagedCallWithPath:path serverName:name completionQueue:queue]; + return [channel unmanagedCallWithPath:path serverName:serverName completionQueue:queue]; } - (BOOL)setTLSPEMRootCerts:(nullable NSString *)pemRootCerts From 42301bfd0ea6f8881ca21c2e06a4474822f0f043 Mon Sep 17 00:00:00 2001 From: thassss Date: Thu, 29 Jun 2017 16:23:08 -0700 Subject: [PATCH 41/41] Update serverName comment to be more accurate --- src/objective-c/GRPCClient/GRPCCall.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/objective-c/GRPCClient/GRPCCall.h b/src/objective-c/GRPCClient/GRPCCall.h index 6056d4b629b..9a5a2a44d2b 100644 --- a/src/objective-c/GRPCClient/GRPCCall.h +++ b/src/objective-c/GRPCClient/GRPCCall.h @@ -179,8 +179,8 @@ extern id const kGRPCTrailersKey; @interface GRPCCall : GRXWriter /** - * The server name for the RPC. If nil, the host name of the service object will be used instead. - * This property must be nil when Cronet transport is enabled. + * The authority for the RPC. If nil, the default authority will be used. This property must be nil + * when Cronet transport is enabled. */ @property (atomic, readwrite) NSString *serverName;
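
Usage note for the serverName changes (patches 37-41): the new GRPCCall.serverName property overrides the :authority pseudo-header of a single RPC, while a nil value keeps the channel's default authority because GRPCChannel now passes NULL for the host argument of grpc_channel_create_call. The sketch below is illustrative only and is not part of the patches; it assumes the public GRPCCall initializer initWithHost:path:requestsWriter: and the GRXWriter/GRXWriteable plumbing of the existing Objective-C API, and the host, path, and authority strings are placeholders.

    // Minimal sketch: override the :authority pseudo-header for one RPC.
    // Assumes `requestsWriter` is a GRXWriter yielding serialized request messages
    // and `responsesWriteable` is an id<GRXWriteable> that consumes responses.
    GRPCCall *call = [[GRPCCall alloc] initWithHost:@"example.googleapis.com"
                                               path:@"/helloworld.Greeter/SayHello"
                                     requestsWriter:requestsWriter];
    // Sent as the :authority pseudo-header; per the header comment above,
    // this must stay nil when the Cronet transport is enabled.
    call.serverName = @"authority.example.com";
    [call startWithWriteable:responsesWriteable];

Leaving serverName unset preserves the pre-patch behavior, since GRPCHost forwards the nil value and GRPCChannel then calls grpc_channel_create_call with a NULL host slice.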