Clang format

pull/18881/head
Na-Na Pang 6 years ago
parent 4198c4fcc6
commit c905f76a5b
  1. src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc (2 lines changed)
  2. src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc (2 lines changed)
  3. src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv_windows.cc (2 lines changed)
  4. src/python/grpcio_tests/tests/interop/service.py (2 lines changed)
  5. test/cpp/microbenchmarks/bm_callback_cq.cc (3 lines changed)
  6. test/cpp/microbenchmarks/bm_callback_streaming_ping_pong.cc (4 lines changed)
  7. test/cpp/microbenchmarks/bm_callback_unary_ping_pong.cc (4 lines changed)
  8. test/cpp/microbenchmarks/callback_streaming_ping_pong.h (21 lines changed)
  9. test/cpp/microbenchmarks/callback_test_service.cc (11 lines changed)
  10. test/cpp/microbenchmarks/callback_test_service.h (75 lines changed)
  11. test/cpp/microbenchmarks/callback_unary_ping_pong.h (2 lines changed)

src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc:

@@ -176,4 +176,4 @@ UniquePtr<GrpcPolledFdFactory> NewGrpcPolledFdFactory(grpc_combiner* combiner) {
 }  // namespace grpc_core
 #endif /* GRPC_ARES == 1 && defined(GRPC_UV) */

src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc:

@@ -49,4 +49,4 @@ bool grpc_ares_maybe_resolve_localhost_manually_locked(
   return out;
 }
 #endif /* GRPC_ARES == 1 && defined(GRPC_UV) */

src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv_windows.cc:

@@ -80,4 +80,4 @@ bool inner_maybe_resolve_localhost_manually_locked(
   return false;
 }
 #endif /* GRPC_ARES == 1 && (defined(GRPC_UV) || defined(GPR_WINDOWS)) */

src/python/grpcio_tests/tests/interop/service.py:

@@ -94,4 +94,4 @@ class TestService(test_pb2_grpc.TestServiceServicer):
     # NOTE(nathaniel): Apparently this is the same as the full-duplex call?
     # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)...
     def HalfDuplexCall(self, request_iterator, context):
         return self.FullDuplexCall(request_iterator, context)

test/cpp/microbenchmarks/bm_callback_cq.cc:

@@ -35,8 +35,6 @@
 namespace grpc {
 namespace testing {
-auto& force_library_initialization = Library::get();
 class TagCallback : public grpc_experimental_completion_queue_functor {
  public:
   TagCallback(int* counter, int tag) : counter_(counter), tag_(tag) {
@@ -220,6 +218,7 @@ void RunTheBenchmarksNamespaced() { RunSpecifiedBenchmarks(); }
 }  // namespace benchmark
 int main(int argc, char** argv) {
+  LibraryInitializer libInit;
   ::benchmark::Initialize(&argc, argv);
   ::grpc::testing::InitTest(&argc, &argv, false);
   benchmark::RunTheBenchmarksNamespaced();
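Note: all three benchmark binaries in this commit make the same substitution: the file-scope `auto& force_library_initialization = Library::get();` global is dropped, and `main` instead constructs a `LibraryInitializer` on the stack, tying gRPC setup and teardown to `main`'s lifetime. A minimal standalone sketch of that RAII idiom (the `fakelib` namespace is a stand-in, not a real gRPC API):

#include <cstdio>

// Stand-in for a library that needs explicit global init/shutdown.
namespace fakelib {
void Init() { std::puts("library initialized"); }
void Shutdown() { std::puts("library shut down"); }
}  // namespace fakelib

// RAII wrapper: construction initializes the library, destruction shuts it
// down. gRPC's LibraryInitializer plays this role in the benchmarks above.
class LibraryInitializer {
 public:
  LibraryInitializer() { fakelib::Init(); }
  ~LibraryInitializer() { fakelib::Shutdown(); }
  LibraryInitializer(const LibraryInitializer&) = delete;
  LibraryInitializer& operator=(const LibraryInitializer&) = delete;
};

int main() {
  LibraryInitializer libInit;  // alive for the whole benchmark run
  std::puts("benchmarks would run here");
  return 0;  // ~LibraryInitializer runs after everything else in main
}

Compared with a global `Library::get()` reference, the scoped object gives deterministic teardown and avoids static-initialization-order surprises across translation units.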

test/cpp/microbenchmarks/bm_callback_streaming_ping_pong.cc:

@@ -22,9 +22,6 @@
 namespace grpc {
 namespace testing {
-// force library initialization
-auto& force_library_initialization = Library::get();
 /*******************************************************************************
  * CONFIGURATIONS
  */
@@ -131,6 +128,7 @@ void RunTheBenchmarksNamespaced() { RunSpecifiedBenchmarks(); }
 }  // namespace benchmark
 int main(int argc, char** argv) {
+  LibraryInitializer libInit;
   ::benchmark::Initialize(&argc, argv);
   ::grpc::testing::InitTest(&argc, &argv, false);
   benchmark::RunTheBenchmarksNamespaced();

test/cpp/microbenchmarks/bm_callback_unary_ping_pong.cc:

@@ -22,9 +22,6 @@
 namespace grpc {
 namespace testing {
-// force library initialization
-auto& force_library_initialization = Library::get();
 /*******************************************************************************
  * CONFIGURATIONS
  */
@@ -113,6 +110,7 @@ void RunTheBenchmarksNamespaced() { RunSpecifiedBenchmarks(); }
 }  // namespace benchmark
 int main(int argc, char** argv) {
+  LibraryInitializer libInit;
   ::benchmark::Initialize(&argc, argv);
   ::grpc::testing::InitTest(&argc, &argv, false);
   benchmark::RunTheBenchmarksNamespaced();

test/cpp/microbenchmarks/callback_streaming_ping_pong.h:

@@ -44,19 +44,22 @@ static void BM_CallbackBidiStreaming(benchmark::State& state) {
       EchoTestService::NewStub(fixture->channel()));
   EchoRequest request;
   EchoResponse response;
-  if (state.range(0) > 0) {
-    request.set_message(std::string(state.range(0), 'a'));
+  if (message_size > 0) {
+    request.set_message(std::string(message_size, 'a'));
   } else {
     request.set_message("");
   }
-  while (state.KeepRunning()) {
+  if (state.KeepRunning()) {
     GPR_TIMER_SCOPE("BenchmarkCycle", 0);
-    ClientContext cli_ctx;
-    cli_ctx.AddMetadata(kServerFinishAfterNReads,
-                        grpc::to_string(max_ping_pongs));
-    cli_ctx.AddMetadata(kServerMessageSize, grpc::to_string(message_size));
-    BidiClient test{stub_.get(), &request, &response, &cli_ctx, max_ping_pongs};
-    test.Await();
+    std::mutex mu;
+    std::condition_variable cv;
+    bool done = false;
+    gpr_log(GPR_INFO, "big enter");
+    BidiClient* test =
+        new BidiClient(state, stub_.get(), &request, &response, mu, cv, done);
+    test->StartNewRpc();
+    test->Await();
+    gpr_log(GPR_INFO, "big exit");
   }
   fixture->Finish(state);
   fixture.reset();
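Note: the driver no longer builds one stack-allocated `BidiClient` per `KeepRunning()` iteration. It enters the timed region once, heap-allocates a single client that re-issues RPCs itself, and blocks in `Await()` on a condition variable until the reactor chain flips `done`. A self-contained sketch of that handoff, with a worker thread standing in for the RPC callback chain (names are illustrative):

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

int main() {
  std::mutex mu;
  std::condition_variable cv;
  bool done = false;

  // Stand-in for the reactor chain: when the last RPC completes it sets
  // done and notifies, just as BidiClient::OnDone does once
  // state_.KeepRunning() returns false.
  std::thread rpc_chain([&] {
    std::puts("rpc chain running");
    std::lock_guard<std::mutex> lock(mu);
    done = true;
    cv.notify_one();
  });

  // The Await() side: block until the callbacks report completion.
  {
    std::unique_lock<std::mutex> lock(mu);
    cv.wait(lock, [&] { return done; });
  }
  rpc_chain.join();
  std::puts("timed region can end now");
}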

test/cpp/microbenchmarks/callback_test_service.cc:

@@ -71,6 +71,7 @@ CallbackStreamingTestService::BidiStream() {
          kServerFinishAfterNReads, context->client_metadata(), 0);
      message_size_ = GetIntValueFromMetadata(kServerMessageSize,
                                              context->client_metadata(), 0);
+     gpr_log(GPR_INFO, "server enter n reads %d", server_write_last_);
      StartRead(&request_);
    }
    void OnDone() override {
@@ -83,21 +84,25 @@ CallbackStreamingTestService::BidiStream() {
        return;
      }
      num_msgs_read_++;
+     gpr_log(GPR_INFO, "server read %d", num_msgs_read_);
      if (message_size_ > 0) {
        response_.set_message(std::string(message_size_, 'a'));
      } else {
        response_.set_message("");
      }
-     if (num_msgs_read_ == server_write_last_) {
-       StartWriteLast(&response_, WriteOptions());
-     } else {
+     if (num_msgs_read_ < server_write_last_) {
+       gpr_log(GPR_INFO, "server start write %d", num_msgs_read_);
        StartWrite(&response_);
+     } else {
+       gpr_log(GPR_INFO, "server last write %d", num_msgs_read_);
+       StartWriteLast(&response_, WriteOptions());
      }
    }
    void OnWriteDone(bool ok) override {
      if (!ok) {
        return;
      }
+     gpr_log(GPR_INFO, "server write %d", num_msgs_read_);
      if (num_msgs_read_ < server_write_last_) {
        StartRead(&request_);
      } else {
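Note: the server-side rewrite flips the branch so the common case comes first: while `num_msgs_read_ < server_write_last_` each read is answered with `StartWrite`, and only the final response goes through `StartWriteLast`, which folds the last write and the stream close into one step. A toy simulation of that alternation (plain C++, no gRPC types; the loop stands in for the OnReadDone/OnWriteDone callback chain):

#include <cstdio>

int main() {
  const int server_write_last = 3;  // finish after this many reads
  int num_msgs_read = 0;

  // Each iteration models one OnReadDone firing on the server reactor.
  while (true) {
    ++num_msgs_read;  // a request arrived
    if (num_msgs_read < server_write_last) {
      std::printf("StartWrite(response #%d)\n", num_msgs_read);
      // OnWriteDone would then call StartRead(&request_) to continue.
    } else {
      std::printf("StartWriteLast(response #%d), closing the stream\n",
                  num_msgs_read);
      break;  // no further reads; OnDone fires after the last write
    }
  }
}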

test/cpp/microbenchmarks/callback_test_service.h:

@@ -19,6 +19,7 @@
 #ifndef TEST_CPP_MICROBENCHMARKS_CALLBACK_TEST_SERVICE_H
 #define TEST_CPP_MICROBENCHMARKS_CALLBACK_TEST_SERVICE_H
+#include <benchmark/benchmark.h>
 #include <condition_variable>
 #include <memory>
 #include <mutex>
@@ -47,24 +48,30 @@ class CallbackStreamingTestService
 class BidiClient
     : public grpc::experimental::ClientBidiReactor<EchoRequest, EchoResponse> {
  public:
-  BidiClient(EchoTestService::Stub* stub, EchoRequest* request,
-             EchoResponse* response, ClientContext* context,
-             int num_msgs_to_send)
-      : request_{request},
+  BidiClient(benchmark::State& state, EchoTestService::Stub* stub,
+             EchoRequest* request, EchoResponse* response, std::mutex& mu,
+             std::condition_variable& cv, bool& done)
+      : state_{state},
+        stub_{stub},
+        request_{request},
         response_{response},
-        context_{context},
-        msgs_to_send_{num_msgs_to_send} {
-    stub->experimental_async()->BidiStream(context_, this);
-    MaybeWrite();
-    StartCall();
+        mu_{mu},
+        cv_{cv},
+        done_(done) {
+    gpr_log(GPR_INFO, "client enter");
+    msgs_size_ = state.range(0);
+    msgs_to_send_ = state.range(1);
+    cli_ctx_ = new ClientContext();
+    cli_ctx_->AddMetadata(kServerFinishAfterNReads,
+                          grpc::to_string(msgs_to_send_));
+    cli_ctx_->AddMetadata(kServerMessageSize, grpc::to_string(msgs_size_));
   }
   void OnReadDone(bool ok) override {
     if (!ok) {
       return;
     }
-    if (ok && reads_complete_ < msgs_to_send_) {
-      reads_complete_++;
+    if (writes_complete_ < msgs_to_send_) {
       MaybeWrite();
     }
   }
@@ -79,9 +86,19 @@ class BidiClient
   void OnDone(const Status& s) override {
     GPR_ASSERT(s.ok());
-    std::unique_lock<std::mutex> l(mu_);
-    done_ = true;
-    cv_.notify_one();
+    if (state_.KeepRunning()) {
+      count++;
+      gpr_log(GPR_INFO, "client start %d rpc", count);
+      BidiClient* test =
+          new BidiClient(state_, stub_, request_, response_, mu_, cv_, done_);
+      test->StartNewRpc();
+    } else {
+      gpr_log(GPR_INFO, "client done");
+      std::unique_lock<std::mutex> l(mu_);
+      done_ = true;
+      cv_.notify_one();
+    }
+    delete cli_ctx_;
   }
   void Await() {
@@ -91,24 +108,36 @@ class BidiClient
     }
   }
+  void StartNewRpc() {
+    gpr_log(GPR_INFO, "%d rpc start", count);
+    stub_->experimental_async()->BidiStream(cli_ctx_, this);
+    gpr_log(GPR_INFO, "%d write start", count);
+    MaybeWrite();
+    StartCall();
+    gpr_log(GPR_INFO, "%d call start", count);
+  }
  private:
   void MaybeWrite() {
-    if (writes_complete_ == msgs_to_send_) {
-      StartWritesDone();
-    } else {
+    if (writes_complete_ < msgs_to_send_) {
       StartWrite(request_);
+    } else {
+      StartWritesDone();
     }
   }
+  ClientContext* cli_ctx_;
+  benchmark::State& state_;
+  EchoTestService::Stub* stub_;
   EchoRequest* request_;
   EchoResponse* response_;
-  ClientContext* context_;
-  int reads_complete_{0};
   int writes_complete_{0};
-  const int msgs_to_send_;
-  std::mutex mu_;
-  std::condition_variable cv_;
-  bool done_ = false;
+  int msgs_to_send_;
+  int msgs_size_;
+  int count{0};
+  std::mutex& mu_;
+  std::condition_variable& cv_;
+  bool& done_;
 };
 }  // namespace testing
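Note: the reworked client drives the whole benchmark from its completion callback. Each `OnDone` deletes the finished RPC's `ClientContext` (contexts are single-use in gRPC) and either allocates a fresh `BidiClient` and calls `StartNewRpc()`, or signals `Await()` that the run is over. A compact sketch of that self-restarting pattern with synchronous stand-ins (`keep_running` models `state_.KeepRunning()`; unlike the diff, the sketch also frees each client):

#include <cstdio>
#include <functional>

// Stand-in for benchmark::State::KeepRunning(): permit three restarts
// after the initial RPC.
static bool keep_running() {
  static int budget = 3;
  return budget-- > 0;
}

class Client {
 public:
  explicit Client(std::function<void()> on_all_done)
      : on_all_done_(std::move(on_all_done)) {}

  void StartNewRpc() {
    std::puts("rpc start");
    OnDone();  // a real reactor reaches OnDone via the callback chain
  }

 private:
  // Mirrors BidiClient::OnDone: restart with a fresh object or signal done.
  void OnDone() {
    std::puts("rpc done");
    if (keep_running()) {
      Client* next = new Client(on_all_done_);  // fresh client per RPC
      next->StartNewRpc();
    } else {
      on_all_done_();  // wakes the Await() side
    }
    delete this;  // this RPC's client is finished either way
  }

  std::function<void()> on_all_done_;
};

int main() {
  Client* first = new Client([] { std::puts("benchmark complete"); });
  first->StartNewRpc();
}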

test/cpp/microbenchmarks/callback_unary_ping_pong.h:

@@ -58,7 +58,7 @@ void SendCallbackUnaryPingPong(benchmark::State& state, EchoRequest* request,
     }
     delete cli_ctx;
   });
-}
+};
 template <class Fixture, class ClientContextMutator, class ServerContextMutator>
 static void BM_CallbackUnaryPingPong(benchmark::State& state) {
