Clang format

pull/18881/head
Na-Na Pang, 6 years ago
parent 4198c4fcc6 · commit c905f76a5b
11 changed files (lines changed):

1. src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc (2)
2. src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc (2)
3. src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv_windows.cc (2)
4. src/python/grpcio_tests/tests/interop/service.py (2)
5. test/cpp/microbenchmarks/bm_callback_cq.cc (3)
6. test/cpp/microbenchmarks/bm_callback_streaming_ping_pong.cc (4)
7. test/cpp/microbenchmarks/bm_callback_unary_ping_pong.cc (4)
8. test/cpp/microbenchmarks/callback_streaming_ping_pong.h (21)
9. test/cpp/microbenchmarks/callback_test_service.cc (11)
10. test/cpp/microbenchmarks/callback_test_service.h (75)
11. test/cpp/microbenchmarks/callback_unary_ping_pong.h (2)

src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc

@@ -176,4 +176,4 @@ UniquePtr<GrpcPolledFdFactory> NewGrpcPolledFdFactory(grpc_combiner* combiner) {
 }  // namespace grpc_core
-#endif /* GRPC_ARES == 1 && defined(GRPC_UV) */
+#endif /* GRPC_ARES == 1 && defined(GRPC_UV) */
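
Note: in this hunk and the three that follow, the removed and re-added lines are byte-for-byte identical as rendered here; the actual difference is invisible in this view and is most plausibly the newline at end of file (or trailing whitespace) that clang-format enforces.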

src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc

@@ -49,4 +49,4 @@ bool grpc_ares_maybe_resolve_localhost_manually_locked(
   return out;
 }
-#endif /* GRPC_ARES == 1 && defined(GRPC_UV) */
+#endif /* GRPC_ARES == 1 && defined(GRPC_UV) */

src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv_windows.cc

@@ -80,4 +80,4 @@ bool inner_maybe_resolve_localhost_manually_locked(
   return false;
 }
-#endif /* GRPC_ARES == 1 && (defined(GRPC_UV) || defined(GPR_WINDOWS)) */
+#endif /* GRPC_ARES == 1 && (defined(GRPC_UV) || defined(GPR_WINDOWS)) */

src/python/grpcio_tests/tests/interop/service.py

@@ -94,4 +94,4 @@ class TestService(test_pb2_grpc.TestServiceServicer):
     # NOTE(nathaniel): Apparently this is the same as the full-duplex call?
     # NOTE(atash): It isn't even called in the interop spec (Oct 22 2015)...
     def HalfDuplexCall(self, request_iterator, context):
-        return self.FullDuplexCall(request_iterator, context)
+        return self.FullDuplexCall(request_iterator, context)

test/cpp/microbenchmarks/bm_callback_cq.cc

@@ -35,8 +35,6 @@
 namespace grpc {
 namespace testing {
 
-auto& force_library_initialization = Library::get();
-
 class TagCallback : public grpc_experimental_completion_queue_functor {
  public:
   TagCallback(int* counter, int tag) : counter_(counter), tag_(tag) {
@@ -220,6 +218,7 @@ void RunTheBenchmarksNamespaced() { RunSpecifiedBenchmarks(); }
 }  // namespace benchmark
 
 int main(int argc, char** argv) {
+  LibraryInitializer libInit;
   ::benchmark::Initialize(&argc, argv);
   ::grpc::testing::InitTest(&argc, &argv, false);
   benchmark::RunTheBenchmarksNamespaced();
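
This hunk, together with the matching ones in the two bm_callback_*_ping_pong.cc files below, swaps the file-scope `auto& force_library_initialization = Library::get();` global for a `LibraryInitializer` object at the top of main(). The class definition is not part of this diff; the following is only a sketch of the RAII shape such a helper plausibly has (the grpc_init()/grpc_shutdown() pairing is an assumption, not the commit's actual code):

#include <grpc/grpc.h>

// Hypothetical sketch; the real LibraryInitializer lives elsewhere in the
// gRPC test tree. The point of the change is lifetime: the library now comes
// up when main() begins and is torn down when main() returns, instead of
// hanging off a file-scope static reference.
class LibraryInitializer {
 public:
  LibraryInitializer() { grpc_init(); }
  ~LibraryInitializer() { grpc_shutdown(); }
};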

test/cpp/microbenchmarks/bm_callback_streaming_ping_pong.cc

@@ -22,9 +22,6 @@
 namespace grpc {
 namespace testing {
 
-// force library initialization
-auto& force_library_initialization = Library::get();
-
 /*******************************************************************************
  * CONFIGURATIONS
  */
@@ -131,6 +128,7 @@ void RunTheBenchmarksNamespaced() { RunSpecifiedBenchmarks(); }
 }  // namespace benchmark
 
 int main(int argc, char** argv) {
+  LibraryInitializer libInit;
   ::benchmark::Initialize(&argc, argv);
   ::grpc::testing::InitTest(&argc, &argv, false);
   benchmark::RunTheBenchmarksNamespaced();

test/cpp/microbenchmarks/bm_callback_unary_ping_pong.cc

@@ -22,9 +22,6 @@
 namespace grpc {
 namespace testing {
 
-// force library initialization
-auto& force_library_initialization = Library::get();
-
 /*******************************************************************************
  * CONFIGURATIONS
  */
@@ -113,6 +110,7 @@ void RunTheBenchmarksNamespaced() { RunSpecifiedBenchmarks(); }
 }  // namespace benchmark
 
 int main(int argc, char** argv) {
+  LibraryInitializer libInit;
   ::benchmark::Initialize(&argc, argv);
   ::grpc::testing::InitTest(&argc, &argv, false);
   benchmark::RunTheBenchmarksNamespaced();

test/cpp/microbenchmarks/callback_streaming_ping_pong.h

@@ -44,19 +44,22 @@ static void BM_CallbackBidiStreaming(benchmark::State& state) {
       EchoTestService::NewStub(fixture->channel()));
   EchoRequest request;
   EchoResponse response;
-  if (state.range(0) > 0) {
-    request.set_message(std::string(state.range(0), 'a'));
+  if (message_size > 0) {
+    request.set_message(std::string(message_size, 'a'));
   } else {
     request.set_message("");
   }
-  while (state.KeepRunning()) {
+  if (state.KeepRunning()) {
     GPR_TIMER_SCOPE("BenchmarkCycle", 0);
-    ClientContext cli_ctx;
-    cli_ctx.AddMetadata(kServerFinishAfterNReads,
-                        grpc::to_string(max_ping_pongs));
-    cli_ctx.AddMetadata(kServerMessageSize, grpc::to_string(message_size));
-    BidiClient test{stub_.get(), &request, &response, &cli_ctx, max_ping_pongs};
-    test.Await();
+    std::mutex mu;
+    std::condition_variable cv;
+    bool done = false;
+    gpr_log(GPR_INFO, "big enter");
+    BidiClient* test =
+        new BidiClient(state, stub_.get(), &request, &response, mu, cv, done);
+    test->StartNewRpc();
+    test->Await();
+    gpr_log(GPR_INFO, "big exit");
   }
   fixture->Finish(state);
   fixture.reset();
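
The benchmark body changes shape here: rather than building a stack-allocated BidiClient on every `while (state.KeepRunning())` iteration, a single heap-allocated reactor is started once via StartNewRpc() and then re-issues RPCs from its own OnDone() (see the callback_test_service.h hunks below), while the benchmark thread parks in Await(). Await()'s body is elided in that later hunk (only its closing braces survive); a minimal sketch of the rendezvous it presumably wraps, given the mutex/condition-variable/flag trio passed in above:

// Sketch, assuming mu_, cv_ and done_ are the references handed to the
// BidiClient constructor: block the benchmark thread until the last chained
// OnDone() sets done_ and calls cv_.notify_one().
void Await() {
  std::unique_lock<std::mutex> l(mu_);
  while (!done_) {
    cv_.wait(l);
  }
}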

test/cpp/microbenchmarks/callback_test_service.cc

@@ -71,6 +71,7 @@ CallbackStreamingTestService::BidiStream() {
         kServerFinishAfterNReads, context->client_metadata(), 0);
     message_size_ = GetIntValueFromMetadata(kServerMessageSize,
                                             context->client_metadata(), 0);
+    gpr_log(GPR_INFO, "server enter n reads %d", server_write_last_);
     StartRead(&request_);
   }
   void OnDone() override {
void OnDone() override {
@@ -83,21 +84,25 @@ CallbackStreamingTestService::BidiStream() {
       return;
     }
     num_msgs_read_++;
+    gpr_log(GPR_INFO, "server read %d", num_msgs_read_);
     if (message_size_ > 0) {
       response_.set_message(std::string(message_size_, 'a'));
     } else {
       response_.set_message("");
     }
-    if (num_msgs_read_ == server_write_last_) {
-      StartWriteLast(&response_, WriteOptions());
-    } else {
+    if (num_msgs_read_ < server_write_last_) {
+      gpr_log(GPR_INFO, "server start write %d", num_msgs_read_);
       StartWrite(&response_);
+    } else {
+      gpr_log(GPR_INFO, "server last write %d", num_msgs_read_);
+      StartWriteLast(&response_, WriteOptions());
     }
   }
   void OnWriteDone(bool ok) override {
     if (!ok) {
       return;
     }
+    gpr_log(GPR_INFO, "server write %d", num_msgs_read_);
     if (num_msgs_read_ < server_write_last_) {
       StartRead(&request_);
     } else {
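
Beyond the added gpr_log tracing, the rewrite flips the branch so the common case (more ping-pongs pending) is tested first. It also changes one edge case: if num_msgs_read_ ever overshot server_write_last_, the old `==` test would have issued another plain StartWrite(), whereas the new `<` test ends the stream with StartWriteLast().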

test/cpp/microbenchmarks/callback_test_service.h

@@ -19,6 +19,7 @@
 #ifndef TEST_CPP_MICROBENCHMARKS_CALLBACK_TEST_SERVICE_H
 #define TEST_CPP_MICROBENCHMARKS_CALLBACK_TEST_SERVICE_H
 
+#include <benchmark/benchmark.h>
 #include <condition_variable>
 #include <memory>
 #include <mutex>
@@ -47,24 +48,30 @@ class CallbackStreamingTestService
 class BidiClient
     : public grpc::experimental::ClientBidiReactor<EchoRequest, EchoResponse> {
  public:
-  BidiClient(EchoTestService::Stub* stub, EchoRequest* request,
-             EchoResponse* response, ClientContext* context,
-             int num_msgs_to_send)
-      : request_{request},
+  BidiClient(benchmark::State& state, EchoTestService::Stub* stub,
+             EchoRequest* request, EchoResponse* response, std::mutex& mu,
+             std::condition_variable& cv, bool& done)
+      : state_{state},
+        stub_{stub},
+        request_{request},
         response_{response},
-        context_{context},
-        msgs_to_send_{num_msgs_to_send} {
-    stub->experimental_async()->BidiStream(context_, this);
-    MaybeWrite();
-    StartCall();
+        mu_{mu},
+        cv_{cv},
+        done_(done) {
+    gpr_log(GPR_INFO, "client enter");
+    msgs_size_ = state.range(0);
+    msgs_to_send_ = state.range(1);
+    cli_ctx_ = new ClientContext();
+    cli_ctx_->AddMetadata(kServerFinishAfterNReads,
+                          grpc::to_string(msgs_to_send_));
+    cli_ctx_->AddMetadata(kServerMessageSize, grpc::to_string(msgs_size_));
   }
 
   void OnReadDone(bool ok) override {
     if (!ok) {
       return;
     }
-    if (ok && reads_complete_ < msgs_to_send_) {
-      reads_complete_++;
+    if (writes_complete_ < msgs_to_send_) {
       MaybeWrite();
     }
   }
@@ -79,9 +86,19 @@ class BidiClient
   void OnDone(const Status& s) override {
     GPR_ASSERT(s.ok());
-    std::unique_lock<std::mutex> l(mu_);
-    done_ = true;
-    cv_.notify_one();
+    if (state_.KeepRunning()) {
+      count++;
+      gpr_log(GPR_INFO, "client start %d rpc", count);
+      BidiClient* test =
+          new BidiClient(state_, stub_, request_, response_, mu_, cv_, done_);
+      test->StartNewRpc();
+    } else {
+      gpr_log(GPR_INFO, "client done");
+      std::unique_lock<std::mutex> l(mu_);
+      done_ = true;
+      cv_.notify_one();
+    }
+    delete cli_ctx_;
   }
 
   void Await() {
@@ -91,24 +108,36 @@ class BidiClient
     }
   }
 
+  void StartNewRpc() {
+    gpr_log(GPR_INFO, "%d rpc start", count);
+    stub_->experimental_async()->BidiStream(cli_ctx_, this);
+    gpr_log(GPR_INFO, "%d write start", count);
+    MaybeWrite();
+    StartCall();
+    gpr_log(GPR_INFO, "%d call start", count);
+  }
+
  private:
   void MaybeWrite() {
-    if (writes_complete_ == msgs_to_send_) {
-      StartWritesDone();
-    } else {
+    if (writes_complete_ < msgs_to_send_) {
       StartWrite(request_);
+    } else {
+      StartWritesDone();
     }
   }
 
+  ClientContext* cli_ctx_;
+  benchmark::State& state_;
+  EchoTestService::Stub* stub_;
   EchoRequest* request_;
   EchoResponse* response_;
-  ClientContext* context_;
-  int reads_complete_{0};
   int writes_complete_{0};
-  const int msgs_to_send_;
-  std::mutex mu_;
-  std::condition_variable cv_;
-  bool done_ = false;
+  int msgs_to_send_;
+  int msgs_size_;
+  int count{0};
+  std::mutex& mu_;
+  std::condition_variable& cv_;
+  bool& done_;
 };
 
 }  // namespace testing
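
After these hunks the reactor owns its ClientContext (allocated in the constructor, deleted in OnDone()) and chains itself: while state_.KeepRunning() holds, each OnDone() constructs and starts a successor; otherwise it signals the waiting benchmark thread. Note the BidiClient objects themselves are never deleted in the code shown. A hypothetical driver, mirroring the benchmark body in callback_streaming_ping_pong.h above:

// Assumed context: `state` is the benchmark::State, `stub` is a
// std::unique_ptr<EchoTestService::Stub>, and `request`/`response` are live
// EchoRequest/EchoResponse messages, as in the earlier hunk.
std::mutex mu;
std::condition_variable cv;
bool done = false;
BidiClient* client =
    new BidiClient(state, stub.get(), &request, &response, mu, cv, done);
client->StartNewRpc();  // BidiStream() + first MaybeWrite() + StartCall()
client->Await();        // returns once the final OnDone() sets `done`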

test/cpp/microbenchmarks/callback_unary_ping_pong.h

@@ -58,7 +58,7 @@ void SendCallbackUnaryPingPong(benchmark::State& state, EchoRequest* request,
     }
     delete cli_ctx;
   });
-}
+};
 
 template <class Fixture, class ClientContextMutator, class ServerContextMutator>
 static void BM_CallbackUnaryPingPong(benchmark::State& state) {
