Support for open-loop sync test

pull/1380/head
Vijay Pai 10 years ago
parent e23aba4884
commit 105ff2eb2e
  1. test/cpp/qps/client_sync.cc (57 changed lines)
  2. test/cpp/qps/qpstest.proto (37 changed lines)

@@ -32,6 +32,7 @@
*/
#include <cassert>
#include <chrono>
#include <memory>
#include <mutex>
#include <string>
@@ -57,6 +58,7 @@
#include "test/cpp/qps/client.h"
#include "test/cpp/qps/qpstest.grpc.pb.h"
#include "test/cpp/qps/histogram.h"
#include "test/cpp/qps/interarrival.h"
#include "test/cpp/qps/timer.h"
namespace grpc {
@@ -64,17 +66,66 @@ namespace testing {
class SynchronousClient : public Client {
 public:
  SynchronousClient(const ClientConfig& config)
      : Client(config), interarrival_timer_() {
    num_threads_ =
        config.outstanding_rpcs_per_channel() * config.client_channels();
    responses_.resize(num_threads_);

    // Now sort out the load test type
    if (config.load_type() == CLOSED_LOOP) {
      closed_loop_ = true;
    } else {
      closed_loop_ = false;

      // Open loop: the aggregate offered load is split evenly across the
      // client threads, so per-thread rates are divided by num_threads_ and
      // per-thread interarrival gaps are multiplied by it.
      std::unique_ptr<RandomDist> random_dist;
      const auto& load = config.load_parameters();
      switch (config.load_type()) {
        case POISSON:
          random_dist.reset(
              new ExpDist(load.poisson().offered_load() / num_threads_));
          break;
        case UNIFORM:
          random_dist.reset(
              new UniformDist(load.uniform().interarrival_lo() * num_threads_,
                              load.uniform().interarrival_hi() * num_threads_));
          break;
        case DETERMINISTIC:
          random_dist.reset(
              new DetDist(num_threads_ / load.determ().offered_load()));
          break;
        case PARETO:
          random_dist.reset(
              new ParetoDist(load.pareto().interarrival_base() * num_threads_,
                             load.pareto().alpha()));
          break;
        default:
          GPR_ASSERT(false);
          break;
      }

      interarrival_timer_.init(*random_dist, num_threads_);
      for (size_t i = 0; i < num_threads_; i++) {
        next_time_.push_back(std::chrono::high_resolution_clock::now() +
                             interarrival_timer_(i));
      }
    }
  }
  virtual ~SynchronousClient() { EndThreads(); }

 protected:
  void WaitToIssue(int thread_idx) {
    if (!closed_loop_) {
      std::this_thread::sleep_until(next_time_[thread_idx]);
      next_time_[thread_idx] += interarrival_timer_(thread_idx);
    }
  }

  size_t num_threads_;
  std::vector<SimpleResponse> responses_;

 private:
  bool closed_loop_;
  InterarrivalTimer interarrival_timer_;
  std::vector<std::chrono::time_point<std::chrono::high_resolution_clock>>
      next_time_;
};
class SynchronousUnaryClient GRPC_FINAL : public SynchronousClient {
@@ -82,8 +133,9 @@ class SynchronousUnaryClient GRPC_FINAL : public SynchronousClient {
  SynchronousUnaryClient(const ClientConfig& config)
      : SynchronousClient(config) { StartThreads(num_threads_); }
  ~SynchronousUnaryClient() {}

  bool ThreadFunc(Histogram* histogram, size_t thread_idx) GRPC_OVERRIDE {
    WaitToIssue(thread_idx);
    auto* stub = channels_[thread_idx % channels_.size()].get_stub();
    double start = Timer::Now();
    grpc::ClientContext context;
@@ -113,6 +165,7 @@ class SynchronousStreamingClient GRPC_FINAL : public SynchronousClient {
  }

  bool ThreadFunc(Histogram* histogram, size_t thread_idx) GRPC_OVERRIDE {
    WaitToIssue(thread_idx);
    double start = Timer::Now();
    if (stream_->Write(request_) && stream_->Read(&responses_[thread_idx])) {
      histogram->Add((Timer::Now() - start) * 1e9);
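
For readers skimming the diff, the open-loop mechanism added above boils down to: draw an interarrival gap from the configured distribution, schedule the next issue time, and have each thread sleep until that time before sending its RPC. Below is a minimal self-contained sketch of the same idea for the Poisson case only, using std::exponential_distribution from <random> instead of the ExpDist/InterarrivalTimer helpers in interarrival.h; the class and member names are illustrative, not the actual qps code.

// Illustrative open-loop pacer, assuming a Poisson arrival process.
// Not the actual test/cpp/qps implementation.
#include <chrono>
#include <random>
#include <thread>
#include <vector>

class OpenLoopPacer {
 public:
  // offered_load_per_thread: target RPCs per second issued by each thread.
  OpenLoopPacer(double offered_load_per_thread, size_t num_threads)
      : gap_dist_(offered_load_per_thread), gen_(std::random_device{}()) {
    auto now = std::chrono::high_resolution_clock::now();
    for (size_t i = 0; i < num_threads; i++) {
      next_time_.push_back(now + NextGap());
    }
  }

  // Block until this thread's next scheduled issue time, then draw a fresh
  // exponential gap to schedule the one after it.
  void WaitToIssue(size_t thread_idx) {
    std::this_thread::sleep_until(next_time_[thread_idx]);
    next_time_[thread_idx] += NextGap();
  }

 private:
  std::chrono::high_resolution_clock::duration NextGap() {
    // gap_dist_ samples gaps in seconds with mean 1 / offered_load_per_thread.
    return std::chrono::duration_cast<
        std::chrono::high_resolution_clock::duration>(
        std::chrono::duration<double>(gap_dist_(gen_)));
  }

  std::exponential_distribution<double> gap_dist_;
  std::mt19937 gen_;
  std::vector<std::chrono::time_point<std::chrono::high_resolution_clock>>
      next_time_;
};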

@@ -92,6 +92,41 @@ enum RpcType {
STREAMING = 2;
}
enum LoadType {
  CLOSED_LOOP = 1;
  POISSON = 2;
  UNIFORM = 3;
  DETERMINISTIC = 4;
  PARETO = 5;
}

message PoissonParameters {
  optional double offered_load = 1;
}

message UniformParameters {
  optional double interarrival_lo = 1;
  optional double interarrival_hi = 2;
}

message DeterministicParameters {
  optional double offered_load = 1;
}

message ParetoParameters {
  optional double interarrival_base = 1;
  optional double alpha = 2;
}

message LoadParameters {
  oneof load {
    PoissonParameters poisson = 1;
    UniformParameters uniform = 2;
    DeterministicParameters determ = 3;
    ParetoParameters pareto = 4;
  }
}

message ClientConfig {
  repeated string server_targets = 1;
  required ClientType client_type = 2;
@@ -102,6 +137,8 @@ message ClientConfig {
  // only for async client:
  optional int32 async_client_threads = 7;
  optional RpcType rpc_type = 8 [default=UNARY];
  optional LoadType load_type = 9 [default=CLOSED_LOOP];
  optional LoadParameters load_parameters = 10;
}
// Request current stats
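
On the configuration side, the new load_type and load_parameters fields drive the branch added to SynchronousClient above. As a rough usage sketch, assuming the standard protoc-generated C++ accessors for qpstest.proto and omitting required ClientConfig fields such as client_type for brevity, an open-loop Poisson run could be requested like this; the helper name is hypothetical.

// Usage sketch only: standard protoc-generated setters for the new fields.
#include "test/cpp/qps/qpstest.grpc.pb.h"

namespace grpc {
namespace testing {

ClientConfig MakeOpenLoopPoissonConfig(double offered_load) {
  ClientConfig config;
  // load_type picks the distribution branch in SynchronousClient's switch.
  config.set_load_type(POISSON);
  // The matching oneof branch in LoadParameters carries the parameters;
  // offered_load is the aggregate RPC rate, which the client divides by
  // its thread count.
  config.mutable_load_parameters()->mutable_poisson()->set_offered_load(
      offered_load);
  return config;
}

}  // namespace testing
}  // namespace grpc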
