populate ScenarioResult.summary in JSON report

Branch: pull/6192/head
Author: Jan Tattermusch (9 years ago)
Parent: ac4251aa0e
Commit: 33c161dffa
  1. test/cpp/qps/driver.cc (43 lines changed)
  2. test/cpp/qps/report.cc (44 lines changed)

test/cpp/qps/driver.cc:

@@ -52,6 +52,7 @@
#include "test/cpp/qps/driver.h"
#include "test/cpp/qps/histogram.h"
#include "test/cpp/qps/qps_worker.h"
#include "test/cpp/qps/stats.h"
using std::list;
using std::thread;
@@ -115,6 +116,46 @@ static deque<string> get_workers(const string& name) {
   }
 }

+// helpers for postprocess_scenario_result
+static double WallTime(ClientStats s) { return s.time_elapsed(); }
+static double SystemTime(ClientStats s) { return s.time_system(); }
+static double UserTime(ClientStats s) { return s.time_user(); }
+static double ServerWallTime(ServerStats s) { return s.time_elapsed(); }
+static double ServerSystemTime(ServerStats s) { return s.time_system(); }
+static double ServerUserTime(ServerStats s) { return s.time_user(); }
+static int Cores(int n) { return n; }
+
+// Postprocess ScenarioResult and populate result summary.
+static void postprocess_scenario_result(ScenarioResult* result) {
+  Histogram histogram;
+  histogram.MergeProto(result->latencies());
+
+  auto qps = histogram.Count() / average(result->client_stats(), WallTime);
+  auto qps_per_server_core = qps / sum(result->server_cores(), Cores);
+
+  result->mutable_summary()->set_qps(qps);
+  result->mutable_summary()->set_qps_per_server_core(qps_per_server_core);
+  result->mutable_summary()->set_latency_50(histogram.Percentile(50));
+  result->mutable_summary()->set_latency_90(histogram.Percentile(90));
+  result->mutable_summary()->set_latency_95(histogram.Percentile(95));
+  result->mutable_summary()->set_latency_99(histogram.Percentile(99));
+  result->mutable_summary()->set_latency_999(histogram.Percentile(99.9));
+
+  auto server_system_time = 100.0 * sum(result->server_stats(), ServerSystemTime) /
+                            sum(result->server_stats(), ServerWallTime);
+  auto server_user_time = 100.0 * sum(result->server_stats(), ServerUserTime) /
+                          sum(result->server_stats(), ServerWallTime);
+  auto client_system_time = 100.0 * sum(result->client_stats(), SystemTime) /
+                            sum(result->client_stats(), WallTime);
+  auto client_user_time = 100.0 * sum(result->client_stats(), UserTime) /
+                          sum(result->client_stats(), WallTime);
+
+  result->mutable_summary()->set_server_system_time(server_system_time);
+  result->mutable_summary()->set_server_user_time(server_user_time);
+  result->mutable_summary()->set_client_system_time(client_system_time);
+  result->mutable_summary()->set_client_user_time(client_user_time);
+}

 // Namespace for classes and functions used only in RunScenario
 // Using this rather than local definitions to workaround gcc-4.4 limitations
 // regarding using templates without linkage
@@ -380,6 +421,8 @@ std::unique_ptr<ScenarioResult> RunScenario(
   }
   delete[] servers;
+
+  postprocess_scenario_result(result.get());
   return result;
 }
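
Note on the computation above: postprocess_scenario_result merges the per-client latency histograms and then derives QPS and the CPU-utilization percentages with the sum()/average() helpers pulled in via the newly included test/cpp/qps/stats.h. The following is a minimal, self-contained sketch of that arithmetic, assuming stats.h provides container-plus-extractor templates of roughly this shape; FakeClientStats, Sum, and Average are hypothetical stand-ins for the generated ClientStats proto and the real helpers, and the numbers are illustrative only.

    // Sketch only: stand-ins for ClientStats and the sum()/average() templates
    // assumed to live in test/cpp/qps/stats.h.
    #include <cstdio>
    #include <vector>

    struct FakeClientStats {   // hypothetical stand-in for the ClientStats proto
      double time_elapsed;     // wall time in seconds
      double time_system;      // system CPU time in seconds
    };

    template <class T, class F>
    double Sum(const T& container, F extract) {   // assumed shape of sum()
      double r = 0.0;
      for (const auto& v : container) r += extract(v);
      return r;
    }

    template <class T, class F>
    double Average(const T& container, F extract) {  // assumed shape of average()
      return Sum(container, extract) / container.size();
    }

    int main() {
      // Two clients that each ran ~10 s of wall time; 500k merged latency samples.
      std::vector<FakeClientStats> clients = {{10.0, 1.5}, {10.2, 1.7}};
      double total_count = 500000.0;

      // QPS = merged histogram count / average client wall time,
      // mirroring the first step of postprocess_scenario_result.
      double qps = total_count /
                   Average(clients, [](const FakeClientStats& s) { return s.time_elapsed; });

      // CPU utilization is reported as a percentage of wall time,
      // summed across all clients (same pattern as the server-side numbers).
      double client_system_pct =
          100.0 *
          Sum(clients, [](const FakeClientStats& s) { return s.time_system; }) /
          Sum(clients, [](const FakeClientStats& s) { return s.time_elapsed; });

      std::printf("QPS: %.1f\n", qps);                                // ~49505.0
      std::printf("Client system time: %.2f%%\n", client_system_pct); // ~15.84%
      return 0;
    }

The server-side percentages and qps_per_server_core follow the same pattern; only the stats container and the extractor function change.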

test/cpp/qps/report.cc:

@@ -45,14 +45,6 @@
 namespace grpc {
 namespace testing {

-static double WallTime(ClientStats s) { return s.time_elapsed(); }
-static double SystemTime(ClientStats s) { return s.time_system(); }
-static double UserTime(ClientStats s) { return s.time_user(); }
-static double ServerWallTime(ServerStats s) { return s.time_elapsed(); }
-static double ServerSystemTime(ServerStats s) { return s.time_system(); }
-static double ServerUserTime(ServerStats s) { return s.time_user(); }
-static int Cores(int n) { return n; }
-
 void CompositeReporter::add(std::unique_ptr<Reporter> reporter) {
   reporters_.emplace_back(std::move(reporter));
 }
@@ -82,44 +74,34 @@ void CompositeReporter::ReportTimes(const ScenarioResult& result) {
 }

 void GprLogReporter::ReportQPS(const ScenarioResult& result) {
-  Histogram histogram;
-  histogram.MergeProto(result.latencies());
-  gpr_log(GPR_INFO, "QPS: %.1f",
-          histogram.Count() / average(result.client_stats(), WallTime));
+  gpr_log(GPR_INFO, "QPS: %.1f", result.summary().qps());
 }

 void GprLogReporter::ReportQPSPerCore(const ScenarioResult& result) {
-  Histogram histogram;
-  histogram.MergeProto(result.latencies());
-  auto qps = histogram.Count() / average(result.client_stats(), WallTime);
-
-  gpr_log(GPR_INFO, "QPS: %.1f (%.1f/server core)", qps,
-          qps / sum(result.server_cores(), Cores));
+  gpr_log(GPR_INFO, "QPS: %.1f (%.1f/server core)",
+          result.summary().qps(),
+          result.summary().qps_per_server_core());
 }

 void GprLogReporter::ReportLatency(const ScenarioResult& result) {
-  Histogram histogram;
-  histogram.MergeProto(result.latencies());
   gpr_log(GPR_INFO,
           "Latencies (50/90/95/99/99.9%%-ile): %.1f/%.1f/%.1f/%.1f/%.1f us",
-          histogram.Percentile(50) / 1000, histogram.Percentile(90) / 1000,
-          histogram.Percentile(95) / 1000, histogram.Percentile(99) / 1000,
-          histogram.Percentile(99.9) / 1000);
+          result.summary().latency_50() / 1000,
+          result.summary().latency_90() / 1000,
+          result.summary().latency_95() / 1000,
+          result.summary().latency_99() / 1000,
+          result.summary().latency_999() / 1000);
 }

 void GprLogReporter::ReportTimes(const ScenarioResult& result) {
   gpr_log(GPR_INFO, "Server system time: %.2f%%",
-          100.0 * sum(result.server_stats(), ServerSystemTime) /
-              sum(result.server_stats(), ServerWallTime));
+          result.summary().server_system_time());
   gpr_log(GPR_INFO, "Server user time: %.2f%%",
-          100.0 * sum(result.server_stats(), ServerUserTime) /
-              sum(result.server_stats(), ServerWallTime));
+          result.summary().server_user_time());
   gpr_log(GPR_INFO, "Client system time: %.2f%%",
-          100.0 * sum(result.client_stats(), SystemTime) /
-              sum(result.client_stats(), WallTime));
+          result.summary().client_system_time());
   gpr_log(GPR_INFO, "Client user time: %.2f%%",
-          100.0 * sum(result.client_stats(), UserTime) /
-              sum(result.client_stats(), WallTime));
+          result.summary().client_user_time());
 }

 void JsonReporter::ReportQPS(const ScenarioResult& result) {

Loading…
Cancel
Save
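
With the summary populated in the driver, the GprLogReporter methods above reduce to straight field reads from result.summary(). Below is a standalone sketch of that consumption pattern, using a hypothetical FakeSummary struct in place of the generated ScenarioResultSummary message and plain printf in place of gpr_log; latency percentiles are assumed to be stored in nanoseconds, which is why the reporter divides by 1000 to print microseconds.

    // Sketch only: FakeSummary is a hypothetical stand-in for the generated
    // ScenarioResultSummary class; LogSummary mirrors what GprLogReporter logs.
    #include <cstdio>

    struct FakeSummary {
      double qps;
      double qps_per_server_core;
      double latency_50;           // percentiles, assumed to be in nanoseconds
      double latency_90;
      double latency_95;
      double latency_99;
      double latency_999;
      double server_system_time;   // already expressed as percentages
      double server_user_time;
      double client_system_time;
      double client_user_time;
    };

    static void LogSummary(const FakeSummary& s) {
      std::printf("QPS: %.1f (%.1f/server core)\n", s.qps, s.qps_per_server_core);
      // Divide by 1000 to convert nanosecond percentiles to microseconds,
      // matching the "us" unit in the GprLogReporter format string.
      std::printf("Latencies (50/90/95/99/99.9%%-ile): %.1f/%.1f/%.1f/%.1f/%.1f us\n",
                  s.latency_50 / 1000, s.latency_90 / 1000, s.latency_95 / 1000,
                  s.latency_99 / 1000, s.latency_999 / 1000);
      std::printf("Server system time: %.2f%%\n", s.server_system_time);
      std::printf("Server user time: %.2f%%\n", s.server_user_time);
      std::printf("Client system time: %.2f%%\n", s.client_system_time);
      std::printf("Client user time: %.2f%%\n", s.client_user_time);
    }

    int main() {
      // Illustrative values only.
      FakeSummary s = {49505.0, 6188.1, 120e3, 340e3, 410e3, 650e3, 980e3,
                       12.5, 55.0, 15.8, 60.2};
      LogSummary(s);
      return 0;
    }

The point of the change is that every reporter, including the JSON output named in the commit title, can read the same precomputed summary fields instead of re-merging histograms and re-deriving the numbers from raw client and server stats.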