Merge pull request #2079 from yang-g/statusChange

Update the Status API
Nicolas Noble 10 years ago
commit 32f020ae2a
22 changed files (changed lines per file in parentheses):

  1. include/grpc++/async_unary_call.h (4)
  2. include/grpc++/client_context.h (2)
  3. include/grpc++/impl/client_unary_call.h (2)
  4. include/grpc++/impl/service_type.h (2)
  5. include/grpc++/status.h (9)
  6. include/grpc++/stream.h (4)
  7. src/compiler/cpp_generator.cc (8)
  8. src/cpp/client/client_unary_call.cc (2)
  9. src/cpp/common/call.cc (4)
  10. src/cpp/util/status.cc (2)
  11. test/cpp/end2end/async_end2end_test.cc (18)
  12. test/cpp/end2end/client_crash_test.cc (4)
  13. test/cpp/end2end/end2end_test.cc (68)
  14. test/cpp/end2end/generic_end2end_test.cc (4)
  15. test/cpp/end2end/mock_test.cc (4)
  16. test/cpp/end2end/thread_stress_test.cc (6)
  17. test/cpp/interop/interop_client.cc (8)
  18. test/cpp/qps/client_sync.cc (4)
  19. test/cpp/qps/driver.cc (4)
  20. test/cpp/qps/qps_worker.cc (24)
  21. test/cpp/util/cli_call.cc (6)
  22. test/cpp/util/cli_call_test.cc (2)
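
The hunks below make two changes to grpc::Status: the accessors are renamed (IsOk() becomes ok(), code() becomes error_code(), details() becomes error_message(), and Status::Cancelled becomes Status::CANCELLED), and a non-OK status must now carry an explicit message. A minimal caller-side sketch of the renamed accessors follows; the LogStatus helper is illustrative only and is not part of this commit.

#include <iostream>

#include <grpc++/status.h>

// Hypothetical helper showing the renamed accessors on grpc::Status.
void LogStatus(const grpc::Status& s) {
  if (s.ok()) {  // previously s.IsOk()
    std::cout << "RPC finished with OK status." << std::endl;
  } else {
    std::cout << "RPC failed with code "
              << static_cast<int>(s.error_code())  // previously s.code()
              << ": " << s.error_message()         // previously s.details()
              << std::endl;
  }
}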

@@ -117,7 +117,7 @@ class ServerAsyncResponseWriter GRPC_FINAL
ctx_->sent_initial_metadata_ = true;
}
// The response is dropped if the status is not OK.
- if (status.IsOk()) {
+ if (status.ok()) {
finish_buf_.AddSendMessage(msg);
}
finish_buf_.AddServerSendStatus(&ctx_->trailing_metadata_, status);
@@ -125,7 +125,7 @@ class ServerAsyncResponseWriter GRPC_FINAL
}
void FinishWithError(const Status& status, void* tag) {
- GPR_ASSERT(!status.IsOk());
+ GPR_ASSERT(!status.ok());
finish_buf_.Reset(tag);
if (!ctx_->sent_initial_metadata_) {
finish_buf_.AddSendInitialMetadata(&ctx_->initial_metadata_);

@@ -41,6 +41,7 @@
#include <grpc/support/log.h>
#include <grpc/support/time.h>
#include <grpc++/config.h>
+ #include <grpc++/status.h>
#include <grpc++/time.h>
struct grpc_call;
@@ -53,7 +54,6 @@ class ChannelInterface;
class CompletionQueue;
class Credentials;
class RpcMethod;
- class Status;
template <class R>
class ClientReader;
template <class W>

@@ -35,6 +35,7 @@
#define GRPCXX_IMPL_CLIENT_UNARY_CALL_H
#include <grpc++/config.h>
+ #include <grpc++/status.h>
namespace grpc {
@@ -42,7 +43,6 @@ class ChannelInterface;
class ClientContext;
class CompletionQueue;
class RpcMethod;
- class Status;
// Wrapper that performs a blocking unary call
Status BlockingUnaryCall(ChannelInterface* channel, const RpcMethod& method,

@@ -35,6 +35,7 @@
#define GRPCXX_IMPL_SERVICE_TYPE_H
#include <grpc++/config.h>
+ #include <grpc++/status.h>
namespace grpc {
@@ -44,7 +45,6 @@ class RpcService;
class Server;
class ServerCompletionQueue;
class ServerContext;
- class Status;
class SynchronousService {
public:

@@ -42,18 +42,17 @@ namespace grpc {
class Status {
public:
Status() : code_(StatusCode::OK) {}
- explicit Status(StatusCode code) : code_(code) {}
Status(StatusCode code, const grpc::string& details)
: code_(code), details_(details) {}
// Pre-defined special status objects.
static const Status& OK;
- static const Status& Cancelled;
+ static const Status& CANCELLED;
- StatusCode code() const { return code_; }
- grpc::string details() const { return details_; }
+ StatusCode error_code() const { return code_; }
+ grpc::string error_message() const { return details_; }
- bool IsOk() const { return code_ == StatusCode::OK; }
+ bool ok() const { return code_ == StatusCode::OK; }
private:
StatusCode code_;
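
As the status.h hunk above shows, the code-only constructor is gone and Status::Cancelled has become Status::CANCELLED, so code that builds a non-OK status now passes a message (possibly empty) along with the code. A small sketch of the updated construction pattern; the function names below are examples only, not part of this commit.

#include <grpc++/status.h>

// Building statuses under the updated API.
grpc::Status MakeUnimplemented() {
  // The single-argument constructor was removed; supply a message, even if empty.
  return grpc::Status(grpc::StatusCode::UNIMPLEMENTED, "");
}

grpc::Status MakeCancelled() {
  // Renamed predefined constant (was Status::Cancelled).
  return grpc::Status::CANCELLED;
}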

@@ -615,7 +615,7 @@ class ServerAsyncReader GRPC_FINAL : public ServerAsyncStreamingInterface,
ctx_->sent_initial_metadata_ = true;
}
// The response is dropped if the status is not OK.
- if (status.IsOk()) {
+ if (status.ok()) {
finish_buf_.AddSendMessage(msg);
}
finish_buf_.AddServerSendStatus(&ctx_->trailing_metadata_, status);
@@ -623,7 +623,7 @@ class ServerAsyncReader GRPC_FINAL : public ServerAsyncStreamingInterface,
}
void FinishWithError(const Status& status, void* tag) {
- GPR_ASSERT(!status.IsOk());
+ GPR_ASSERT(!status.ok());
finish_buf_.Reset(tag);
if (!ctx_->sent_initial_metadata_) {
finish_buf_.AddSendInitialMetadata(&ctx_->initial_metadata_);

@@ -854,7 +854,7 @@ void PrintSourceServerMethod(grpc::protobuf::io::Printer *printer,
printer->Print(" (void) response;\n");
printer->Print(
" return ::grpc::Status("
- "::grpc::StatusCode::UNIMPLEMENTED);\n");
+ "::grpc::StatusCode::UNIMPLEMENTED, \"\");\n");
printer->Print("}\n\n");
} else if (ClientOnlyStreaming(method)) {
printer->Print(*vars,
@@ -867,7 +867,7 @@ void PrintSourceServerMethod(grpc::protobuf::io::Printer *printer,
printer->Print(" (void) response;\n");
printer->Print(
" return ::grpc::Status("
- "::grpc::StatusCode::UNIMPLEMENTED);\n");
+ "::grpc::StatusCode::UNIMPLEMENTED, \"\");\n");
printer->Print("}\n\n");
} else if (ServerOnlyStreaming(method)) {
printer->Print(*vars,
@@ -880,7 +880,7 @@ void PrintSourceServerMethod(grpc::protobuf::io::Printer *printer,
printer->Print(" (void) writer;\n");
printer->Print(
" return ::grpc::Status("
- "::grpc::StatusCode::UNIMPLEMENTED);\n");
+ "::grpc::StatusCode::UNIMPLEMENTED, \"\");\n");
printer->Print("}\n\n");
} else if (BidiStreaming(method)) {
printer->Print(*vars,
@@ -892,7 +892,7 @@ void PrintSourceServerMethod(grpc::protobuf::io::Printer *printer,
printer->Print(" (void) stream;\n");
printer->Print(
" return ::grpc::Status("
- "::grpc::StatusCode::UNIMPLEMENTED);\n");
+ "::grpc::StatusCode::UNIMPLEMENTED, \"\");\n");
printer->Print("}\n\n");
}
}

@@ -57,7 +57,7 @@ Status BlockingUnaryCall(ChannelInterface* channel, const RpcMethod& method,
buf.AddClientSendClose();
buf.AddClientRecvStatus(context, &status);
call.PerformOps(&buf);
- GPR_ASSERT((cq.Pluck(&buf) && buf.got_message) || !status.IsOk());
+ GPR_ASSERT((cq.Pluck(&buf) && buf.got_message) || !status.ok());
return status;
}

@@ -214,8 +214,8 @@ void CallOpBuffer::AddServerSendStatus(
trailing_metadata_count_ = 0;
}
send_status_available_ = true;
- send_status_code_ = static_cast<grpc_status_code>(status.code());
- send_status_details_ = status.details();
+ send_status_code_ = static_cast<grpc_status_code>(status.error_code());
+ send_status_details_ = status.error_message();
}
void CallOpBuffer::FillOps(grpc_op* ops, size_t* nops) {

@@ -36,6 +36,6 @@
namespace grpc {
const Status& Status::OK = Status();
- const Status& Status::Cancelled = Status(StatusCode::CANCELLED);
+ const Status& Status::CANCELLED = Status(StatusCode::CANCELLED, "");
} // namespace grpc

@@ -157,7 +157,7 @@ class AsyncEnd2endTest : public ::testing::Test {
client_ok(4);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
}
@@ -218,7 +218,7 @@ TEST_F(AsyncEnd2endTest, AsyncNextRpc) {
verify_timed_ok(&cli_cq_, 4, true);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
// Two pings and a final pong.
@@ -272,7 +272,7 @@ TEST_F(AsyncEnd2endTest, SimpleClientStreaming) {
client_ok(10);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
// One ping, two pongs.
@@ -323,7 +323,7 @@ TEST_F(AsyncEnd2endTest, SimpleServerStreaming) {
cli_stream->Finish(&recv_status, tag(9));
client_ok(9);
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
// One ping, one pong.
@@ -376,7 +376,7 @@ TEST_F(AsyncEnd2endTest, SimpleBidiStreaming) {
cli_stream->Finish(&recv_status, tag(10));
client_ok(10);
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
// Metadata tests
@@ -420,7 +420,7 @@ TEST_F(AsyncEnd2endTest, ClientInitialMetadataRpc) {
client_ok(4);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
TEST_F(AsyncEnd2endTest, ServerInitialMetadataRpc) {
@@ -467,7 +467,7 @@ TEST_F(AsyncEnd2endTest, ServerInitialMetadataRpc) {
client_ok(6);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
TEST_F(AsyncEnd2endTest, ServerTrailingMetadataRpc) {
@@ -507,7 +507,7 @@ TEST_F(AsyncEnd2endTest, ServerTrailingMetadataRpc) {
response_reader->Finish(&recv_response, &recv_status, tag(5));
client_ok(5);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
auto server_trailing_metadata = cli_ctx.GetServerTrailingMetadata();
EXPECT_EQ(meta1.second, server_trailing_metadata.find(meta1.first)->second);
EXPECT_EQ(meta2.second, server_trailing_metadata.find(meta2.first)->second);
@@ -580,7 +580,7 @@ TEST_F(AsyncEnd2endTest, MetadataRpc) {
response_reader->Finish(&recv_response, &recv_status, tag(6));
client_ok(6);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
auto server_trailing_metadata = cli_ctx.GetServerTrailingMetadata();
EXPECT_EQ(meta5.second, server_trailing_metadata.find(meta5.first)->second);
EXPECT_EQ(meta6.second, server_trailing_metadata.find(meta6.first)->second);

@@ -119,7 +119,7 @@ TEST_F(CrashTest, KillAfterWrite) {
EXPECT_FALSE(stream->Read(&response));
- EXPECT_FALSE(stream->Finish().IsOk());
+ EXPECT_FALSE(stream->Finish().ok());
}
TEST_F(CrashTest, KillBeforeWrite) {
@@ -142,7 +142,7 @@ TEST_F(CrashTest, KillBeforeWrite) {
EXPECT_FALSE(stream->Write(request));
EXPECT_FALSE(stream->Read(&response));
- EXPECT_FALSE(stream->Finish().IsOk());
+ EXPECT_FALSE(stream->Finish().ok());
}
} // namespace

@@ -101,13 +101,13 @@ class TestServiceImpl : public ::grpc::cpp::test::util::TestService::Service {
gpr_now(),
gpr_time_from_micros(request->param().client_cancel_after_us())));
}
- return Status::Cancelled;
+ return Status::CANCELLED;
} else if (request->has_param() &&
request->param().server_cancel_after_us()) {
gpr_sleep_until(gpr_time_add(
gpr_now(),
gpr_time_from_micros(request->param().server_cancel_after_us())));
- return Status::Cancelled;
+ return Status::CANCELLED;
} else {
EXPECT_FALSE(context->IsCancelled());
}
@@ -232,7 +232,7 @@ static void SendRpc(grpc::cpp::test::util::TestService::Stub* stub,
ClientContext context;
Status s = stub->Echo(&context, request, &response);
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
}
@@ -265,7 +265,7 @@ TEST_F(End2endTest, RpcDeadlineExpires) {
std::chrono::system_clock::now() + std::chrono::microseconds(10);
context.set_deadline(deadline);
Status s = stub_->Echo(&context, request, &response);
- EXPECT_EQ(StatusCode::DEADLINE_EXCEEDED, s.code());
+ EXPECT_EQ(StatusCode::DEADLINE_EXCEEDED, s.error_code());
}
// Set a long but finite deadline.
@@ -281,7 +281,7 @@ TEST_F(End2endTest, RpcLongDeadline) {
context.set_deadline(deadline);
Status s = stub_->Echo(&context, request, &response);
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
// Ask server to echo back the deadline it sees.
@@ -298,7 +298,7 @@ TEST_F(End2endTest, EchoDeadline) {
context.set_deadline(deadline);
Status s = stub_->Echo(&context, request, &response);
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
gpr_timespec sent_deadline;
Timepoint2Timespec(deadline, &sent_deadline);
// Allow 1 second error.
@@ -317,7 +317,7 @@ TEST_F(End2endTest, EchoDeadlineForNoDeadlineRpc) {
ClientContext context;
Status s = stub_->Echo(&context, request, &response);
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
EXPECT_EQ(response.param().request_deadline(), gpr_inf_future.tv_sec);
}
@@ -329,9 +329,9 @@ TEST_F(End2endTest, UnimplementedRpc) {
ClientContext context;
Status s = stub_->Unimplemented(&context, request, &response);
- EXPECT_FALSE(s.IsOk());
- EXPECT_EQ(s.code(), grpc::StatusCode::UNIMPLEMENTED);
- EXPECT_EQ(s.details(), "");
+ EXPECT_FALSE(s.ok());
+ EXPECT_EQ(s.error_code(), grpc::StatusCode::UNIMPLEMENTED);
+ EXPECT_EQ(s.error_message(), "");
EXPECT_EQ(response.message(), "");
}
@@ -347,7 +347,7 @@ TEST_F(End2endTest, RequestStreamOneRequest) {
stream->WritesDone();
Status s = stream->Finish();
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
TEST_F(End2endTest, RequestStreamTwoRequests) {
@@ -363,7 +363,7 @@ TEST_F(End2endTest, RequestStreamTwoRequests) {
stream->WritesDone();
Status s = stream->Finish();
EXPECT_EQ(response.message(), "hellohello");
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
TEST_F(End2endTest, ResponseStream) {
@@ -383,7 +383,7 @@ TEST_F(End2endTest, ResponseStream) {
EXPECT_FALSE(stream->Read(&response));
Status s = stream->Finish();
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
TEST_F(End2endTest, BidiStream) {
@@ -414,7 +414,7 @@ TEST_F(End2endTest, BidiStream) {
EXPECT_FALSE(stream->Read(&response));
Status s = stream->Finish();
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
// Talk to the two services with the same name but different package names.
@@ -433,7 +433,7 @@ TEST_F(End2endTest, DiffPackageServices) {
ClientContext context;
Status s = stub->Echo(&context, request, &response);
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
std::unique_ptr<grpc::cpp::test::util::duplicate::TestService::Stub>
dup_pkg_stub(
@@ -441,7 +441,7 @@ TEST_F(End2endTest, DiffPackageServices) {
ClientContext context2;
s = dup_pkg_stub->Echo(&context2, request, &response);
EXPECT_EQ("no package", response.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
// rpc and stream should fail on bad credentials.
@@ -459,16 +459,16 @@ TEST_F(End2endTest, BadCredentials) {
Status s = stub->Echo(&context, request, &response);
EXPECT_EQ("", response.message());
- EXPECT_FALSE(s.IsOk());
- EXPECT_EQ(StatusCode::UNKNOWN, s.code());
- EXPECT_EQ("Rpc sent on a lame channel.", s.details());
+ EXPECT_FALSE(s.ok());
+ EXPECT_EQ(StatusCode::UNKNOWN, s.error_code());
+ EXPECT_EQ("Rpc sent on a lame channel.", s.error_message());
ClientContext context2;
auto stream = stub->BidiStream(&context2);
s = stream->Finish();
- EXPECT_FALSE(s.IsOk());
- EXPECT_EQ(StatusCode::UNKNOWN, s.code());
- EXPECT_EQ("Rpc sent on a lame channel.", s.details());
+ EXPECT_FALSE(s.ok());
+ EXPECT_EQ(StatusCode::UNKNOWN, s.error_code());
+ EXPECT_EQ("Rpc sent on a lame channel.", s.error_message());
}
void CancelRpc(ClientContext* context, int delay_us, TestServiceImpl* service) {
@@ -491,8 +491,8 @@ TEST_F(End2endTest, ClientCancelsRpc) {
std::thread cancel_thread(CancelRpc, &context, kCancelDelayUs, &service_);
Status s = stub_->Echo(&context, request, &response);
cancel_thread.join();
- EXPECT_EQ(StatusCode::CANCELLED, s.code());
- EXPECT_EQ(s.details(), "Cancelled");
+ EXPECT_EQ(StatusCode::CANCELLED, s.error_code());
+ EXPECT_EQ(s.error_message(), "Cancelled");
}
// Server cancels rpc after 1ms
@@ -505,8 +505,8 @@ TEST_F(End2endTest, ServerCancelsRpc) {
ClientContext context;
Status s = stub_->Echo(&context, request, &response);
- EXPECT_EQ(StatusCode::CANCELLED, s.code());
- EXPECT_TRUE(s.details().empty());
+ EXPECT_EQ(StatusCode::CANCELLED, s.error_code());
+ EXPECT_TRUE(s.error_message().empty());
}
// Client cancels request stream after sending two messages
@@ -524,7 +524,7 @@ TEST_F(End2endTest, ClientCancelsRequestStream) {
context.TryCancel();
Status s = stream->Finish();
- EXPECT_EQ(grpc::StatusCode::CANCELLED, s.code());
+ EXPECT_EQ(grpc::StatusCode::CANCELLED, s.error_code());
EXPECT_EQ(response.message(), "");
}
@@ -558,7 +558,7 @@ TEST_F(End2endTest, ClientCancelsResponseStream) {
Status s = stream->Finish();
// The final status could be either of CANCELLED or OK depending on
// who won the race.
- EXPECT_GE(grpc::StatusCode::CANCELLED, s.code());
+ EXPECT_GE(grpc::StatusCode::CANCELLED, s.error_code());
}
// Client cancels bidi stream after sending some messages
@@ -591,7 +591,7 @@ TEST_F(End2endTest, ClientCancelsBidi) {
}
Status s = stream->Finish();
- EXPECT_EQ(grpc::StatusCode::CANCELLED, s.code());
+ EXPECT_EQ(grpc::StatusCode::CANCELLED, s.error_code());
}
TEST_F(End2endTest, RpcMaxMessageSize) {
@@ -602,7 +602,7 @@ TEST_F(End2endTest, RpcMaxMessageSize) {
ClientContext context;
Status s = stub_->Echo(&context, request, &response);
- EXPECT_FALSE(s.IsOk());
+ EXPECT_FALSE(s.ok());
}
bool MetadataContains(const std::multimap<grpc::string, grpc::string>& metadata,
@@ -632,7 +632,7 @@ TEST_F(End2endTest, SetPerCallCredentials) {
Status s = stub_->Echo(&context, request, &response);
EXPECT_EQ(request.message(), response.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
EXPECT_TRUE(MetadataContains(context.GetServerTrailingMetadata(),
GRPC_IAM_AUTHORIZATION_TOKEN_METADATA_KEY,
"fake_token"));
@@ -652,8 +652,8 @@ TEST_F(End2endTest, InsecurePerCallCredentials) {
request.mutable_param()->set_echo_metadata(true);
Status s = stub_->Echo(&context, request, &response);
- EXPECT_EQ(StatusCode::CANCELLED, s.code());
- EXPECT_EQ("Failed to set credentials to rpc.", s.details());
+ EXPECT_EQ(StatusCode::CANCELLED, s.error_code());
+ EXPECT_EQ("Failed to set credentials to rpc.", s.error_message());
}
TEST_F(End2endTest, OverridePerCallCredentials) {
@@ -684,7 +684,7 @@ TEST_F(End2endTest, OverridePerCallCredentials) {
GRPC_IAM_AUTHORITY_SELECTOR_METADATA_KEY,
"fake_selector1"));
EXPECT_EQ(request.message(), response.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
} // namespace testing

@@ -190,7 +190,7 @@ class GenericEnd2endTest : public ::testing::Test {
client_ok(9);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
}
@@ -273,7 +273,7 @@ TEST_F(GenericEnd2endTest, SimpleBidiStreaming) {
client_ok(10);
EXPECT_EQ(send_response.message(), recv_response.message());
- EXPECT_TRUE(recv_status.IsOk());
+ EXPECT_TRUE(recv_status.ok());
}
} // namespace

@@ -168,7 +168,7 @@ class FakeClient {
request.set_message("hello world");
Status s = stub_->Echo(&context, request, &response);
EXPECT_EQ(request.message(), response.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
void DoBidiStream() {
@@ -199,7 +199,7 @@ class FakeClient {
EXPECT_FALSE(stream->Read(&response));
Status s = stream->Finish();
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
void ResetStub(TestService::StubInterface* stub) { stub_ = stub; }

@@ -99,13 +99,13 @@ class TestServiceImpl : public ::grpc::cpp::test::util::TestService::Service {
gpr_now(),
gpr_time_from_micros(request->param().client_cancel_after_us())));
}
- return Status::Cancelled;
+ return Status::CANCELLED;
} else if (request->has_param() &&
request->param().server_cancel_after_us()) {
gpr_sleep_until(gpr_time_add(
gpr_now(),
gpr_time_from_micros(request->param().server_cancel_after_us())));
- return Status::Cancelled;
+ return Status::CANCELLED;
} else {
EXPECT_FALSE(context->IsCancelled());
}
@@ -219,7 +219,7 @@ static void SendRpc(grpc::cpp::test::util::TestService::Stub* stub,
ClientContext context;
Status s = stub->Echo(&context, request, &response);
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
}
}

@@ -65,11 +65,11 @@ InteropClient::InteropClient(std::shared_ptr<ChannelInterface> channel)
: channel_(channel) {}
void InteropClient::AssertOkOrPrintErrorStatus(const Status& s) {
- if (s.IsOk()) {
+ if (s.ok()) {
return;
}
- gpr_log(GPR_INFO, "Error status code: %d, message: %s", s.code(),
- s.details().c_str());
+ gpr_log(GPR_INFO, "Error status code: %d, message: %s", s.error_code(),
+ s.error_message().c_str());
GPR_ASSERT(0);
}
@@ -321,7 +321,7 @@ void InteropClient::DoCancelAfterBegin() {
gpr_log(GPR_INFO, "Trying to cancel...");
context.TryCancel();
Status s = stream->Finish();
- GPR_ASSERT(s.code() == StatusCode::CANCELLED);
+ GPR_ASSERT(s.error_code() == StatusCode::CANCELLED);
gpr_log(GPR_INFO, "Canceling streaming done.");
}

@@ -103,7 +103,7 @@ class SynchronousUnaryClient GRPC_FINAL : public SynchronousClient {
grpc::Status s =
stub->UnaryCall(&context, request_, &responses_[thread_idx]);
histogram->Add((Timer::Now() - start) * 1e9);
- return s.IsOk();
+ return s.ok();
}
};
@@ -124,7 +124,7 @@ class SynchronousStreamingClient GRPC_FINAL : public SynchronousClient {
for (auto stream = stream_.begin(); stream != stream_.end(); stream++) {
if (*stream) {
(*stream)->WritesDone();
- EXPECT_TRUE((*stream)->Finish().IsOk());
+ EXPECT_TRUE((*stream)->Finish().ok());
}
}
}

@@ -241,11 +241,11 @@ std::unique_ptr<ScenarioResult> RunScenario(
for (auto client = clients.begin(); client != clients.end(); client++) {
GPR_ASSERT(client->stream->WritesDone());
- GPR_ASSERT(client->stream->Finish().IsOk());
+ GPR_ASSERT(client->stream->Finish().ok());
}
for (auto server = servers.begin(); server != servers.end(); server++) {
GPR_ASSERT(server->stream->WritesDone());
- GPR_ASSERT(server->stream->Finish().IsOk());
+ GPR_ASSERT(server->stream->Finish().ok());
}
return result;
}

@@ -100,7 +100,7 @@ class WorkerImpl GRPC_FINAL : public Worker::Service {
GRPC_OVERRIDE {
InstanceGuard g(this);
if (!g.Acquired()) {
- return Status(RESOURCE_EXHAUSTED);
+ return Status(StatusCode::RESOURCE_EXHAUSTED, "");
}
grpc_profiler_start("qps_client.prof");
@@ -114,7 +114,7 @@ class WorkerImpl GRPC_FINAL : public Worker::Service {
GRPC_OVERRIDE {
InstanceGuard g(this);
if (!g.Acquired()) {
- return Status(RESOURCE_EXHAUSTED);
+ return Status(StatusCode::RESOURCE_EXHAUSTED, "");
}
grpc_profiler_start("qps_server.prof");
@@ -159,22 +159,22 @@ class WorkerImpl GRPC_FINAL : public Worker::Service {
ServerReaderWriter<ClientStatus, ClientArgs>* stream) {
ClientArgs args;
if (!stream->Read(&args)) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
if (!args.has_setup()) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
auto client = CreateClient(args.setup());
if (!client) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
ClientStatus status;
if (!stream->Write(status)) {
- return Status(UNKNOWN);
+ return Status(StatusCode::UNKNOWN, "");
}
while (stream->Read(&args)) {
if (!args.has_mark()) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
*status.mutable_stats() = client->Mark();
stream->Write(status);
@@ -187,23 +187,23 @@ class WorkerImpl GRPC_FINAL : public Worker::Service {
ServerReaderWriter<ServerStatus, ServerArgs>* stream) {
ServerArgs args;
if (!stream->Read(&args)) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
if (!args.has_setup()) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
auto server = CreateServer(args.setup(), server_port_);
if (!server) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
ServerStatus status;
status.set_port(server_port_);
if (!stream->Write(status)) {
- return Status(UNKNOWN);
+ return Status(StatusCode::UNKNOWN, "");
}
while (stream->Read(&args)) {
if (!args.has_mark()) {
- return Status(INVALID_ARGUMENT);
+ return Status(StatusCode::INVALID_ARGUMENT, "");
}
*status.mutable_stats() = server->Mark();
stream->Write(status);

@@ -86,7 +86,7 @@ void CliCall::Call(std::shared_ptr<grpc::ChannelInterface> channel,
cq.Next(&got_tag, &ok);
GPR_ASSERT(ok);
- if (status.IsOk()) {
+ if (status.ok()) {
std::cout << "RPC finished with OK status." << std::endl;
std::vector<grpc::Slice> slices;
recv_buffer.Dump(&slices);
@@ -97,8 +97,8 @@ void CliCall::Call(std::shared_ptr<grpc::ChannelInterface> channel,
slices[i].size());
}
} else {
- std::cout << "RPC finished with status code " << status.code()
- << " details: " << status.details() << std::endl;
+ std::cout << "RPC finished with status code " << status.error_code()
+ << " details: " << status.error_message() << std::endl;
}
}

@@ -108,7 +108,7 @@ TEST_F(CliCallTest, SimpleRpc) {
ClientContext context;
Status s = stub_->Echo(&context, request, &response);
EXPECT_EQ(response.message(), request.message());
- EXPECT_TRUE(s.IsOk());
+ EXPECT_TRUE(s.ok());
const grpc::string kMethod("/grpc.cpp.test.util.TestService/Echo");
grpc::string request_bin, response_bin, expected_response_bin;
