Remove some unnecessary new/delete patterns

pull/8638/head
Vijay Pai 9 years ago
parent f4671bfaef
commit b0a6be2b8f
  1. test/cpp/end2end/async_end2end_test.cc — 8 changed lines
  2. test/cpp/end2end/end2end_test.cc — 21 changed lines
  3. test/cpp/end2end/test_service_impl.cc — 12 changed lines
  4. test/cpp/end2end/thread_stress_test.cc — 25 changed lines

test/cpp/end2end/async_end2end_test.cc:

@@ -989,7 +989,7 @@ class AsyncEnd2endServerTryCancelTest : public AsyncEnd2endTest {
     expected_server_cq_result = false;
   }
-  std::thread* server_try_cancel_thd = NULL;
+  std::thread* server_try_cancel_thd = nullptr;
   auto verif = Verifier(GetParam().disable_blocking);
@@ -1025,7 +1025,7 @@ class AsyncEnd2endServerTryCancelTest : public AsyncEnd2endTest {
     }
   }
-  if (server_try_cancel_thd != NULL) {
+  if (server_try_cancel_thd != nullptr) {
     server_try_cancel_thd->join();
     delete server_try_cancel_thd;
   }
@@ -1250,7 +1250,7 @@ class AsyncEnd2endServerTryCancelTest : public AsyncEnd2endTest {
     expected_cq_result = false;
   }
-  std::thread* server_try_cancel_thd = NULL;
+  std::thread* server_try_cancel_thd = nullptr;
   auto verif = Verifier(GetParam().disable_blocking);
@@ -1330,7 +1330,7 @@ class AsyncEnd2endServerTryCancelTest : public AsyncEnd2endTest {
     EXPECT_EQ(verif.Next(cq_.get(), ignore_cq_result), 8);
   }
-  if (server_try_cancel_thd != NULL) {
+  if (server_try_cancel_thd != nullptr) {
     server_try_cancel_thd->join();
     delete server_try_cancel_thd;
   }

test/cpp/end2end/end2end_test.cc:

@@ -656,25 +656,23 @@ TEST_P(End2endTest, SimpleRpcWithCustomeUserAgentPrefix) {
 TEST_P(End2endTest, MultipleRpcsWithVariedBinaryMetadataValue) {
   ResetStub();
-  std::vector<std::thread*> threads;
+  std::vector<std::thread> threads;
   for (int i = 0; i < 10; ++i) {
-    threads.push_back(new std::thread(SendRpc, stub_.get(), 10, true));
+    threads.emplace_back(SendRpc, stub_.get(), 10, true);
   }
   for (int i = 0; i < 10; ++i) {
-    threads[i]->join();
-    delete threads[i];
+    threads[i].join();
   }
 }

 TEST_P(End2endTest, MultipleRpcs) {
   ResetStub();
-  std::vector<std::thread*> threads;
+  std::vector<std::thread> threads;
   for (int i = 0; i < 10; ++i) {
-    threads.push_back(new std::thread(SendRpc, stub_.get(), 10, false));
+    threads.emplace_back(SendRpc, stub_.get(), 10, false);
   }
   for (int i = 0; i < 10; ++i) {
-    threads[i]->join();
-    delete threads[i];
+    threads[i].join();
   }
 }
@@ -1058,13 +1056,12 @@ TEST_P(ProxyEnd2endTest, SimpleRpcWithEmptyMessages) {
 TEST_P(ProxyEnd2endTest, MultipleRpcs) {
   ResetStub();
-  std::vector<std::thread*> threads;
+  std::vector<std::thread> threads;
   for (int i = 0; i < 10; ++i) {
-    threads.push_back(new std::thread(SendRpc, stub_.get(), 10, false));
+    threads.emplace_back(SendRpc, stub_.get(), 10, false);
   }
   for (int i = 0; i < 10; ++i) {
-    threads[i]->join();
-    delete threads[i];
+    threads[i].join();
   }
 }

test/cpp/end2end/test_service_impl.cc:

@@ -194,7 +194,7 @@ Status TestServiceImpl::RequestStream(ServerContext* context,
     return Status::CANCELLED;
   }
-  std::thread* server_try_cancel_thd = NULL;
+  std::thread* server_try_cancel_thd = nullptr;
   if (server_try_cancel == CANCEL_DURING_PROCESSING) {
     server_try_cancel_thd =
         new std::thread(&TestServiceImpl::ServerTryCancel, this, context);
@@ -212,7 +212,7 @@ Status TestServiceImpl::RequestStream(ServerContext* context,
   }
   gpr_log(GPR_INFO, "Read: %d messages", num_msgs_read);
-  if (server_try_cancel_thd != NULL) {
+  if (server_try_cancel_thd != nullptr) {
     server_try_cancel_thd->join();
     delete server_try_cancel_thd;
     return Status::CANCELLED;
@@ -248,7 +248,7 @@ Status TestServiceImpl::ResponseStream(ServerContext* context,
   }
   EchoResponse response;
-  std::thread* server_try_cancel_thd = NULL;
+  std::thread* server_try_cancel_thd = nullptr;
   if (server_try_cancel == CANCEL_DURING_PROCESSING) {
     server_try_cancel_thd =
         new std::thread(&TestServiceImpl::ServerTryCancel, this, context);
@@ -259,7 +259,7 @@ Status TestServiceImpl::ResponseStream(ServerContext* context,
     writer->Write(response);
   }
-  if (server_try_cancel_thd != NULL) {
+  if (server_try_cancel_thd != nullptr) {
     server_try_cancel_thd->join();
     delete server_try_cancel_thd;
     return Status::CANCELLED;
@@ -295,7 +295,7 @@ Status TestServiceImpl::BidiStream(
     return Status::CANCELLED;
   }
-  std::thread* server_try_cancel_thd = NULL;
+  std::thread* server_try_cancel_thd = nullptr;
   if (server_try_cancel == CANCEL_DURING_PROCESSING) {
     server_try_cancel_thd =
         new std::thread(&TestServiceImpl::ServerTryCancel, this, context);
@@ -307,7 +307,7 @@ Status TestServiceImpl::BidiStream(
     stream->Write(response);
   }
-  if (server_try_cancel_thd != NULL) {
+  if (server_try_cancel_thd != nullptr) {
     server_try_cancel_thd->join();
     delete server_try_cancel_thd;
     return Status::CANCELLED;

test/cpp/end2end/thread_stress_test.cc:

@@ -243,8 +243,8 @@ class CommonStressTestAsyncServer
       RefreshContext(i);
     }
     for (int i = 0; i < kNumAsyncServerThreads; i++) {
-      server_threads_.push_back(
-          new std::thread(&CommonStressTestAsyncServer::ProcessRpcs, this));
+      server_threads_.emplace_back(&CommonStressTestAsyncServer::ProcessRpcs,
+                                   this);
     }
   }
   void TearDown() override {
@@ -256,8 +256,7 @@ class CommonStressTestAsyncServer
     }
     for (int i = 0; i < kNumAsyncServerThreads; i++) {
-      server_threads_[i]->join();
-      delete server_threads_[i];
+      server_threads_[i].join();
     }
     void* ignored_tag;
@@ -316,7 +315,7 @@ class CommonStressTestAsyncServer
   std::unique_ptr<ServerCompletionQueue> cq_;
   bool shutting_down_;
   std::mutex mu_;
-  std::vector<std::thread*> server_threads_;
+  std::vector<std::thread> server_threads_;
 };

 template <class Common>
@@ -440,26 +439,24 @@ class AsyncClientEnd2endTest : public ::testing::Test {
 TYPED_TEST_CASE(AsyncClientEnd2endTest, CommonTypes);

 TYPED_TEST(AsyncClientEnd2endTest, ThreadStress) {
   this->common_.ResetStub();
-  std::vector<std::thread *> send_threads, completion_threads;
+  std::vector<std::thread> send_threads, completion_threads;
   for (int i = 0; i < kNumAsyncReceiveThreads; ++i) {
-    completion_threads.push_back(new std::thread(
-        &AsyncClientEnd2endTest_ThreadStress_Test<TypeParam>::AsyncCompleteRpc,
-        this));
+    completion_threads.emplace_back(
+        &AsyncClientEnd2endTest_ThreadStress_Test<TypeParam>::AsyncCompleteRpc,
+        this);
   }
   for (int i = 0; i < kNumAsyncSendThreads; ++i) {
-    send_threads.push_back(new std::thread(
-        &AsyncClientEnd2endTest_ThreadStress_Test<TypeParam>::AsyncSendRpc,
-        this, kNumRpcs));
+    send_threads.emplace_back(
+        &AsyncClientEnd2endTest_ThreadStress_Test<TypeParam>::AsyncSendRpc,
+        this, kNumRpcs);
   }
   for (int i = 0; i < kNumAsyncSendThreads; ++i) {
-    send_threads[i]->join();
-    delete send_threads[i];
+    send_threads[i].join();
   }
   this->Wait();
   for (int i = 0; i < kNumAsyncReceiveThreads; ++i) {
-    completion_threads[i]->join();
-    delete completion_threads[i];
+    completion_threads[i].join();
   }
 }

Loading…
Cancel
Save