From f1873bbca19400110f6a0fa4333453165ebdc963 Mon Sep 17 00:00:00 2001
From: Alexander Alekhin
Date: Tue, 12 Nov 2013 18:15:50 +0400
Subject: [PATCH] perf tests: allow to skip performance tests

---
 modules/ts/include/opencv2/ts/ts_perf.hpp |  3 ++
 modules/ts/src/ts_perf.cpp                | 47 ++++++++++++++++-------
 2 files changed, 37 insertions(+), 13 deletions(-)

diff --git a/modules/ts/include/opencv2/ts/ts_perf.hpp b/modules/ts/include/opencv2/ts/ts_perf.hpp
index 6a0e9215be..9238b3e342 100644
--- a/modules/ts/include/opencv2/ts/ts_perf.hpp
+++ b/modules/ts/include/opencv2/ts/ts_perf.hpp
@@ -243,6 +243,7 @@ typedef struct CV_EXPORTS performance_metrics
         TERM_TIME = 1,
         TERM_INTERRUPT = 2,
         TERM_EXCEPTION = 3,
+        TERM_SKIP_TEST = 4, // there are some limitations and test should be skipped
         TERM_UNKNOWN = -1
     };
 
@@ -279,6 +280,8 @@ public:
     static enum PERF_STRATEGY getPerformanceStrategy();
     static enum PERF_STRATEGY setPerformanceStrategy(enum PERF_STRATEGY strategy);
 
+    class PerfSkipTestException: public cv::Exception {};
+
 protected:
     virtual void PerfTestBody() = 0;
 
diff --git a/modules/ts/src/ts_perf.cpp b/modules/ts/src/ts_perf.cpp
index 319076ca88..3bcb5c11a9 100644
--- a/modules/ts/src/ts_perf.cpp
+++ b/modules/ts/src/ts_perf.cpp
@@ -1175,7 +1175,14 @@ void TestBase::reportMetrics(bool toJUnitXML)
 {
     performance_metrics& m = calcMetrics();
 
-    if (toJUnitXML)
+    if (m.terminationReason == performance_metrics::TERM_SKIP_TEST)
+    {
+        if (toJUnitXML)
+        {
+            RecordProperty("custom_status", "skipped");
+        }
+    }
+    else if (toJUnitXML)
     {
         RecordProperty("bytesIn", (int)m.bytesIn);
         RecordProperty("bytesOut", (int)m.bytesOut);
@@ -1267,21 +1274,30 @@ void TestBase::SetUp()
 void TestBase::TearDown()
 {
-    if (!HasFailure() && !verified)
-        ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of performance test.";
-
-    validateMetrics();
-    if (HasFailure())
-        reportMetrics(false);
+    if (metrics.terminationReason == performance_metrics::TERM_SKIP_TEST)
+    {
+        LOGI("\tTest was skipped");
+        GTEST_SUCCEED() << "Test was skipped";
+    }
     else
     {
-        const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
-        const char* type_param = test_info->type_param();
-        const char* value_param = test_info->value_param();
-        if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
-        if (type_param) printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
-        reportMetrics(true);
+        if (!HasFailure() && !verified)
+            ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of performance test.";
+
+        validateMetrics();
+        if (HasFailure())
+        {
+            reportMetrics(false);
+            return;
+        }
     }
+
+    const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
+    const char* type_param = test_info->type_param();
+    const char* value_param = test_info->value_param();
+    if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
+    if (type_param) printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
+    reportMetrics(true);
 }
 
 std::string TestBase::getDataPath(const std::string& relativePath)
 {
@@ -1331,6 +1347,11 @@ void TestBase::RunPerfTestBody()
     {
         this->PerfTestBody();
     }
+    catch(PerfSkipTestException&)
+    {
+        metrics.terminationReason = performance_metrics::TERM_SKIP_TEST;
+        return;
+    }
     catch(PerfEarlyExitException&)
     {
         metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
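
Usage sketch (not part of the patch, illustrative only): with this change applied, a perf test can skip itself by throwing the new TestBase::PerfSkipTestException from its body; RunPerfTestBody() catches it, records TERM_SKIP_TEST, and TearDown()/reportMetrics() then report the test as skipped instead of failing the sanity check. The test name, the blur workload, and the cv::useOptimized() capability check below are hypothetical placeholders, not taken from the patch.

    #include "opencv2/ts/ts_perf.hpp"
    #include "opencv2/imgproc/imgproc.hpp"  // cv::blur (2.4-era header layout)

    using namespace perf;

    // Hypothetical perf test: skip when a required capability is unavailable.
    PERF_TEST(ExampleGroup, blur3x3_skip_when_unsupported)
    {
        if (!cv::useOptimized())  // placeholder condition for "there are some limitations"
            throw ::perf::TestBase::PerfSkipTestException();  // caught in RunPerfTestBody(), sets TERM_SKIP_TEST

        cv::Mat src(720, 1280, CV_8UC1);
        cv::Mat dst;
        declare.in(src, WARMUP_RNG).out(dst);

        TEST_CYCLE() cv::blur(src, dst, cv::Size(3, 3));

        SANITY_CHECK(dst, 1);
    }

The usual perf-module wiring (a CV_PERF_TEST_MAIN() entry point and the module's build rules) is assumed to exist elsewhere; when the exception is thrown, the run finishes with GTEST_SUCCEED() and a "custom_status: skipped" property in the JUnit XML rather than a failure.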