Mirror of https://github.com/opencv/opencv.git
parent df9f707ff0
commit 5cce038958
36 changed files with 7922 additions and 1643 deletions
@@ -1,8 +1,6 @@
if(ZLIB_FOUND)
    include_directories(${ZLIB_INCLUDE_DIR})
    set(deps ${ZLIB_LIBRARIES})
else()
    include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../../3rdparty/zlib")
    set(deps zlib)
endif()
define_opencv_module(core ${deps})
define_opencv_module(core ${ZLIB_LIBRARY})
@@ -0,0 +1,25 @@
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

#define TYPICAL_MAT_SIZES_ABS TYPICAL_MAT_SIZES
#define TYPICAL_MAT_TYPES_ABS CV_8SC1, CV_8SC4, CV_32SC1, CV_32FC1
#define TYPICAL_MATS_ABS testing::Combine( testing::Values( TYPICAL_MAT_SIZES_ABS), testing::Values( TYPICAL_MAT_TYPES_ABS) )

PERF_TEST_P(Size_MatType, abs, TYPICAL_MATS_ABS)
{
    Size sz = std::tr1::get<0>(GetParam());
    int type = std::tr1::get<1>(GetParam());

    cv::Mat a = Mat(sz, type);
    cv::Mat c = Mat(sz, type);

    declare.in(a, ::perf::TestBase::WARMUP_RNG).out(c).time(0.5);

    TEST_CYCLE(100) c = cv::abs(a);

    SANITY_CHECK(c);
}
@@ -0,0 +1,25 @@
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

#define TYPICAL_MAT_SIZES_BITWNOT TYPICAL_MAT_SIZES
#define TYPICAL_MAT_TYPES_BITWNOT CV_8SC1, CV_8SC4, CV_32SC1, CV_32SC4
#define TYPICAL_MATS_BITWNOT testing::Combine( testing::Values( TYPICAL_MAT_SIZES_BITWNOT), testing::Values( TYPICAL_MAT_TYPES_BITWNOT) )

PERF_TEST_P(Size_MatType, bitwise_not, TYPICAL_MATS_BITWNOT)
{
    Size sz = std::tr1::get<0>(GetParam());
    int type = std::tr1::get<1>(GetParam());

    cv::Mat a = Mat(sz, type);
    cv::Mat c = Mat(sz, type);

    declare.in(a, WARMUP_RNG).out(c);

    TEST_CYCLE(100) cv::bitwise_not(a, c);

    SANITY_CHECK(c);
}
@@ -0,0 +1,66 @@
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

#define TYPICAL_MAT_SIZES_CORE_ARITHM TYPICAL_MAT_SIZES
#define TYPICAL_MAT_TYPES_CORE_ARITHM CV_8UC1, CV_8SC1, CV_8UC4, CV_32SC1, CV_32FC1
#define TYPICAL_MATS_CORE_ARITHM testing::Combine( testing::Values( TYPICAL_MAT_SIZES_CORE_ARITHM ), testing::Values( TYPICAL_MAT_TYPES_CORE_ARITHM ) )

#define TYPICAL_MAT_TYPES_BITW_ARITHM CV_8UC1, CV_8SC1, CV_8UC4, CV_32SC1, CV_32SC4
#define TYPICAL_MATS_BITW_ARITHM testing::Combine( testing::Values( TYPICAL_MAT_SIZES_CORE_ARITHM ), testing::Values( TYPICAL_MAT_TYPES_BITW_ARITHM ) )

#define PERF_TEST_P__CORE_ARITHM(__f, __testset) \
PERF_TEST_P(Size_MatType, core_arithm__ ## __f, __testset) \
{ \
    Size sz = std::tr1::get<0>(GetParam()); \
    int type = std::tr1::get<1>(GetParam()); \
    cv::Mat a = Mat(sz, type); \
    cv::Mat b = Mat(sz, type); \
    cv::Mat c = Mat(sz, type); \
    \
    declare.in(a, b, WARMUP_RNG) \
        .out(c); \
    \
    TEST_CYCLE(100) __f(a, b, c); \
    \
    SANITY_CHECK(c); \
}

#define PERF_TEST_P__CORE_ARITHM_SCALAR(__f, __testset) \
PERF_TEST_P(Size_MatType, core_arithm__ ## __f ##__Scalar, __testset) \
{ \
    Size sz = std::tr1::get<0>(GetParam()); \
    int type = std::tr1::get<1>(GetParam()); \
    cv::Mat a = Mat(sz, type); \
    cv::Scalar b; \
    cv::Mat c = Mat(sz, type); \
    \
    declare.in(a, b, WARMUP_RNG) \
        .out(c); \
    \
    TEST_CYCLE(100) __f(a, b, c); \
    \
    SANITY_CHECK(c); \
}

PERF_TEST_P__CORE_ARITHM(bitwise_and, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM(bitwise_or, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM(bitwise_xor, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM(add, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(subtract, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(min, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(max, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(absdiff, TYPICAL_MATS_CORE_ARITHM)

PERF_TEST_P__CORE_ARITHM_SCALAR(bitwise_and, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(bitwise_or, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(bitwise_xor, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(add, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(subtract, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(min, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(max, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(absdiff, TYPICAL_MATS_CORE_ARITHM)
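// For readability, a sketch of what one instantiation above expands to
// (mechanical substitution of __f = add into the macro body; nothing new
// is introduced beyond the names already used there):
//
//   PERF_TEST_P(Size_MatType, core_arithm__add, TYPICAL_MATS_CORE_ARITHM)
//   {
//       Size sz = std::tr1::get<0>(GetParam());
//       int type = std::tr1::get<1>(GetParam());
//       cv::Mat a = Mat(sz, type);
//       cv::Mat b = Mat(sz, type);
//       cv::Mat c = Mat(sz, type);
//
//       declare.in(a, b, WARMUP_RNG).out(c);   // fill inputs, register output
//
//       TEST_CYCLE(100) add(a, b, c);          // timed loop around cv::add
//
//       SANITY_CHECK(c);                       // regression-check the result
//   }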
@@ -0,0 +1,3 @@
#include "perf_precomp.hpp"

CV_PERF_TEST_MAIN(core)
@@ -0,0 +1 @@
#include "perf_precomp.hpp"
@@ -0,0 +1,10 @@
#ifndef __OPENCV_PERF_PRECOMP_HPP__
#define __OPENCV_PERF_PRECOMP_HPP__

#include "opencv2/ts/ts.hpp"

#if GTEST_CREATE_SHARED_LIBRARY
#error no modules except ts should have GTEST_CREATE_SHARED_LIBRARY defined
#endif

#endif
@@ -0,0 +1,426 @@
#include "perf_precomp.hpp"
#include "opencv2/core/core_c.h"

using namespace std;
using namespace cv;
using namespace perf;


/*
// Scalar sum(InputArray arr)
*/
PERF_TEST_P( Size_MatType, sum, TYPICAL_MATS )
{
    Size sz = std::tr1::get<0>(GetParam());
    int type = std::tr1::get<1>(GetParam());

    Mat arr(sz, type);
    Scalar s;

    declare.in(arr, WARMUP_RNG);

    TEST_CYCLE(100) { s = sum(arr); }

    SANITY_CHECK(s);
}


/*
// Scalar mean(InputArray src)
*/
PERF_TEST_P( Size_MatType, mean, TYPICAL_MATS )
{
    Size sz = std::tr1::get<0>(GetParam());
    int type = std::tr1::get<1>(GetParam());

    Mat src(sz, type);
    Scalar s;

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { s = mean(src); }

    SANITY_CHECK(s);
}


/*
// Scalar mean(InputArray src, InputArray mask=noArray())
*/
PERF_TEST_P( Size_MatType, mean_mask, TYPICAL_MATS )
{
    Size sz = std::tr1::get<0>(GetParam());
    int type = std::tr1::get<1>(GetParam());

    Mat src(sz, type);
    Mat mask = Mat::ones(src.size(), CV_8U);
    Scalar s;

    declare.in(src, WARMUP_RNG).in(mask);

    TEST_CYCLE(100) { s = mean(src, mask); }

    SANITY_CHECK(s);
}

CV_FLAGS(NormType, NORM_INF, NORM_L1, NORM_L2, NORM_TYPE_MASK, NORM_RELATIVE, NORM_MINMAX)
typedef std::tr1::tuple<Size, MatType, NormType> Size_MatType_NormType_t;
typedef perf::TestBaseWithParam<Size_MatType_NormType_t> Size_MatType_NormType;

/*
// double norm(InputArray src1, int normType=NORM_L2)
*/
PERF_TEST_P( Size_MatType_NormType, norm,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( (int)NORM_INF, (int)NORM_L1, (int)NORM_L2 )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int normType = std::tr1::get<2>(GetParam());

    Mat src1(sz, matType);
    double n;

    declare.in(src1, WARMUP_RNG);

    TEST_CYCLE(100) { n = norm(src1, normType); }

    SANITY_CHECK(n);
}


/*
// double norm(InputArray src1, int normType=NORM_L2, InputArray mask=noArray())
*/
PERF_TEST_P( Size_MatType_NormType, norm_mask,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( (int)NORM_INF, (int)NORM_L1, (int)NORM_L2 )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int normType = std::tr1::get<2>(GetParam());

    Mat src1(sz, matType);
    Mat mask = Mat::ones(sz, CV_8U);
    double n;

    declare.in(src1, WARMUP_RNG).in(mask);

    TEST_CYCLE(100) { n = norm(src1, normType, mask); }

    SANITY_CHECK(n);
}


/*
// double norm(InputArray src1, InputArray src2, int normType)
*/
PERF_TEST_P( Size_MatType_NormType, norm2,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( (int)NORM_INF, (int)NORM_L1, (int)NORM_L2, (int)(NORM_RELATIVE+NORM_INF), (int)(NORM_RELATIVE+NORM_L1), (int)(NORM_RELATIVE+NORM_L2) )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int normType = std::tr1::get<2>(GetParam());

    Mat src1(sz, matType);
    Mat src2(sz, matType);
    double n;

    declare.in(src1, src2, WARMUP_RNG);

    TEST_CYCLE(100) { n = norm(src1, src2, normType); }

    SANITY_CHECK(n);
}


/*
// double norm(InputArray src1, InputArray src2, int normType, InputArray mask=noArray())
*/
PERF_TEST_P( Size_MatType_NormType, norm2_mask,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( (int)NORM_INF, (int)NORM_L1, (int)NORM_L2, (int)(NORM_RELATIVE+NORM_INF), (int)(NORM_RELATIVE+NORM_L1), (int)(NORM_RELATIVE+NORM_L2) )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int normType = std::tr1::get<2>(GetParam());

    Mat src1(sz, matType);
    Mat src2(sz, matType);
    Mat mask = Mat::ones(sz, CV_8U);
    double n;

    declare.in(src1, src2, WARMUP_RNG).in(mask);

    TEST_CYCLE(100) { n = norm(src1, src2, normType, mask); }

    SANITY_CHECK(n);
}


/*
// void normalize(const InputArray src, OutputArray dst, double alpha=1, double beta=0, int normType=NORM_L2)
*/
PERF_TEST_P( Size_MatType_NormType, normalize,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( (int)NORM_INF, (int)NORM_L1, (int)NORM_L2 )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int normType = std::tr1::get<2>(GetParam());

    Mat src(sz, matType);
    Mat dst(sz, matType);
    double alpha = 100.;
    if(normType==NORM_L1) alpha = (double)src.total() * src.channels();
    if(normType==NORM_L2) alpha = (double)src.total()/10;

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { normalize(src, dst, alpha, 0., normType); }

    SANITY_CHECK(dst);
}


/*
// void normalize(const InputArray src, OutputArray dst, double alpha=1, double beta=0, int normType=NORM_L2, int rtype=-1, InputArray mask=noArray())
*/
PERF_TEST_P( Size_MatType_NormType, normalize_mask,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( (int)NORM_INF, (int)NORM_L1, (int)NORM_L2 )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int normType = std::tr1::get<2>(GetParam());

    Mat src(sz, matType);
    Mat dst(sz, matType);
    Mat mask = Mat::ones(sz, CV_8U);
    double alpha = 100.;
    if(normType==NORM_L1) alpha = (double)src.total() * src.channels();
    if(normType==NORM_L2) alpha = (double)src.total()/10;

    declare.in(src, WARMUP_RNG).in(mask).out(dst);

    TEST_CYCLE(100) { normalize(src, dst, alpha, 0., normType, -1, mask); }

    SANITY_CHECK(dst);
}


/*
// void normalize(const InputArray src, OutputArray dst, double alpha=1, double beta=0, int normType=NORM_L2, int rtype=-1)
*/
PERF_TEST_P( Size_MatType_NormType, normalize_32f,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( (int)NORM_INF, (int)NORM_L1, (int)NORM_L2 )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int normType = std::tr1::get<2>(GetParam());

    Mat src(sz, matType);
    Mat dst(sz, matType);
    double alpha = 100.;
    if(normType==NORM_L1) alpha = (double)src.total() * src.channels();
    if(normType==NORM_L2) alpha = (double)src.total()/10;

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { normalize(src, dst, alpha, 0., normType, CV_32F); }

    SANITY_CHECK(dst);
}


/*
// void normalize(const InputArray src, OutputArray dst, double alpha=1, double beta=0, int normType=NORM_L2)
*/
PERF_TEST_P( Size_MatType, normalize_minmax, TYPICAL_MATS )
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());

    Mat src(sz, matType);
    randu(src, 0, 256);
    Mat dst(sz, matType);

    declare.in(src).out(dst);

    TEST_CYCLE(100) { normalize(src, dst, 20., 100., NORM_MINMAX); }

    SANITY_CHECK(dst);
}


/*
// void meanStdDev(InputArray src, OutputArray mean, OutputArray stddev)
*/
PERF_TEST_P( Size_MatType, meanStdDev, TYPICAL_MATS )
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());

    Mat src(sz, matType);
    Mat mean, dev;

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { meanStdDev(src, mean, dev); }

    SANITY_CHECK(mean);
    SANITY_CHECK(dev);
}


/*
// void meanStdDev(InputArray src, OutputArray mean, OutputArray stddev, InputArray mask=noArray())
*/
PERF_TEST_P( Size_MatType, meanStdDev_mask, TYPICAL_MATS )
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());

    Mat src(sz, matType);
    Mat mask = Mat::ones(sz, CV_8U);
    Mat mean, dev;

    declare.in(src, WARMUP_RNG).in(mask);

    TEST_CYCLE(100) { meanStdDev(src, mean, dev, mask); }

    SANITY_CHECK(mean);
    SANITY_CHECK(dev);
}


/*
// int countNonZero(InputArray mtx)
*/
PERF_TEST_P( Size_MatType, countNonZero, TYPICAL_MATS_C1 )
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());

    Mat src(sz, matType);
    int cnt = 0;

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { cnt = countNonZero(src); }

    SANITY_CHECK(cnt);
}

/*
// void minMaxLoc(InputArray src, double* minVal, double* maxVal=0, Point* minLoc=0, Point* maxLoc=0, InputArray mask=noArray())
*/
PERF_TEST_P( Size_MatType, minMaxLoc, TYPICAL_MATS_C1 )
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());

    Mat src(sz, matType);
    double minVal, maxVal;
    Point minLoc, maxLoc;

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { minMaxLoc(src, &minVal, &maxVal, &minLoc, &maxLoc); }

    SANITY_CHECK(minVal);
    SANITY_CHECK(maxVal);
}



CV_ENUM(ROp, CV_REDUCE_SUM, CV_REDUCE_AVG, CV_REDUCE_MAX, CV_REDUCE_MIN)
typedef std::tr1::tuple<Size, MatType, ROp> Size_MatType_ROp_t;
typedef perf::TestBaseWithParam<Size_MatType_ROp_t> Size_MatType_ROp;


/*
// void reduce(InputArray mtx, OutputArray vec, int dim, int reduceOp, int dtype=-1)
*/
PERF_TEST_P( Size_MatType_ROp, reduceR,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( CV_REDUCE_SUM, CV_REDUCE_AVG, CV_REDUCE_MAX, CV_REDUCE_MIN )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int reduceOp = std::tr1::get<2>(GetParam());

    // sums/averages of narrow (sub-32S) data can overflow the source depth,
    // so widen the destination to CV_32S for those operations
    int ddepth = -1;
    if( CV_MAT_DEPTH(matType) < CV_32S && (reduceOp == CV_REDUCE_SUM || reduceOp == CV_REDUCE_AVG) )
        ddepth = CV_32S;
    Mat src(sz, matType);
    Mat vec;

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { reduce(src, vec, 0, reduceOp, ddepth); }

    SANITY_CHECK(vec);
}

/*
// void reduce(InputArray mtx, OutputArray vec, int dim, int reduceOp, int dtype=-1)
*/
PERF_TEST_P( Size_MatType_ROp, reduceC,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( TYPICAL_MAT_TYPES ),
        testing::Values( CV_REDUCE_SUM, CV_REDUCE_AVG, CV_REDUCE_MAX, CV_REDUCE_MIN )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int reduceOp = std::tr1::get<2>(GetParam());

    int ddepth = -1;
    if( CV_MAT_DEPTH(matType) < CV_32S && (reduceOp == CV_REDUCE_SUM || reduceOp == CV_REDUCE_AVG) )
        ddepth = CV_32S;
    Mat src(sz, matType);
    Mat vec;

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { reduce(src, vec, 1, reduceOp, ddepth); }

    SANITY_CHECK(vec);
}
@@ -0,0 +1,111 @@
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

CV_ENUM(CvtMode, CV_YUV2BGR, CV_YUV2RGB, //YUV
    CV_YUV420i2BGR, CV_YUV420i2RGB, CV_YUV420sp2BGR, CV_YUV420sp2RGB, //YUV420
    CV_RGB2GRAY, CV_RGBA2GRAY, CV_BGR2GRAY, CV_BGRA2GRAY, //Gray
    CV_GRAY2RGB, CV_GRAY2RGBA/*, CV_GRAY2BGR, CV_GRAY2BGRA*/ //Gray2
)

typedef std::tr1::tuple<Size, CvtMode> Size_CvtMode_t;
typedef perf::TestBaseWithParam<Size_CvtMode_t> Size_CvtMode;

typedef std::tr1::tuple<Size, CvtMode, int> Size_CvtMode_OutChNum_t;
typedef perf::TestBaseWithParam<Size_CvtMode_OutChNum_t> Size_CvtMode_OutChNum;


/*
// void cvtColor(InputArray src, OutputArray dst, int code, int dstCn=0 )
*/

PERF_TEST_P( Size_CvtMode_OutChNum, cvtColorYUV,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( (int)CV_YUV2BGR, (int)CV_YUV2RGB ),
        testing::Values( 3, 4 )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());
    int ch = std::tr1::get<2>(GetParam());

    Mat src(sz, CV_8UC3);
    Mat dst(sz, CV_8UC(ch));

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode, ch); }

    SANITY_CHECK(dst);
}


PERF_TEST_P( Size_CvtMode_OutChNum, cvtColorYUV420,
    testing::Combine(
        testing::Values( szVGA, sz720p, sz1080p, Size(130, 60) ),
        testing::Values( (int)CV_YUV420i2BGR, (int)CV_YUV420i2RGB, (int)CV_YUV420sp2BGR, (int)CV_YUV420sp2RGB ),
        testing::Values( 3, 4 )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());
    int ch = std::tr1::get<2>(GetParam());

    // YUV420 stores a full-resolution luma plane followed by subsampled
    // chroma, so the single-channel source buffer is 1.5x the image height
    Mat src(sz.height + sz.height/2, sz.width, CV_8UC1);
    Mat dst(sz, CV_8UC(ch));

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode, ch); }

    SANITY_CHECK(dst);
}


PERF_TEST_P( Size_CvtMode, cvtColorGray,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( (int)CV_RGB2GRAY, (int)CV_RGBA2GRAY, (int)CV_BGR2GRAY, (int)CV_BGRA2GRAY )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());

    Mat src(sz, CV_8UC((mode==CV_RGBA2GRAY || mode==CV_BGRA2GRAY) ? 4 : 3));
    Mat dst(sz, CV_8UC1);

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode); }

    SANITY_CHECK(dst);
}


PERF_TEST_P( Size_CvtMode, cvtColorGray2,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( (int)CV_GRAY2RGB, (int)CV_GRAY2RGBA/*, CV_GRAY2BGR, CV_GRAY2BGRA*/ )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());

    Mat src(sz, CV_8UC1);
    Mat dst(sz, CV_8UC((mode==CV_GRAY2RGBA || mode==CV_GRAY2BGRA) ? 4 : 3));

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode); }

    SANITY_CHECK(dst);
}
@@ -0,0 +1,55 @@
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

typedef tr1::tuple<MatType, Size, Size> MatInfo_Size_Size_t;
typedef TestBaseWithParam<MatInfo_Size_Size_t> MatInfo_Size_Size;

PERF_TEST_P(MatInfo_Size_Size, resizeUpLinear,
    testing::Values(
        MatInfo_Size_Size_t(CV_8UC1, szVGA, szqHD),
        MatInfo_Size_Size_t(CV_8UC1, szVGA, sz720p),
        MatInfo_Size_Size_t(CV_8UC4, szVGA, sz720p)
    )
)
{
    int matType = tr1::get<0>(GetParam());
    Size from = tr1::get<1>(GetParam());
    Size to = tr1::get<2>(GetParam());

    cv::Mat src(from, matType);
    cv::Mat dst(to, matType);

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) cv::resize(src, dst, to);

    SANITY_CHECK(dst);
}

PERF_TEST_P(MatInfo_Size_Size, resizeDownLinear,
    testing::Values(
        MatInfo_Size_Size_t(CV_8UC1, szVGA, szQVGA),
        MatInfo_Size_Size_t(CV_8UC4, szqHD, szVGA),
        MatInfo_Size_Size_t(CV_8UC1, sz720p, Size(120 * sz720p.width / sz720p.height, 120)), //face detection min_face_size = 20%
        MatInfo_Size_Size_t(CV_8UC4, sz720p, szVGA),
        MatInfo_Size_Size_t(CV_8UC4, sz720p, szQVGA)
    )
)
{
    int matType = tr1::get<0>(GetParam());
    Size from = tr1::get<1>(GetParam());
    Size to = tr1::get<2>(GetParam());

    cv::Mat src(from, matType);
    cv::Mat dst(to, matType);

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) cv::resize(src, dst, to);

    SANITY_CHECK(dst);
}
@@ -0,0 +1,44 @@
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

typedef std::tr1::tuple<Size, MatType, MatDepth> Size_MatType_OutMatDepth_t;
typedef perf::TestBaseWithParam<Size_MatType_OutMatDepth_t> Size_MatType_OutMatDepth;

/*
// void integral(InputArray image, OutputArray sum, int sdepth=-1 )
*/
PERF_TEST_P( Size_MatType_OutMatDepth, integral1,
    testing::Combine(
        testing::Values( TYPICAL_MAT_SIZES ),
        testing::Values( CV_8UC1, CV_8UC4 ),
        testing::Values( CV_32S, CV_32F, CV_64F )
    )
)
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int sdepth = std::tr1::get<2>(GetParam());

    Mat src(sz, matType);
    Mat sum(sz, sdepth);

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { integral(src, sum, sdepth); }

    SANITY_CHECK(sum);
}



/*
// void integral(InputArray image, OutputArray sum, OutputArray sqsum, int sdepth=-1 )
*/


/*
// void integral(InputArray image, OutputArray sum, OutputArray sqsum, OutputArray tilted, int sdepth=-1 )
*/
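// The two signatures above are placeholders in this commit. A minimal sketch
// of how the sum+sqsum overload could be covered, mirroring integral1 exactly
// (the test name "integral2" and its body are hypothetical, not part of the commit):
//
//   PERF_TEST_P( Size_MatType_OutMatDepth, integral2,
//       testing::Combine(
//           testing::Values( TYPICAL_MAT_SIZES ),
//           testing::Values( CV_8UC1, CV_8UC4 ),
//           testing::Values( CV_32S, CV_32F, CV_64F )
//       )
//   )
//   {
//       Size sz = std::tr1::get<0>(GetParam());
//       int matType = std::tr1::get<1>(GetParam());
//       int sdepth = std::tr1::get<2>(GetParam());
//
//       Mat src(sz, matType);
//       Mat sum(sz, sdepth);
//       Mat sqsum(sz, CV_64F); // squared sums need a wide depth
//
//       declare.in(src, WARMUP_RNG);
//
//       TEST_CYCLE(100) { integral(src, sum, sqsum, sdepth); }
//
//       SANITY_CHECK(sum);
//       SANITY_CHECK(sqsum);
//   }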
@@ -0,0 +1,3 @@
#include "perf_precomp.hpp"

CV_PERF_TEST_MAIN(imgproc)
@@ -0,0 +1 @@
#include "perf_precomp.hpp"
@@ -0,0 +1,11 @@
#ifndef __OPENCV_PERF_PRECOMP_HPP__
#define __OPENCV_PERF_PRECOMP_HPP__

#include "opencv2/ts/ts.hpp"
#include "opencv2/imgproc/imgproc.hpp"

#if GTEST_CREATE_SHARED_LIBRARY
#error no modules except ts should have GTEST_CREATE_SHARED_LIBRARY defined
#endif

#endif
@@ -1,4 +1,9 @@
if(BUILD_SHARED_LIBS)
    add_definitions(-DGTEST_CREATE_SHARED_LIBRARY=1)
    if (MSVC)
        add_definitions( "/wd4251 /wd4275")
    endif()
else()
    add_definitions(-DGTEST_CREATE_SHARED_LIBRARY=0)
endif()
define_opencv_module(ts opencv_core)
File diff suppressed because it is too large
@@ -0,0 +1,444 @@
#ifndef __OPENCV_TS_PERF_HPP__
#define __OPENCV_TS_PERF_HPP__

#include "opencv2/core/core.hpp"
#include "ts_gtest.h"

#if defined(ANDROID) && defined(USE_ANDROID_LOGGING)
#include <android/log.h>

#define PERF_TESTS_LOG_TAG "OpenCV_perf"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, PERF_TESTS_LOG_TAG, __VA_ARGS__))
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, PERF_TESTS_LOG_TAG, __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, PERF_TESTS_LOG_TAG, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, PERF_TESTS_LOG_TAG, __VA_ARGS__))
#else
#define LOGD(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n"); fflush(stdout);} while(0)
#define LOGI(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n"); fflush(stdout);} while(0)
#define LOGW(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n"); fflush(stdout);} while(0)
#define LOGE(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n"); fflush(stdout);} while(0)
#endif


namespace perf
{

/*****************************************************************************************\
*            Predefined typical frame sizes and typical test parameters                  *
\*****************************************************************************************/
const cv::Size szQVGA = cv::Size(320, 240);
const cv::Size szVGA = cv::Size(640, 480);
const cv::Size szSVGA = cv::Size(800, 600);
const cv::Size szXGA = cv::Size(1024, 768);
const cv::Size szSXGA = cv::Size(1280, 1024);

const cv::Size sznHD = cv::Size(640, 360);
const cv::Size szqHD = cv::Size(960, 540);
const cv::Size sz720p = cv::Size(1280, 720);
const cv::Size sz1080p = cv::Size(1920, 1080);

const cv::Size szODD = cv::Size(127, 61);

#define SZ_ALL_VGA ::testing::Values(::perf::szQVGA, ::perf::szVGA, ::perf::szSVGA)
#define SZ_ALL_GA  ::testing::Values(::perf::szQVGA, ::perf::szVGA, ::perf::szSVGA, ::perf::szXGA, ::perf::szSXGA)
#define SZ_ALL_HD  ::testing::Values(::perf::sznHD, ::perf::szqHD, ::perf::sz720p, ::perf::sz1080p)
#define SZ_ALL     ::testing::Values(::perf::szQVGA, ::perf::szVGA, ::perf::szSVGA, ::perf::szXGA, ::perf::szSXGA, ::perf::sznHD, ::perf::szqHD, ::perf::sz720p, ::perf::sz1080p)
#define SZ_TYPICAL ::testing::Values(::perf::szVGA, ::perf::szqHD, ::perf::sz720p, ::perf::szODD)


#define TYPICAL_MAT_SIZES ::perf::szVGA, ::perf::sz720p, ::perf::sz1080p, ::perf::szODD
#define TYPICAL_MAT_TYPES CV_8UC1, CV_8UC4, CV_32FC1
#define TYPICAL_MATS    testing::Combine( testing::Values( TYPICAL_MAT_SIZES ), testing::Values( TYPICAL_MAT_TYPES ) )
#define TYPICAL_MATS_C1 testing::Combine( testing::Values( TYPICAL_MAT_SIZES ), testing::Values( CV_8UC1, CV_32FC1 ) )
#define TYPICAL_MATS_C4 testing::Combine( testing::Values( TYPICAL_MAT_SIZES ), testing::Values( CV_8UC4 ) )


/*****************************************************************************************\
*            MatType - printable wrapper over integer 'type' of Mat                      *
\*****************************************************************************************/
class MatType
{
public:
    MatType(int val=0) : _type(val) {}
    operator int() const {return _type;}

private:
    int _type;
};

/*****************************************************************************************\
*     CV_ENUM and CV_FLAGS - macros to create printable wrappers for defines and enums   *
\*****************************************************************************************/

#define CV_ENUM(class_name, ...) \
class CV_EXPORTS class_name {\
public:\
    class_name(int val = 0) : _val(val) {}\
    operator int() const {return _val;}\
    void PrintTo(std::ostream* os) const {\
        const int vals[] = {__VA_ARGS__};\
        const char* svals = #__VA_ARGS__;\
        for(int i = 0, pos = 0; i < (int)(sizeof(vals)/sizeof(int)); ++i){\
            while(isspace(svals[pos]) || svals[pos] == ',') ++pos;\
            int start = pos;\
            while(!(isspace(svals[pos]) || svals[pos] == ',' || svals[pos] == 0)) ++pos;\
            if (_val == vals[i]) {\
                *os << std::string(svals + start, svals + pos);\
                return;\
            }\
        }\
        *os << "UNKNOWN";\
    }\
private: int _val;\
};\
inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }

CV_ENUM(MatDepth, CV_8U, CV_8S, CV_16U, CV_16S, CV_32S, CV_32F, CV_64F, CV_USRTYPE1)

#define CV_FLAGS(class_name, ...) \
class CV_EXPORTS class_name {\
public:\
    class_name(int val = 0) : _val(val) {}\
    operator int() const {return _val;}\
    void PrintTo(std::ostream* os) const {\
        const int vals[] = {__VA_ARGS__};\
        const char* svals = #__VA_ARGS__;\
        int value = _val;\
        bool first = true;\
        for(int i = 0, pos = 0; i < (int)(sizeof(vals)/sizeof(int)); ++i){\
            while(isspace(svals[pos]) || svals[pos] == ',') ++pos;\
            int start = pos;\
            while(!(isspace(svals[pos]) || svals[pos] == ',' || svals[pos] == 0)) ++pos;\
            if ((value & vals[i]) == vals[i]) {\
                value &= ~vals[i]; \
                if (first) first = false; else *os << "|"; \
                *os << std::string(svals + start, svals + pos);\
                if (!value) return;\
            }\
        }\
        if (first) *os << "UNKNOWN";\
    }\
private: int _val;\
};\
inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }

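// As a quick illustration (a sketch, not part of the original header): CV_ENUM
// prints the one matching name, while CV_FLAGS decomposes an OR-ed value, so
// googletest parameter listings stay readable. "DemoDepth" and "DemoNorm" are
// hypothetical wrapper names:
//
//   CV_ENUM(DemoDepth, CV_8U, CV_32F)
//   CV_FLAGS(DemoNorm, NORM_RELATIVE, NORM_L2)
//
//   std::ostringstream a, b;
//   PrintTo(DemoDepth(CV_32F), &a);                 // a.str() == "CV_32F"
//   PrintTo(DemoNorm(NORM_RELATIVE | NORM_L2), &b); // b.str() == "NORM_RELATIVE|NORM_L2"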
/*****************************************************************************************\
*                 Regression control utility for performance testing                     *
\*****************************************************************************************/
class CV_EXPORTS Regression
{
public:
    static Regression& add(const std::string& name, cv::InputArray array, double eps = DBL_EPSILON);
    static void Init(const std::string& testSuitName, const std::string& ext = ".xml");

    Regression& operator() (const std::string& name, cv::InputArray array, double eps = DBL_EPSILON);

private:
    static Regression& instance();
    Regression();
    ~Regression();

    Regression(const Regression&);
    Regression& operator=(const Regression&);

    cv::RNG regRNG; //own random number generator so that collection and verification behave identically
    std::string storageInPath;
    std::string storageOutPath;
    cv::FileStorage storageIn;
    cv::FileStorage storageOut;
    cv::FileNode rootIn;
    std::string currentTestNodeName;
    cv::FileStorage& write();

    static std::string getCurrentTestNodeName();
    static bool isVector(cv::InputArray a);
    static double getElem(cv::Mat& m, int x, int y, int cn = 0);

    void init(const std::string& testSuitName, const std::string& ext);
    void write(cv::InputArray array);
    void write(cv::Mat m);
    void verify(cv::FileNode node, cv::InputArray array, double eps);
    void verify(cv::FileNode node, cv::Mat actual, double eps, std::string argname);
};

#define SANITY_CHECK(array, ...) ::perf::Regression::add(#array, array , ## __VA_ARGS__)

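// Typical use inside a test body (a sketch; the eps value is illustrative):
//
//   cv::Mat result;             // produced by the code under test
//   SANITY_CHECK(result);       // records on the first run, verifies afterwards
//   SANITY_CHECK(result, 1e-6); // the variadic argument is forwarded to the
//                               // eps parameter of Regression::add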

/*****************************************************************************************\
*                          Container for performance metrics                             *
\*****************************************************************************************/
typedef struct CV_EXPORTS performance_metrics
{
    size_t bytesIn;
    size_t bytesOut;
    unsigned int samples;
    unsigned int outliers;
    double gmean;
    double gstddev; //stddev for log(time)
    double mean;
    double stddev;
    double median;
    double min;
    double frequency;
    int terminationReason;

    enum
    {
        TERM_ITERATIONS = 0,
        TERM_TIME = 1,
        TERM_UNKNOWN = 2
    };

    performance_metrics();
} performance_metrics;


/*****************************************************************************************\
*                          Base fixture for performance tests                            *
\*****************************************************************************************/
class CV_EXPORTS TestBase: public ::testing::Test
{
public:
    TestBase();

    static void Init(int argc, const char* const argv[]);

protected:
    virtual void PerfTestBody() = 0;

    virtual void SetUp();
    virtual void TearDown();

    void startTimer();
    void stopTimer();
    bool next();

    std::string getDataPath(const std::string& relativePath);

    //_declareHelper declare;

    enum
    {
        WARMUP_READ,
        WARMUP_WRITE,
        WARMUP_RNG,
        WARMUP_NONE
    };
    static void warmup(cv::InputOutputArray a, int wtype = WARMUP_READ);

    performance_metrics& calcMetrics();
    void reportMetrics(bool toJUnitXML = false);
private:
    typedef std::vector<std::pair<int, cv::Size> > SizeVector;
    typedef std::vector<int64> TimeVector;

    SizeVector inputData;
    SizeVector outputData;
    unsigned int getTotalInputSize() const;
    unsigned int getTotalOutputSize() const;

    TimeVector times;
    int64 lastTime;
    int64 totalTime;
    int64 timeLimit;
    static int64 timeLimitDefault;

    unsigned int nIters;
    unsigned int currentIter;

    performance_metrics metrics;
    void validateMetrics();

    static int64 _timeadjustment;
    static int64 _calibrate();

    static void warmup(cv::Mat m, int wtype);
    static int getSizeInBytes(cv::InputArray a);
    static cv::Size getSize(cv::InputArray a);
    static void declareArray(SizeVector& sizes, cv::InputOutputArray a, int wtype = 0);

    class CV_EXPORTS _declareHelper
    {
    public:
        _declareHelper& in(cv::InputOutputArray a1, int wtype = WARMUP_READ);
        _declareHelper& in(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype = WARMUP_READ);
        _declareHelper& in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype = WARMUP_READ);
        _declareHelper& in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype = WARMUP_READ);

        _declareHelper& out(cv::InputOutputArray a1, int wtype = WARMUP_WRITE);
        _declareHelper& out(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype = WARMUP_WRITE);
        _declareHelper& out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype = WARMUP_WRITE);
        _declareHelper& out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype = WARMUP_WRITE);

        _declareHelper& iterations(int n);
        _declareHelper& time(double timeLimitSecs);
    private:
        TestBase* test;
        _declareHelper(TestBase* t);
        _declareHelper(const _declareHelper&);
        _declareHelper& operator=(const _declareHelper&);
        friend class TestBase;
    };
    friend class _declareHelper;

public:
    _declareHelper declare;
};

template<typename T> class TestBaseWithParam: public TestBase, public ::testing::WithParamInterface<T> {};

typedef std::tr1::tuple<cv::Size, MatType> Size_MatType_t;
typedef TestBaseWithParam<Size_MatType_t> Size_MatType;

/*****************************************************************************************\
*                          Print functions for googletest                                *
\*****************************************************************************************/
CV_EXPORTS void PrintTo(const MatType& t, std::ostream* os);

} //namespace perf

namespace cv
{

CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);

} //namespace cv


/*****************************************************************************************\
*                       Macro definitions for performance tests                          *
\*****************************************************************************************/
#define PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name) \
    test_case_name##_##test_name##_perf_namespace_proxy

// Defines a performance test.
//
// The first parameter is the name of the test case, and the second
// parameter is the name of the test within the test case.
//
// The user should put their test code between braces after using this
// macro. Example:
//
//   PERF_TEST(FooTest, InitializesCorrectly) {
//     Foo foo;
//     EXPECT_TRUE(foo.StatusIsOK());
//   }
#define PERF_TEST(test_case_name, test_name)\
    namespace PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name) {\
     class TestBase {/*compile error for this class means that you are trying to use perf::TestBase as a fixture*/};\
     class test_case_name : public ::perf::TestBase {\
      public:\
       test_case_name() {}\
      protected:\
       virtual void PerfTestBody();\
     };\
     TEST_F(test_case_name, test_name){\
      try {\
       PerfTestBody();\
      }catch(cv::Exception& e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws:\n  " << e.what(); }\
      catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws."; }\
     }\
    }\
    void PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name)::test_case_name::PerfTestBody()

// Defines a performance test that uses a test fixture.
//
// The first parameter is the name of the test fixture class, which
// also doubles as the test case name. The second parameter is the
// name of the test within the test case.
//
// A test fixture class must be declared earlier. The user should put
// their test code between braces after using this macro. Example:
//
//   class FooTest : public ::perf::TestBase {
//    protected:
//     virtual void SetUp() { TestBase::SetUp(); b_.AddElement(3); }
//
//     Foo a_;
//     Foo b_;
//   };
//
//   PERF_TEST_F(FooTest, InitializesCorrectly) {
//     EXPECT_TRUE(a_.StatusIsOK());
//   }
//
//   PERF_TEST_F(FooTest, ReturnsElementCountCorrectly) {
//     EXPECT_EQ(0, a_.size());
//     EXPECT_EQ(1, b_.size());
//   }
#define PERF_TEST_F(fixture, testname) \
    namespace PERF_PROXY_NAMESPACE_NAME_(fixture, testname) {\
     class TestBase {/*compile error for this class means that you are trying to use perf::TestBase as a fixture*/};\
     class fixture : public ::fixture {\
      public:\
       fixture() {}\
      protected:\
       virtual void PerfTestBody();\
     };\
     TEST_F(fixture, testname){\
      try {\
       PerfTestBody();\
      }catch(cv::Exception& e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws:\n  " << e.what(); }\
      catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws."; }\
     }\
    }\
    void PERF_PROXY_NAMESPACE_NAME_(fixture, testname)::fixture::PerfTestBody()

// Defines a parametrized performance test.
//
// The first parameter is the name of the test fixture class, which
// also doubles as the test case name. The second parameter is the
// name of the test within the test case.
//
// The user should put their test code between braces after using this
// macro. Example:
//
//   typedef ::perf::TestBaseWithParam<cv::Size> FooTest;
//
//   PERF_TEST_P(FooTest, DoTestingRight, ::testing::Values(::perf::szVGA, ::perf::sz720p)) {
//     cv::Mat b(GetParam(), CV_8U, cv::Scalar(10));
//     cv::Mat a(GetParam(), CV_8U, cv::Scalar(20));
//     cv::Mat c(GetParam(), CV_8U, cv::Scalar(0));
//
//     declare.in(a, b).out(c).time(0.5);
//
//     SIMPLE_TEST_CYCLE() cv::add(a, b, c);
//
//     SANITY_CHECK(c);
//   }
#define PERF_TEST_P(fixture, name, params)  \
    class fixture##_##name : public ::fixture {\
     public:\
      fixture##_##name() {}\
     protected:\
      virtual void PerfTestBody();\
    };\
    TEST_P(fixture##_##name, name /*perf*/){\
     try {\
      PerfTestBody();\
     }catch(cv::Exception& e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws:\n  " << e.what(); }\
     catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws."; }\
    }\
    INSTANTIATE_TEST_CASE_P(/*none*/, fixture##_##name, params);\
    void fixture##_##name::PerfTestBody()


#define CV_PERF_TEST_MAIN(testsuitname) \
int main(int argc, char **argv)\
{\
    ::perf::Regression::Init(#testsuitname);\
    ::perf::TestBase::Init(argc, argv);\
    ::testing::InitGoogleTest(&argc, argv);\
    return RUN_ALL_TESTS();\
}

#define TEST_CYCLE(n) for(declare.iterations(n); startTimer(), next(); stopTimer())
#define SIMPLE_TEST_CYCLE() for(; startTimer(), next(); stopTimer())

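// To make the timing loop concrete (a sketch of the macro expansion above,
// not extra API): TEST_CYCLE(100) cv::bitwise_not(a, c); unfolds to
//
//   for(declare.iterations(100); startTimer(), next(); stopTimer())
//       cv::bitwise_not(a, c);   // the measured region is the loop body
//
// next() records one timing sample per pass and stops the loop once the
// iteration cap or the per-test time limit is reached.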

//flags
namespace perf
{
//GTEST_DECLARE_int32_(allowed_outliers);
} //namespace perf

#endif //__OPENCV_TS_PERF_HPP__
@@ -0,0 +1,180 @@
import testlog_parser, sys, os, xml, re
from table_formatter import *
from optparse import OptionParser

cvsize_re = re.compile("^\d+x\d+$")
cvtype_re = re.compile("^(8U|8S|16U|16S|32S|32F|64F)C\d{1,3}$")

def keyselector(a):
    if cvsize_re.match(a):
        size = [int(d) for d in a.split('x')]
        return size[0] * size[1]
    elif cvtype_re.match(a):
        # map a type name like "8UC3" onto a sortable integer:
        # depth in the high bits, channel count in the low bits
        depth = 7
        if a[0] == '8':
            depth = (0, 1) [a[1] == 'S']
        elif a[0] == '1':
            depth = (2, 3) [a[2] == 'S']
        elif a[2] == 'S':
            depth = 4
        elif a[0] == '3':
            depth = 5
        elif a[0] == '6':
            depth = 6
        channels = int(a[a.index('C') + 1:])
        #return (depth & 7) + ((channels - 1) << 3)
        return ((channels - 1) & 511) + (depth << 8)
    return a

def getValueParams(test):
    param = test.get("value_param")
    if not param:
        return []
    if param.startswith("("):
        param = param[1:]
    if param.endswith(")"):
        param = param[:-1]
    return [p.strip() for p in param.split(",")]

def nextPermutation(indexes, lists, x, y):
    idx = len(indexes) - 1
    while idx >= 0:
        while idx == x or idx == y:
            idx -= 1
        if idx < 0:
            return False
        v = indexes[idx] + 1
        if v < len(lists[idx]):
            indexes[idx] = v
            return True
        else:
            indexes[idx] = 0
            idx -= 1
    return False

def getTestWideName(sname, indexes, lists, x, y):
    name = sname + "::("
    for i in range(len(indexes)):
        if i > 0:
            name += ", "
        if i == x:
            name += "X"
        elif i == y:
            name += "Y"
        else:
            name += lists[i][indexes[i]]
    return str(name + ")")

def getTest(stests, x, y, row, col):
    for pair in stests:
        if pair[1][x] == row and pair[1][y] == col:
            return pair[0]
    return None

if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("-o", "--output", dest="format", help="output results in text format (can be 'txt', 'html' or 'auto' - default)", metavar="FMT", default="auto")
    parser.add_option("-u", "--units", dest="units", help="units for output values (s, ms (default), mks, ns or ticks)", metavar="UNITS", default="ms")
    parser.add_option("-m", "--metric", dest="metric", help="output metric", metavar="NAME", default="gmean")
    parser.add_option("-x", "", dest="x", help="argument number for rows", metavar="ROW", default=1)
    parser.add_option("-y", "", dest="y", help="argument number for columns", metavar="COL", default=0)
    parser.add_option("-f", "--filter", dest="filter", help="regex to filter tests", metavar="REGEX", default=None)
    (options, args) = parser.parse_args()

    if len(args) != 1:
        print >> sys.stderr, "Usage:\n", os.path.basename(sys.argv[0]), "<log_name1>.xml"
        exit(1)

    options.generateHtml = detectHtmlOutputType(options.format)
    args[0] = os.path.basename(args[0])
    if options.metric not in metrix_table:
        options.metric = "gmean"
    if options.metric.endswith("%"):
        options.metric = options.metric[:-1]
    getter = metrix_table[options.metric][1]

    tests = testlog_parser.parseLogFile(args[0])
    if options.filter:
        expr = re.compile(options.filter)
        tests = [(t, getValueParams(t)) for t in tests if expr.search(str(t))]

    if not tests:
        print >> sys.stderr, "Error - no tests matched"
        exit(1)

    argsnum = len(tests[0][1])
    sname = tests[0][0].shortName()

    arglists = []
    for i in range(argsnum):
        arglists.append({})

    for pair in tests:
        if len(pair[1]) != argsnum:
            print >> sys.stderr, "Error - unable to create chart tables for functions having different argument numbers"
            exit(1)
        if pair[0].shortName() != sname:
            print >> sys.stderr, "Error - unable to create chart tables for functions from different test suites:"
            print >> sys.stderr, "First: ", sname
            print >> sys.stderr, "Second:", pair[0].shortName()
            exit(1)
        for i in range(argsnum):
            arglists[i][pair[1][i]] = 1

    for i in range(argsnum):
        arglists[i] = sorted([str(key) for key in arglists[i].iterkeys()], key=keyselector)

    if options.generateHtml:
        htmlPrintHeader(sys.stdout, "Report %s for %s" % (args[0], sname))

    indexes = [0] * argsnum
    x = int(options.x)
    y = int(options.y)
    if x == y or x < 0 or y < 0 or x >= argsnum or y >= argsnum:
        x = 1
        y = 0

    while True:
        stests = []
        for pair in tests:
            t = pair[0]
            v = pair[1]
            for i in range(argsnum):
                if i != x and i != y:
                    if v[i] != arglists[i][indexes[i]]:
                        t = None
                        break
            if t:
                stests.append(pair)

        tbl = table(metrix_table[options.metric][0] + " for\n" + getTestWideName(sname, indexes, arglists, x, y))
        tbl.newColumn("x", "X\Y")
        for col in arglists[y]:
            tbl.newColumn(col, col, align="center")
        for row in arglists[x]:
            tbl.newRow()
            tbl.newCell("x", row)
            for col in arglists[y]:
                case = getTest(stests, x, y, row, col)
                if case:
                    status = case.get("status")
                    if status != "run":
                        tbl.newCell(col, status, color="red")
                    else:
                        val = getter(case, None, options.units)
                        if isinstance(val, float):
                            tbl.newCell(col, "%.2f %s" % (val, options.units), val)
                        else:
                            tbl.newCell(col, val, val)
                else:
                    tbl.newCell(col, "-")

        if options.generateHtml:
            tbl.htmlPrintTable(sys.stdout)
        else:
            tbl.consolePrintTable(sys.stdout)
        if not nextPermutation(indexes, arglists, x, y):
            break

    if options.generateHtml:
        htmlPrintFooter(sys.stdout)
@ -0,0 +1,384 @@ |
||||
import math, os, sys |
||||
|
||||
webcolors = { |
||||
"indianred": "#cd5c5c", |
||||
"lightcoral": "#f08080", |
||||
"salmon": "#fa8072", |
||||
"darksalmon": "#e9967a", |
||||
"lightsalmon": "#ffa07a", |
||||
"red": "#ff0000", |
||||
"crimson": "#dc143c", |
||||
"firebrick": "#b22222", |
||||
"darkred": "#8b0000", |
||||
"pink": "#ffc0cb", |
||||
"lightpink": "#ffb6c1", |
||||
"hotpink": "#ff69b4", |
||||
"deeppink": "#ff1493", |
||||
"mediumvioletred": "#c71585", |
||||
"palevioletred": "#db7093", |
||||
"lightsalmon": "#ffa07a", |
||||
"coral": "#ff7f50", |
||||
"tomato": "#ff6347", |
||||
"orangered": "#ff4500", |
||||
"darkorange": "#ff8c00", |
||||
"orange": "#ffa500", |
||||
"gold": "#ffd700", |
||||
"yellow": "#ffff00", |
||||
"lightyellow": "#ffffe0", |
||||
"lemonchiffon": "#fffacd", |
||||
"lightgoldenrodyellow": "#fafad2", |
||||
"papayawhip": "#ffefd5", |
||||
"moccasin": "#ffe4b5", |
||||
"peachpuff": "#ffdab9", |
||||
"palegoldenrod": "#eee8aa", |
||||
"khaki": "#f0e68c", |
||||
"darkkhaki": "#bdb76b", |
||||
"lavender": "#e6e6fa", |
||||
"thistle": "#d8bfd8", |
||||
"plum": "#dda0dd", |
||||
"violet": "#ee82ee", |
||||
"orchid": "#da70d6", |
||||
"fuchsia": "#ff00ff", |
||||
"magenta": "#ff00ff", |
||||
"mediumorchid": "#ba55d3", |
||||
"mediumpurple": "#9370db", |
||||
"blueviolet": "#8a2be2", |
||||
"darkviolet": "#9400d3", |
||||
"darkorchid": "#9932cc", |
||||
"darkmagenta": "#8b008b", |
||||
"purple": "#800080", |
||||
"indigo": "#4b0082", |
||||
"darkslateblue": "#483d8b", |
||||
"slateblue": "#6a5acd", |
||||
"mediumslateblue": "#7b68ee", |
||||
"greenyellow": "#adff2f", |
||||
"chartreuse": "#7fff00", |
||||
"lawngreen": "#7cfc00", |
||||
"lime": "#00ff00", |
||||
"limegreen": "#32cd32", |
||||
"palegreen": "#98fb98", |
||||
"lightgreen": "#90ee90", |
||||
"mediumspringgreen": "#00fa9a", |
||||
"springgreen": "#00ff7f", |
||||
"mediumseagreen": "#3cb371", |
||||
"seagreen": "#2e8b57", |
||||
"forestgreen": "#228b22", |
||||
"green": "#008000", |
||||
"darkgreen": "#006400", |
||||
"yellowgreen": "#9acd32", |
||||
"olivedrab": "#6b8e23", |
||||
"olive": "#808000", |
||||
"darkolivegreen": "#556b2f", |
||||
"mediumaquamarine": "#66cdaa", |
||||
"darkseagreen": "#8fbc8f", |
||||
"lightseagreen": "#20b2aa", |
||||
"darkcyan": "#008b8b", |
||||
"teal": "#008080", |
||||
"aqua": "#00ffff", |
||||
"cyan": "#00ffff", |
||||
"lightcyan": "#e0ffff", |
||||
"paleturquoise": "#afeeee", |
||||
"aquamarine": "#7fffd4", |
||||
"turquoise": "#40e0d0", |
||||
"mediumturquoise": "#48d1cc", |
||||
"darkturquoise": "#00ced1", |
||||
"cadetblue": "#5f9ea0", |
||||
"steelblue": "#4682b4", |
||||
"lightsteelblue": "#b0c4de", |
||||
"powderblue": "#b0e0e6", |
||||
"lightblue": "#add8e6", |
||||
"skyblue": "#87ceeb", |
||||
"lightskyblue": "#87cefa", |
||||
"deepskyblue": "#00bfff", |
||||
"dodgerblue": "#1e90ff", |
||||
"cornflowerblue": "#6495ed", |
||||
"royalblue": "#4169e1", |
||||
"blue": "#0000ff", |
||||
"mediumblue": "#0000cd", |
||||
"darkblue": "#00008b", |
||||
"navy": "#000080", |
||||
"midnightblue": "#191970", |
||||
"cornsilk": "#fff8dc", |
||||
"blanchedalmond": "#ffebcd", |
||||
"bisque": "#ffe4c4", |
||||
"navajowhite": "#ffdead", |
||||
"wheat": "#f5deb3", |
||||
"burlywood": "#deb887", |
||||
"tan": "#d2b48c", |
||||
"rosybrown": "#bc8f8f", |
||||
"sandybrown": "#f4a460", |
||||
"goldenrod": "#daa520", |
||||
"darkgoldenrod": "#b8860b", |
||||
"peru": "#cd853f", |
||||
"chocolate": "#d2691e", |
||||
"saddlebrown": "#8b4513", |
||||
"sienna": "#a0522d", |
||||
"brown": "#a52a2a", |
||||
"maroon": "#800000", |
||||
"white": "#ffffff", |
||||
"snow": "#fffafa", |
||||
"honeydew": "#f0fff0", |
||||
"mintcream": "#f5fffa", |
||||
"azure": "#f0ffff", |
||||
"aliceblue": "#f0f8ff", |
||||
"ghostwhite": "#f8f8ff", |
||||
"whitesmoke": "#f5f5f5", |
||||
"seashell": "#fff5ee", |
||||
"beige": "#f5f5dc", |
||||
"oldlace": "#fdf5e6", |
||||
"floralwhite": "#fffaf0", |
||||
"ivory": "#fffff0", |
||||
"antiquewhite": "#faebd7", |
||||
"linen": "#faf0e6", |
||||
"lavenderblush": "#fff0f5", |
||||
"mistyrose": "#ffe4e1", |
||||
"gainsboro": "#dcdcdc", |
||||
"lightgrey": "#d3d3d3", |
||||
"silver": "#c0c0c0", |
||||
"darkgray": "#a9a9a9", |
||||
"gray": "#808080", |
||||
"dimgray": "#696969", |
||||
"lightslategray": "#778899", |
||||
"slategray": "#708090", |
||||
"darkslategray": "#2f4f4f", |
||||
"black": "#000000", |
||||
} |
||||
|
||||
if os.name == "nt": |
||||
consoleColors = [ |
||||
"#000000", #{ 0, 0, 0 },//0 - black |
||||
"#000080", #{ 0, 0, 128 },//1 - navy |
||||
"#008000", #{ 0, 128, 0 },//2 - green |
||||
"#008080", #{ 0, 128, 128 },//3 - teal |
||||
"#800000", #{ 128, 0, 0 },//4 - maroon |
||||
"#800080", #{ 128, 0, 128 },//5 - purple |
||||
"#808000", #{ 128, 128, 0 },//6 - olive |
||||
"#C0C0C0", #{ 192, 192, 192 },//7 - silver |
||||
"#808080", #{ 128, 128, 128 },//8 - gray |
||||
"#0000FF", #{ 0, 0, 255 },//9 - blue |
||||
"#00FF00", #{ 0, 255, 0 },//a - lime |
||||
"#00FFFF", #{ 0, 255, 255 },//b - cyan |
||||
"#FF0000", #{ 255, 0, 0 },//c - red |
||||
"#FF00FF", #{ 255, 0, 255 },//d - magenta |
||||
"#FFFF00", #{ 255, 255, 0 },//e - yellow |
||||
"#FFFFFF", #{ 255, 255, 255 } //f - white |
||||
] |
||||
else: |
||||
consoleColors = [ |
||||
"#2e3436", |
||||
"#cc0000", |
||||
"#4e9a06", |
||||
"#c4a000", |
||||
"#3465a4", |
||||
"#75507b", |
||||
"#06989a", |
||||
"#d3d7cf", |
||||
"#ffffff", |
||||
|
||||
"#555753", |
||||
"#ef2929", |
||||
"#8ae234", |
||||
"#fce94f", |
||||
"#729fcf", |
||||
"#ad7fa8", |
||||
"#34e2e2", |
||||
"#eeeeec", |
||||
] |
||||
|
||||
def RGB2LAB(r,g,b): |
||||
if max(r,g,b): |
||||
r /= 255. |
||||
g /= 255. |
||||
b /= 255. |
||||
|
||||
X = (0.412453 * r + 0.357580 * g + 0.180423 * b) / 0.950456 |
||||
Y = (0.212671 * r + 0.715160 * g + 0.072169 * b) |
||||
Z = (0.019334 * r + 0.119193 * g + 0.950227 * b) / 1.088754 |
||||
|
||||
#[X * 0.950456] [0.412453 0.357580 0.180423] [R] |
||||
#[Y ] = [0.212671 0.715160 0.072169] * [G] |
||||
#[Z * 1.088754] [0.019334 0.119193 0.950227] [B] |
||||
|
||||
T = 0.008856 #threshold |
||||
|
||||
if X > T: |
||||
fX = math.pow(X, 1./3.) |
||||
else: |
||||
fX = 7.787 * X + 16./116. |
||||
|
||||
# Compute L |
||||
if Y > T: |
||||
Y3 = math.pow(Y, 1./3.) |
||||
fY = Y3 |
||||
L = 116. * Y3 - 16.0 |
||||
else: |
||||
fY = 7.787 * Y + 16./116. |
||||
L = 903.3 * Y |
||||
|
||||
if Z > T: |
||||
fZ = math.pow(Z, 1./3.) |
||||
else: |
||||
fZ = 7.787 * Z + 16./116. |
||||
|
||||
# Compute a and b |
||||
a = 500. * (fX - fY) |
||||
b = 200. * (fY - fZ) |
||||
|
||||
return (L,a,b) |
||||
|
||||
def colorDistance(r1,g1,b1 = None, r2 = None, g2 = None,b2 = None): |
||||
if type(r1) == tuple and type(g1) == tuple and b1 is None and r2 is None and g2 is None and b2 is None: |
||||
(l1,a1,b1) = RGB2LAB(*r1) |
||||
(l2,a2,b2) = RGB2LAB(*g1) |
||||
else: |
||||
(l1,a1,b1) = RGB2LAB(r1,g1,b1) |
||||
(l2,a2,b2) = RGB2LAB(r2,g2,b2) |
||||
#CIE94 |
||||
dl = l1-l2 |
||||
C1 = math.sqrt(a1*a1 + b1*b1) |
||||
C2 = math.sqrt(a2*a2 + b2*b2) |
||||
dC = C1 - C2 |
||||
da = a1-a2 |
||||
db = b1-b2 |
||||
dH = math.sqrt(max(0, da*da + db*db - dC*dC)) |
||||
Kl = 1 |
||||
K1 = 0.045 |
||||
K2 = 0.015 |
||||
|
||||
s1 = dl/Kl |
||||
s2 = dC/(1. + K1 * C1) |
||||
s3 = dH/(1. + K2 * C1) |
||||
return math.sqrt(s1*s1 + s2*s2 + s3*s3) |
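# Illustration of the CIE94 distance with the weights set above: for white vs.
# black only the lightness term contributes (dl = 100, dC = dH = 0), so
# colorDistance((255, 255, 255), (0, 0, 0)) == 100.0.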
||||
|
||||
def parseHexColor(col):
    if (len(col) != 4 and len(col) != 7) or not col.startswith("#"):
        return (0,0,0)
    if len(col) == 4:
        r = col[1]*2
        g = col[2]*2
        b = col[3]*2
    else:
        r = col[1:3]
        g = col[3:5]
        b = col[5:7]
    return (int(r,16), int(g,16), int(b,16))
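# Examples: parseHexColor("#ff8000") -> (255, 128, 0), and the shorthand form
# parseHexColor("#f80") -> (255, 136, 0); malformed input falls back to black.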
||||
|
||||
def getColor(col): |
||||
if isinstance(col, str): |
||||
if col.lower() in webcolors: |
||||
return parseHexColor(webcolors[col.lower()]) |
||||
else: |
||||
return parseHexColor(col) |
||||
else: |
||||
return col |
||||
|
||||
def getNearestConsoleColor(col):
    color = getColor(col)
    minidx = 0
    mindist = colorDistance(color, getColor(consoleColors[0]))
    for i in range(1, len(consoleColors)):
        dist = colorDistance(color, getColor(consoleColors[i]))
        if dist < mindist:
            mindist = dist
            minidx = i
    return minidx
||||
|
||||
if os.name == 'nt': |
||||
import msvcrt |
||||
from ctypes import windll, Structure, c_short, c_ushort, byref |
||||
SHORT = c_short |
||||
WORD = c_ushort |
||||
|
||||
class COORD(Structure): |
||||
_fields_ = [ |
||||
("X", SHORT), |
||||
("Y", SHORT)] |
||||
|
||||
class SMALL_RECT(Structure): |
||||
_fields_ = [ |
||||
("Left", SHORT), |
||||
("Top", SHORT), |
||||
("Right", SHORT), |
||||
("Bottom", SHORT)] |
||||
|
||||
class CONSOLE_SCREEN_BUFFER_INFO(Structure): |
||||
_fields_ = [ |
||||
("dwSize", COORD), |
||||
("dwCursorPosition", COORD), |
||||
("wAttributes", WORD), |
||||
("srWindow", SMALL_RECT), |
||||
("dwMaximumWindowSize", COORD)] |
||||
|
||||
class winConsoleColorizer(object): |
||||
def __init__(self, stream): |
||||
self.handle = msvcrt.get_osfhandle(stream.fileno()) |
||||
        self.default_attrs = 7  # fixed light-gray-on-black; self.get_text_attr() would query the real value
||||
self.stream = stream |
||||
|
||||
def get_text_attr(self): |
||||
csbi = CONSOLE_SCREEN_BUFFER_INFO() |
||||
windll.kernel32.GetConsoleScreenBufferInfo(self.handle, byref(csbi)) |
||||
return csbi.wAttributes |
||||
|
||||
def set_text_attr(self, color): |
||||
windll.kernel32.SetConsoleTextAttribute(self.handle, color) |
||||
|
||||
def write(self, *text, **attrs): |
||||
if not text: |
||||
return |
||||
color = attrs.get("color", None) |
||||
if color: |
||||
col = getNearestConsoleColor(color) |
||||
self.stream.flush() |
||||
self.set_text_attr(col) |
||||
self.stream.write(" ".join([str(t) for t in text])) |
||||
if color: |
||||
self.stream.flush() |
||||
self.set_text_attr(self.default_attrs) |
||||
|
||||
class dummyColorizer(object): |
||||
def __init__(self, stream): |
||||
self.stream = stream |
||||
|
||||
def write(self, *text, **attrs): |
||||
if text: |
||||
self.stream.write(" ".join([str(t) for t in text])) |
||||
|
||||
class asciiSeqColorizer(object): |
||||
RESET_SEQ = "\033[0m" |
||||
#BOLD_SEQ = "\033[1m" |
||||
ITALIC_SEQ = "\033[3m" |
||||
UNDERLINE_SEQ = "\033[4m" |
||||
STRIKEOUT_SEQ = "\033[9m" |
||||
COLOR_SEQ0 = "\033[00;%dm" #dark |
||||
COLOR_SEQ1 = "\033[01;%dm" #bold and light |
||||
|
||||
def __init__(self, stream): |
||||
self.stream = stream |
||||
|
||||
def get_seq(self, code): |
||||
if code > 8: |
||||
return self.__class__.COLOR_SEQ1 % (30 + code - 9) |
||||
else: |
||||
return self.__class__.COLOR_SEQ0 % (30 + code) |
||||
|
||||
def write(self, *text, **attrs): |
||||
if not text: |
||||
return |
||||
color = attrs.get("color", None) |
||||
if color: |
||||
col = getNearestConsoleColor(color) |
||||
self.stream.write(self.get_seq(col)) |
||||
self.stream.write(" ".join([str(t) for t in text])) |
||||
if color: |
||||
self.stream.write(self.__class__.RESET_SEQ) |
||||
|
||||
|
||||
def getColorizer(stream): |
||||
if stream.isatty(): |
||||
if os.name == "nt": |
||||
return winConsoleColorizer(stream) |
||||
else: |
||||
return asciiSeqColorizer(stream) |
||||
else: |
||||
return dummyColorizer(stream) |
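# Minimal usage sketch (the stream comes from the caller):
#   out = getColorizer(sys.stdout)
#   out.write("FAILED", color = "red")
# Non-tty streams get the dummyColorizer, so redirected logs stay free of
# escape codes and console attribute changes.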
@ -0,0 +1,80 @@ |
||||
import testlog_parser, sys, os, xml, re |
||||
from table_formatter import * |
||||
from optparse import OptionParser |
||||
|
||||
if __name__ == "__main__": |
||||
parser = OptionParser() |
||||
parser.add_option("-o", "--output", dest="format", help="output results in text format (can be 'txt', 'html' or 'auto' - default)", metavar="FMT", default="auto") |
||||
parser.add_option("-u", "--units", dest="units", help="units for output values (s, ms (default), mks, ns or ticks)", metavar="UNITS", default="ms") |
||||
parser.add_option("-c", "--columns", dest="columns", help="comma-separated list of columns to show", metavar="COLS", default="") |
||||
parser.add_option("-f", "--filter", dest="filter", help="regex to filter tests", metavar="REGEX", default=None) |
||||
parser.add_option("", "--show-all", action="store_true", dest="showall", default=False, help="also include empty and \"notrun\" lines") |
||||
(options, args) = parser.parse_args() |
||||
|
||||
if len(args) < 1: |
||||
print >> sys.stderr, "Usage:\n", os.path.basename(sys.argv[0]), "<log_name1>.xml" |
||||
exit(0) |
||||
|
||||
options.generateHtml = detectHtmlOutputType(options.format) |
||||
|
||||
tests = [] |
||||
files = [] |
||||
for arg in set(args): |
||||
files.append(os.path.basename(arg)) |
||||
tests.extend(testlog_parser.parseLogFile(arg)) |
||||
|
||||
if options.filter: |
||||
expr = re.compile(options.filter) |
||||
tests = [t for t in tests if expr.search(str(t))] |
||||
|
||||
tbl = table(", ".join(files)) |
||||
if options.columns: |
||||
metrics = [s.strip() for s in options.columns.split(",")] |
||||
metrics = [m for m in metrics if m and not m.endswith("%") and m in metrix_table] |
||||
else: |
||||
metrics = None |
||||
if not metrics: |
||||
metrics = ["name", "samples", "outliers", "min", "median", "gmean", "mean", "stddev"] |
||||
if "name" not in metrics: |
||||
metrics.insert(0, "name") |
||||
|
||||
for m in metrics: |
||||
if m == "name": |
||||
tbl.newColumn(m, metrix_table[m][0]) |
||||
else: |
||||
tbl.newColumn(m, metrix_table[m][0], align = "center") |
||||
|
||||
needNewRow = True |
||||
for case in sorted(tests): |
||||
if needNewRow: |
||||
tbl.newRow() |
||||
if not options.showall: |
||||
needNewRow = False |
||||
status = case.get("status") |
||||
if status != "run": |
||||
if status != "notrun": |
||||
needNewRow = True |
||||
for m in metrics: |
||||
if m == "name": |
||||
tbl.newCell(m, str(case)) |
||||
else: |
||||
tbl.newCell(m, status, color = "red") |
||||
else: |
||||
needNewRow = True |
||||
for m in metrics: |
||||
val = metrix_table[m][1](case, None, options.units) |
||||
if isinstance(val, float): |
||||
tbl.newCell(m, "%.2f %s" % (val, options.units), val) |
||||
else: |
||||
tbl.newCell(m, val, val) |
||||
if not needNewRow: |
||||
tbl.trimLastRow() |
||||
|
||||
# output table |
||||
if options.generateHtml: |
||||
htmlPrintHeader(sys.stdout, "Report %s tests from %s" % (len(tests), ", ".join(files))) |
||||
tbl.htmlPrintTable(sys.stdout) |
||||
htmlPrintFooter(sys.stdout) |
||||
else: |
||||
tbl.consolePrintTable(sys.stdout) |
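# Typical invocation (the log name is illustrative):
#   python report.py -u ms -f "dft.*" opencv_perf_core_20110915.xml > report.html
# With "-o auto" (the default) HTML is chosen when stdout is redirected to a
# .htm/.html file; "-o txt" or "-o html" forces the format.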
@ -0,0 +1,421 @@ |
||||
import sys, os, platform, xml, re, tempfile, glob, datetime, getpass |
||||
from optparse import OptionParser |
||||
from subprocess import Popen, PIPE |
||||
|
||||
hostos = os.name # 'nt', 'posix' |
||||
hostmachine = platform.machine() # 'x86', 'AMD64', 'x86_64' |
||||
nameprefix = "opencv_perf_" |
||||
|
||||
parse_patterns = ( |
||||
{'name': "has_perf_tests", 'default': "OFF", 'pattern': re.compile("^BUILD_PERF_TESTS:BOOL=(ON)$")}, |
||||
{'name': "cmake_home", 'default': None, 'pattern': re.compile("^CMAKE_HOME_DIRECTORY:INTERNAL=(.+)$")}, |
||||
{'name': "opencv_home", 'default': None, 'pattern': re.compile("^OpenCV_SOURCE_DIR:STATIC=(.+)$")}, |
||||
{'name': "tests_dir", 'default': None, 'pattern': re.compile("^EXECUTABLE_OUTPUT_PATH:PATH=(.+)$")}, |
||||
{'name': "build_type", 'default': "Release", 'pattern': re.compile("^CMAKE_BUILD_TYPE:STRING=(.*)$")}, |
||||
{'name': "svnversion_path", 'default': None, 'pattern': re.compile("^SVNVERSION_PATH:FILEPATH=(.*)$")}, |
||||
{'name': "cxx_flags", 'default': None, 'pattern': re.compile("^CMAKE_CXX_FLAGS:STRING=(.*)$")}, |
||||
{'name': "cxx_flags_debug", 'default': None, 'pattern': re.compile("^CMAKE_CXX_FLAGS_DEBUG:STRING=(.*)$")}, |
||||
{'name': "cxx_flags_release", 'default': None, 'pattern': re.compile("^CMAKE_CXX_FLAGS_RELEASE:STRING=(.*)$")}, |
||||
{'name': "ndk_path", 'default': None, 'pattern': re.compile("^ANDROID_NDK(?:_TOOLCHAIN_ROOT)?:PATH=(.*)$")}, |
||||
{'name': "arm_target", 'default': None, 'pattern': re.compile("^ARM_TARGET:INTERNAL=(.*)$")}, |
||||
{'name': "android_executable", 'default': None, 'pattern': re.compile("^ANDROID_EXECUTABLE:FILEPATH=(.*android.*)$")}, |
||||
{'name': "is_x64", 'default': "OFF", 'pattern': re.compile("^CUDA_64_BIT_DEVICE_CODE:BOOL=(ON)$")},#ugly( |
||||
{'name': "cmake_generator", 'default': None, 'pattern': re.compile("^CMAKE_GENERATOR:INTERNAL=(.+)$")}, |
||||
{'name': "cxx_compiler", 'default': None, 'pattern': re.compile("^CMAKE_CXX_COMPILER:FILEPATH=(.+)$")}, |
||||
) |
||||
|
||||
def query_yes_no(stdout, question, default="yes"): |
||||
valid = {"yes":True, "y":True, "ye":True, "no":False, "n":False} |
||||
    if default is None:
||||
prompt = " [y/n] " |
||||
elif default == "yes": |
||||
prompt = " [Y/n] " |
||||
elif default == "no": |
||||
prompt = " [y/N] " |
||||
else: |
||||
raise ValueError("invalid default answer: '%s'" % default) |
||||
|
||||
while True: |
||||
stdout.write(os.linesep + question + prompt) |
||||
choice = raw_input().lower() |
||||
if default is not None and choice == '': |
||||
return valid[default] |
||||
elif choice in valid: |
||||
return valid[choice] |
||||
else: |
||||
stdout.write("Please respond with 'yes' or 'no' "\ |
||||
"(or 'y' or 'n').\n") |
||||
|
||||
class RunInfo(object): |
||||
def __init__(self, path): |
||||
self.path = path |
||||
self.error = None |
||||
for p in parse_patterns: |
||||
setattr(self, p["name"], p["default"]) |
||||
cachefile = open(os.path.join(path, "CMakeCache.txt"), "rt") |
||||
try: |
||||
for l in cachefile.readlines(): |
||||
ll = l.strip() |
||||
if not ll or ll.startswith("#"): |
||||
continue |
||||
for p in parse_patterns: |
||||
match = p["pattern"].match(ll) |
||||
if match: |
||||
value = match.groups()[0] |
||||
if value and not value.endswith("-NOTFOUND"): |
||||
setattr(self, p["name"], value) |
||||
        except Exception:  # tolerate unreadable or malformed cache lines
||||
pass |
||||
cachefile.close() |
||||
# fix empty tests dir |
||||
if not self.tests_dir: |
||||
self.tests_dir = self.path |
||||
# add path to adb |
||||
if self.android_executable: |
||||
self.adb = os.path.join(os.path.dirname(os.path.dirname(self.android_executable)), ("platform-tools/adb","platform-tools/adb.exe")[hostos == 'nt']) |
||||
else: |
||||
self.adb = None |
||||
|
||||
# detect target platform |
||||
if self.android_executable or self.arm_target or self.ndk_path: |
||||
self.targetos = "android" |
||||
if not self.adb: |
||||
try: |
||||
output = Popen(["adb", "devices"], stdout=PIPE, stderr=PIPE).communicate() |
||||
self.adb = "adb" |
||||
except OSError: |
||||
pass |
||||
else: |
||||
self.targetos = hostos |
||||
|
||||
# fix has_perf_tests param |
||||
self.has_perf_tests = self.has_perf_tests == "ON" |
||||
# fix is_x64 flag |
||||
self.is_x64 = self.is_x64 == "ON" |
||||
if not self.is_x64 and ("X64" in "%s %s %s" % (self.cxx_flags, self.cxx_flags_release, self.cxx_flags_debug) or "Win64" in self.cmake_generator): |
||||
self.is_x64 = True |
||||
|
||||
# fix test path |
||||
if "Visual Studio" in self.cmake_generator: |
||||
self.tests_dir = os.path.join(self.tests_dir, self.build_type) |
||||
elif not self.is_x64 and self.cxx_compiler: |
||||
#one more attempt to detect x64 compiler |
||||
try: |
||||
output = Popen([self.cxx_compiler, "-v"], stdout=PIPE, stderr=PIPE).communicate() |
||||
if not output[0] and "x86_64" in output[1]: |
||||
self.is_x64 = True |
||||
except OSError: |
||||
pass |
||||
|
||||
# detect target arch |
||||
if self.targetos == "android": |
||||
self.targetarch = "arm" |
||||
elif self.is_x64 and hostmachine in ["AMD64", "x86_64"]: |
||||
self.targetarch = "x64" |
||||
elif hostmachine in ["x86", "AMD64", "x86_64"]: |
||||
self.targetarch = "x86" |
||||
else: |
||||
self.targetarch = "unknown" |
||||
|
||||
self.hardware = None |
||||
|
||||
self.getSvnVersion(self.cmake_home, "cmake_home_svn") |
||||
if self.opencv_home == self.cmake_home: |
||||
self.opencv_home_svn = self.cmake_home_svn |
||||
else: |
||||
self.getSvnVersion(self.opencv_home, "opencv_home_svn") |
||||
|
||||
self.tests = self.getAvailableTestApps() |
||||
|
||||
def getSvnVersion(self, path, name): |
||||
if not path: |
||||
setattr(self, name, None) |
||||
return |
||||
if not self.svnversion_path and hostos == 'nt': |
||||
self.tryGetSvnVersionWithTortoise(path, name) |
||||
else: |
||||
svnversion = self.svnversion_path |
||||
if not svnversion: |
||||
svnversion = "svnversion" |
||||
try: |
||||
output = Popen([svnversion, "-n", path], stdout=PIPE, stderr=PIPE).communicate() |
||||
if not output[1]: |
||||
setattr(self, name, output[0]) |
||||
else: |
||||
setattr(self, name, None) |
||||
except OSError: |
||||
setattr(self, name, None) |
||||
|
||||
def tryGetSvnVersionWithTortoise(self, path, name): |
||||
        dir = None
        try:
            wcrev = "SubWCRev.exe"
            dir = tempfile.mkdtemp()
||||
#print dir |
||||
tmpfilename = os.path.join(dir, "svn.tmp") |
||||
tmpfilename2 = os.path.join(dir, "svn_out.tmp") |
||||
tmpfile = open(tmpfilename, "w") |
||||
tmpfile.write("$WCRANGE$$WCMODS?M:$") |
||||
tmpfile.close(); |
||||
output = Popen([wcrev, path, tmpfilename, tmpfilename2, "-f"], stdout=PIPE, stderr=PIPE).communicate() |
||||
if "is not a working copy" in output[0]: |
||||
version = "exported" |
||||
else: |
||||
tmpfile = open(tmpfilename2, "r") |
||||
version = tmpfile.read() |
||||
tmpfile.close() |
||||
setattr(self, name, version) |
||||
except: |
||||
setattr(self, name, None) |
||||
finally: |
||||
if dir: |
||||
import shutil |
||||
shutil.rmtree(dir) |
||||
|
||||
def isTest(self, fullpath): |
||||
if not os.path.isfile(fullpath): |
||||
return False |
||||
if hostos == self.targetos: |
||||
return os.access(fullpath, os.X_OK) |
||||
return True |
||||
|
||||
def getAvailableTestApps(self): |
||||
if self.tests_dir and os.path.isdir(self.tests_dir): |
||||
files = glob.glob(os.path.join(self.tests_dir, nameprefix + "*")) |
||||
if self.targetos == hostos: |
||||
files = [f for f in files if self.isTest(f)] |
||||
return files |
||||
return [] |
||||
|
||||
def getLogName(self, app, timestamp): |
||||
app = os.path.basename(app) |
||||
if app.endswith(".exe"): |
||||
app = app[:-4] |
||||
if app.startswith(nameprefix): |
||||
app = app[len(nameprefix):] |
||||
if self.cmake_home_svn: |
||||
if self.cmake_home_svn == self.opencv_home_svn: |
||||
rev = self.cmake_home_svn |
||||
elif self.opencv_home_svn: |
||||
rev = self.cmake_home_svn + "-" + self.opencv_home_svn |
||||
else: |
||||
rev = self.cmake_home_svn |
||||
else: |
||||
rev = None |
||||
if rev: |
||||
rev = rev.replace(":","to") + "_" |
||||
else: |
||||
rev = "" |
||||
if self.hardware: |
||||
hw = str(self.hardware).replace(" ", "_") + "_" |
||||
else: |
||||
hw = "" |
||||
return "%s_%s_%s_%s%s%s.xml" %(app, self.targetos, self.targetarch, hw, rev, timestamp.strftime("%Y%m%dT%H%M%S")) |
||||
|
||||
def getTest(self, name): |
||||
# full path |
||||
if self.isTest(name): |
||||
return name |
||||
|
||||
# name only |
||||
fullname = os.path.join(self.tests_dir, name) |
||||
if self.isTest(fullname): |
||||
return fullname |
||||
|
||||
# name without extension |
||||
fullname += ".exe" |
||||
if self.isTest(fullname): |
||||
return fullname |
||||
|
||||
# short name for OpenCV tests |
||||
for t in self.tests: |
||||
if t == name: |
||||
return t |
||||
fname = os.path.basename(t) |
||||
if fname == name: |
||||
return t |
||||
if fname.endswith(".exe"): |
||||
fname = fname[:-4] |
||||
if fname == name: |
||||
return t |
||||
if fname.startswith(nameprefix): |
||||
fname = fname[len(nameprefix):] |
||||
if fname == name: |
||||
return t |
||||
return None |
||||
|
||||
def runAdb(self, *args): |
||||
cmd = [self.adb] |
||||
cmd.extend(args) |
||||
try: |
||||
output = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate() |
||||
if not output[1]: |
||||
return output[0] |
||||
self.error = output[1] |
||||
print self.error |
||||
except OSError: |
||||
pass |
||||
return None |
||||
|
||||
def isRunnable(self): |
||||
#if not self.has_perf_tests or not self.tests: |
||||
#self.error = "Performance tests are not built (at %s)" % self.path |
||||
#return False |
||||
if self.targetarch == "x64" and hostmachine == "x86": |
||||
self.error = "Target architecture is incompatible with current platform (at %s)" % self.path |
||||
return False |
||||
if self.targetos == "android": |
||||
if not self.adb or not os.path.isfile(self.adb) or not os.access(self.adb, os.X_OK): |
||||
self.error = "Could not find adb executable (at %s)" % self.path |
||||
return False |
||||
adb_res = self.runAdb("devices") |
||||
if not adb_res: |
||||
self.error = "Could not run adb command: %s (at %s)" % (self.error, self.path) |
||||
return False |
||||
connected_devices = len(re.findall(r"^[^ \t]+[ \t]+device$", adb_res, re.MULTILINE)) |
||||
if connected_devices == 0: |
||||
self.error = "No Android device connected (at %s)" % self.path |
||||
return False |
||||
if connected_devices > 1: |
||||
self.error = "Too many (%s) devices are connected. Single device is required. (at %s)" % (connected_devices, self.path) |
||||
return False |
||||
if "armeabi-v7a" in self.arm_target: |
||||
adb_res = self.runAdb("shell", "cat /proc/cpuinfo") |
||||
if not adb_res: |
||||
self.error = "Could not get info about Android platform: %s (at %s)" % (self.error, self.path) |
||||
return False |
||||
if "ARMv7" not in adb_res: |
||||
self.error = "Android device does not support ARMv7 commands, but tests are built for armeabi-v7a (at %s)" % self.path |
||||
return False |
||||
if "NEON" in self.arm_target and "neon" not in adb_res: |
||||
self.error = "Android device has no NEON, but tests are built for %s (at %s)" % (self.arm_target, self.path) |
||||
return False |
||||
hw = re.search(r"^Hardware[ \t]*:[ \t]*(.*?)$", adb_res, re.MULTILINE) |
||||
if hw: |
||||
self.hardware = hw.groups()[0].strip() |
||||
return True |
||||
|
||||
def runTest(self, path, workingDir, _stdout, _stderr, args = []): |
||||
if self.error: |
||||
return |
||||
args = args[:] |
||||
timestamp = datetime.datetime.now() |
||||
logfile = self.getLogName(path, timestamp) |
||||
exe = os.path.abspath(path) |
||||
|
||||
userlog = [a for a in args if a.startswith("--gtest_output=")] |
||||
if len(userlog) == 0: |
||||
args.append("--gtest_output=xml:" + logfile) |
||||
else: |
||||
logfile = userlog[userlog[0].find(":")+1:] |
||||
|
||||
if self.targetos == "android": |
||||
            hostlogpath = ""
            try:
||||
andoidcwd = "/data/bin/" + getpass.getuser().replace(" ","") + "_perf/" |
||||
exename = os.path.basename(exe) |
||||
                androidexe = androidcwd + exename
||||
#upload |
||||
print >> _stderr, "Uploading", exename, "to device..." |
||||
output = Popen([self.adb, "push", exe, androidexe], stdout=_stdout, stderr=_stderr).wait() |
||||
if output != 0: |
||||
print >> _stderr, "adb finishes unexpectedly with error code", output |
||||
return |
||||
#chmod |
||||
print >> _stderr, "Changing mode of ", androidexe |
||||
output = Popen([self.adb, "shell", "chmod 777 " + androidexe], stdout=_stdout, stderr=_stderr).wait() |
||||
if output != 0: |
||||
print >> _stderr, "adb finishes unexpectedly with error code", output |
||||
return |
||||
#run |
||||
command = exename + " " + " ".join(args) |
||||
print >> _stderr, "Running:", command |
||||
Popen([self.adb, "shell", "export OPENCV_TEST_DATA_PATH=" + self.test_data_path + "&& cd " + andoidcwd + "&& ./" + command], stdout=_stdout, stderr=_stderr).wait() |
||||
                # try to fetch the log back from the device
||||
print >> _stderr, "Pulling", logfile, "from device..." |
||||
hostlogpath = os.path.join(workingDir, logfile) |
||||
output = Popen([self.adb, "pull", andoidcwd + logfile, hostlogpath], stdout=_stdout, stderr=_stderr).wait() |
||||
if output != 0: |
||||
print >> _stderr, "adb finishes unexpectedly with error code", output |
||||
return |
||||
#rm log |
||||
Popen([self.adb, "shell", "rm " + andoidcwd + logfile], stdout=_stdout, stderr=_stderr).wait() |
||||
except OSError: |
||||
pass |
||||
if os.path.isfile(hostlogpath): |
||||
return hostlogpath |
||||
return None |
||||
else: |
||||
cmd = [exe] |
||||
cmd.extend(args) |
||||
print >> _stderr, "Running:", " ".join(cmd) |
||||
try: |
||||
Popen(cmd, stdout=_stdout, stderr=_stderr, cwd = workingDir).wait() |
||||
except OSError: |
||||
pass |
||||
|
||||
logpath = os.path.join(workingDir, logfile) |
||||
if os.path.isfile(logpath): |
||||
return logpath |
||||
return None |
||||
|
||||
def runTests(self, tests, _stdout, _stderr, workingDir, args = []): |
||||
if self.error: |
||||
return [] |
||||
if not tests: |
||||
tests = self.tests |
||||
logs = [] |
||||
for test in tests: |
||||
t = self.getTest(test) |
||||
if t: |
||||
logfile = self.runTest(t, workingDir, _stdout, _stderr, args) |
||||
if logfile: |
||||
logs.append(os.path.relpath(logfile, ".")) |
||||
else: |
||||
print >> _stderr, "Error: Test \"%s\" is not found in %s" % (test, self.tests_dir) |
||||
return logs |
||||
|
||||
if __name__ == "__main__": |
||||
test_args = [a for a in sys.argv if a.startswith("--perf_") or a.startswith("--gtest_")] |
||||
argv = [a for a in sys.argv if not(a.startswith("--perf_") or a.startswith("--gtest_"))] |
||||
|
||||
parser = OptionParser() |
||||
parser.add_option("-t", "--tests", dest="tests", help="comma-separated list of modules to test", metavar="SUITS", default="") |
||||
parser.add_option("-w", "--cwd", dest="cwd", help="working directory for tests", metavar="PATH", default=".") |
||||
parser.add_option("", "--android_test_data_path", dest="test_data_path", help="OPENCV_TEST_DATA_PATH for Android run", metavar="PATH", default="/sdcard/opencv_testdata/") |
||||
|
||||
(options, args) = parser.parse_args(argv) |
||||
|
||||
run_args = [] |
||||
|
||||
for path in args: |
||||
path = os.path.abspath(path) |
||||
        while True:
||||
if os.path.isdir(path) and os.path.isfile(os.path.join(path, "CMakeCache.txt")): |
||||
run_args.append(path) |
||||
break |
||||
npath = os.path.dirname(path) |
||||
if npath == path: |
||||
break |
||||
path = npath |
||||
|
||||
if len(run_args) == 0: |
||||
print >> sys.stderr, "Usage:\n", os.path.basename(sys.argv[0]), "<build_path>" |
||||
exit(1) |
||||
|
||||
tests = [s.strip() for s in options.tests.split(",") if s] |
||||
|
||||
if len(tests) != 1 or len(run_args) != 1: |
||||
#remove --gtest_output from params |
||||
test_args = [a for a in test_args if not a.startswith("--gtest_output=")] |
||||
|
||||
logs = [] |
||||
for path in run_args: |
||||
info = RunInfo(path) |
||||
#print vars(info),"\n" |
||||
if not info.isRunnable(): |
||||
print >> sys.stderr, "Error:", info.error |
||||
else: |
||||
info.test_data_path = options.test_data_path |
||||
logs.extend(info.runTests(tests, sys.stdout, sys.stderr, options.cwd, test_args)) |
||||
|
||||
if logs: |
||||
print >> sys.stderr, "Collected:", " ".join(logs) |
@ -0,0 +1,144 @@ |
||||
import testlog_parser, sys, os, xml, re, glob
||||
from table_formatter import * |
||||
from optparse import OptionParser |
||||
|
||||
if __name__ == "__main__": |
||||
if len(sys.argv) < 2: |
||||
print >> sys.stderr, "Usage:\n", os.path.basename(sys.argv[0]), "<log_name1>.xml [<log_name2>.xml ...]" |
||||
exit(0) |
||||
|
||||
parser = OptionParser() |
||||
parser.add_option("-o", "--output", dest="format", help="output results in text format (can be 'txt', 'html' or 'auto' - default)", metavar="FMT", default="auto") |
||||
parser.add_option("-m", "--metric", dest="metric", help="output metric", metavar="NAME", default="gmean") |
||||
parser.add_option("-u", "--units", dest="units", help="units for output values (s, ms (default), mks, ns or ticks)", metavar="UNITS", default="ms") |
||||
parser.add_option("-f", "--filter", dest="filter", help="regex to filter tests", metavar="REGEX", default=None) |
||||
parser.add_option("", "--no-relatives", action="store_false", dest="calc_relatives", default=True, help="do not output relative values") |
||||
parser.add_option("", "--show-all", action="store_true", dest="showall", default=False, help="also include empty and \"notrun\" lines") |
||||
(options, args) = parser.parse_args() |
||||
|
||||
options.generateHtml = detectHtmlOutputType(options.format) |
||||
if options.metric not in metrix_table: |
||||
options.metric = "gmean" |
||||
if options.metric.endswith("%"): |
||||
options.calc_relatives = False |
||||
|
||||
# expand wildcards and filter duplicates |
||||
files = [] |
||||
files1 = [] |
||||
for arg in args: |
||||
if ("*" in arg) or ("?" in arg): |
||||
files1.extend([os.path.abspath(f) for f in glob.glob(arg)]) |
||||
else: |
||||
files.append(os.path.abspath(arg)) |
||||
seen = set() |
||||
files = [ x for x in files if x not in seen and not seen.add(x)] |
||||
files.extend((set(files1) - set(files))) |
||||
|
||||
# read all passed files |
||||
test_sets = [] |
||||
for arg in files: |
||||
try: |
||||
tests = testlog_parser.parseLogFile(arg) |
||||
if options.filter: |
||||
expr = re.compile(options.filter) |
||||
tests = [t for t in tests if expr.search(str(t))] |
||||
if tests: |
||||
test_sets.append((os.path.basename(arg), tests)) |
||||
except IOError as err: |
||||
sys.stderr.write("IOError reading \"" + arg + "\" - " + str(err) + os.linesep) |
||||
except xml.parsers.expat.ExpatError as err: |
||||
sys.stderr.write("ExpatError reading \"" + arg + "\" - " + str(err) + os.linesep) |
||||
|
||||
if not test_sets: |
||||
sys.stderr.write("Error: no test data found" + os.linesep) |
||||
quit() |
||||
|
||||
# find matches |
||||
setsCount = len(test_sets) |
||||
test_cases = {} |
||||
|
||||
for i in range(setsCount): |
||||
for case in test_sets[i][1]: |
||||
name = str(case) |
||||
if name not in test_cases: |
||||
test_cases[name] = [None] * setsCount |
||||
test_cases[name][i] = case |
||||
|
||||
# build table |
||||
getter = metrix_table[options.metric][1] |
||||
if options.calc_relatives: |
||||
getter_p = metrix_table[options.metric + "%"][1] |
||||
tbl = table(metrix_table[options.metric][0]) |
||||
|
||||
# header |
||||
tbl.newColumn("name", "Name of Test", align = "left") |
||||
i = 0 |
||||
for set in test_sets: |
||||
tbl.newColumn(str(i), set[0].replace(".xml","").replace("_", "\n"), align = "center") |
||||
i += 1 |
||||
if options.calc_relatives: |
||||
i = 1 |
||||
for set in test_sets[1:]: |
||||
tbl.newColumn(str(i) + "%", set[0].replace(".xml","").replace("_", "\n") + "\nvs\n" + test_sets[0][0].replace(".xml","").replace("_", "\n"), align = "center") |
||||
i += 1 |
||||
|
||||
# rows |
||||
needNewRow = True |
||||
for name in sorted(test_cases.iterkeys()): |
||||
cases = test_cases[name] |
||||
if needNewRow: |
||||
tbl.newRow() |
||||
if not options.showall: |
||||
needNewRow = False |
||||
tbl.newCell("name", name) |
||||
for i in range(setsCount): |
||||
case = cases[i] |
||||
if case is None: |
||||
tbl.newCell(str(i), "-") |
||||
if options.calc_relatives and i > 0: |
||||
tbl.newCell(str(i) + "%", "-") |
||||
else: |
||||
status = case.get("status") |
||||
if status != "run": |
||||
tbl.newCell(str(i), status, color = "red") |
||||
if status != "notrun": |
||||
needNewRow = True |
||||
if options.calc_relatives and i > 0: |
||||
tbl.newCell(str(i) + "%", "-", color = "red") |
||||
else: |
||||
val = getter(case, cases[0], options.units) |
||||
if options.calc_relatives and i > 0 and val: |
||||
valp = getter_p(case, cases[0], options.units) |
||||
else: |
||||
valp = None |
||||
if not valp or i == 0: |
||||
color = None |
||||
elif valp > 1.05: |
||||
color = "red" |
||||
elif valp < 0.95: |
||||
color = "green" |
||||
else: |
||||
color = None |
||||
if val: |
||||
needNewRow = True |
||||
if options.metric.endswith("%"): |
||||
tbl.newCell(str(i), "%.2f" % val, val, color = color) |
||||
else: |
||||
tbl.newCell(str(i), "%.3f %s" % (val, options.units), val, color = color) |
||||
else: |
||||
tbl.newCell(str(i), "-") |
||||
if options.calc_relatives and i > 0: |
||||
if valp: |
||||
tbl.newCell(str(i) + "%", "%.2f" % valp, valp, color = color, bold = color) |
||||
else: |
||||
tbl.newCell(str(i) + "%", "-") |
||||
if not needNewRow: |
||||
tbl.trimLastRow() |
||||
|
||||
# output table |
||||
if options.generateHtml: |
||||
htmlPrintHeader(sys.stdout, "Summary report for %s tests from %s test logs" % (len(test_cases), setsCount)) |
||||
tbl.htmlPrintTable(sys.stdout) |
||||
htmlPrintFooter(sys.stdout) |
||||
else: |
||||
tbl.consolePrintTable(sys.stdout) |
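# Typical invocation (log names are illustrative):
#   python summary.py -m gmean -u ms baseline.xml candidate.xml > summary.html
# The first log is the baseline: each "%" column shows candidate/baseline
# ratios, colored red above 1.05 and green below 0.95.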
@ -0,0 +1,586 @@ |
||||
import sys, re, os.path, cgi, stat |
||||
from optparse import OptionParser |
||||
from color import getColorizer |
||||
|
||||
class tblCell(object): |
||||
def __init__(self, text, value = None, props = None): |
||||
self.text = text |
||||
self.value = value |
||||
self.props = props |
||||
|
||||
class tblColumn(object): |
||||
def __init__(self, caption, title = None, props = None): |
||||
self.text = caption |
||||
self.title = title |
||||
self.props = props |
||||
|
||||
class tblRow(object): |
||||
def __init__(self, colsNum, props = None): |
||||
self.cells = [None] * colsNum |
||||
self.props = props |
||||
|
||||
def htmlEncode(lines):
    return '<br/>'.join([cgi.escape(s) for s in lines])
||||
|
||||
class table(object): |
||||
def_align = "left" |
||||
def_valign = "middle" |
||||
def_color = None |
||||
def_colspan = 1 |
||||
def_rowspan = 1 |
||||
def_bold = False |
||||
def_italic = False |
||||
def_text="-" |
||||
|
||||
    def __init__(self, caption = None):
        self.columns = {}
        self.rows = []
        self.ridx = -1
        self.caption = caption
||||
|
||||
def newRow(self, **properties): |
||||
if len(self.rows) - 1 == self.ridx: |
||||
self.rows.append(tblRow(len(self.columns), properties)) |
||||
else: |
||||
            self.rows[self.ridx + 1].props = properties
||||
self.ridx += 1 |
||||
|
||||
def trimLastRow(self): |
||||
if self.rows: |
||||
self.rows.pop() |
||||
if self.ridx >= len(self.rows): |
||||
self.ridx = len(self.rows) - 1 |
||||
|
||||
def newColumn(self, name, caption, title = None, **properties): |
||||
if name in self.columns: |
||||
index = self.columns[name].index |
||||
else: |
||||
index = len(self.columns) |
||||
if isinstance(caption, tblColumn): |
||||
caption.index = index |
||||
self.columns[name] = caption |
||||
return caption |
||||
else: |
||||
col = tblColumn(caption, title, properties) |
||||
col.index = index |
||||
self.columns[name] = col |
||||
return col |
||||
|
||||
def getColumn(self, name): |
||||
if isinstance(name, str): |
||||
return self.columns.get(name, None) |
||||
else: |
||||
vals = [v for v in self.columns.values() if v.index == name] |
||||
if vals: |
||||
return vals[0] |
||||
return None |
||||
|
||||
def newCell(self, col_name, text, value = None, **properties): |
||||
if self.ridx < 0: |
||||
self.newRow() |
||||
col = self.getColumn(col_name) |
||||
row = self.rows[self.ridx] |
||||
if not col: |
||||
return None |
||||
if isinstance(text, tblCell): |
||||
cl = text |
||||
else: |
||||
cl = tblCell(text, value, properties) |
||||
row.cells[col.index] = cl |
||||
return cl |
||||
|
||||
def layoutTable(self): |
||||
columns = self.columns.values() |
||||
columns.sort(key=lambda c: c.index) |
||||
|
||||
colspanned = [] |
||||
rowspanned = [] |
||||
|
||||
self.headerHeight = 1 |
||||
rowsToAppend = 0 |
||||
|
||||
for col in columns: |
||||
self.measureCell(col) |
||||
if col.height > self.headerHeight: |
||||
self.headerHeight = col.height |
||||
col.minwidth = col.width |
||||
col.line = None |
||||
|
||||
for r in range(len(self.rows)): |
||||
row = self.rows[r] |
||||
row.minheight = 1 |
||||
for i in range(len(row.cells)): |
||||
cell = row.cells[i] |
||||
if row.cells[i] is None: |
||||
continue |
||||
cell.line = None |
||||
self.measureCell(cell) |
||||
colspan = int(self.getValue("colspan", cell)) |
||||
rowspan = int(self.getValue("rowspan", cell)) |
||||
if colspan > 1: |
||||
colspanned.append((r,i)) |
||||
if i + colspan > len(columns): |
||||
colspan = len(columns) - i |
||||
cell.colspan = colspan |
||||
#clear spanned cells |
||||
for j in range(i+1, min(len(row.cells), i + colspan)): |
||||
row.cells[j] = None |
||||
elif columns[i].minwidth < cell.width: |
||||
columns[i].minwidth = cell.width |
||||
if rowspan > 1: |
||||
rowspanned.append((r,i)) |
||||
                    rowsToAppend2 = r + rowspan - len(self.rows)
||||
if rowsToAppend2 > rowsToAppend: |
||||
rowsToAppend = rowsToAppend2 |
||||
cell.rowspan = rowspan |
||||
#clear spanned cells |
||||
for j in range(r+1, min(len(self.rows), r + rowspan)): |
||||
if len(self.rows[j].cells) > i: |
||||
self.rows[j].cells[i] = None |
||||
elif row.minheight < cell.height: |
||||
row.minheight = cell.height |
||||
|
||||
self.ridx = len(self.rows) - 1 |
||||
for r in range(rowsToAppend): |
||||
self.newRow() |
||||
self.rows[len(self.rows) - 1].minheight = 1 |
||||
|
||||
while colspanned: |
||||
colspanned_new = [] |
||||
for r, c in colspanned: |
||||
cell = self.rows[r].cells[c] |
||||
                cell.available = sum([col.minwidth for col in columns[c:c + cell.colspan]]) + cell.colspan - 1
                if cell.available < cell.width:
||||
colspanned_new.append((r,c)) |
||||
colspanned = colspanned_new |
||||
if colspanned: |
||||
r,c = colspanned[0] |
||||
cell = self.rows[r].cells[c] |
||||
cols = columns[c:c + cell.colspan] |
||||
                total = cell.available - cell.colspan + 1
                budget = cell.width - cell.available
||||
spent = 0 |
||||
s = 0 |
||||
for col in cols: |
||||
s += col.minwidth |
||||
addition = s * budget / total - spent |
||||
spent += addition |
||||
col.minwidth += addition |
||||
|
||||
while rowspanned: |
||||
rowspanned_new = [] |
||||
for r, c in rowspanned: |
||||
cell = self.rows[r].cells[c] |
||||
                cell.available = sum([row.minheight for row in self.rows[r:r + cell.rowspan]])
                if cell.available < cell.height:
||||
rowspanned_new.append((r,c)) |
||||
rowspanned = rowspanned_new |
||||
if rowspanned: |
||||
r,c = rowspanned[0] |
||||
cell = self.rows[r].cells[c] |
||||
rows = self.rows[r:r + cell.rowspan] |
||||
                total = cell.available
                budget = cell.height - cell.available
||||
spent = 0 |
||||
s = 0 |
||||
for row in rows: |
||||
s += row.minheight |
||||
addition = s * budget / total - spent |
||||
spent += addition |
||||
row.minheight += addition |
||||
|
||||
return columns |
||||
|
||||
def measureCell(self, cell): |
||||
text = self.getValue("text", cell) |
||||
cell.text = self.reformatTextValue(text) |
||||
cell.height = len(cell.text) |
||||
cell.width = len(max(cell.text, key = lambda line: len(line))) |
||||
|
||||
def reformatTextValue(self, value): |
||||
if isinstance(value, str): |
||||
vstr = value |
||||
elif isinstance(value, unicode): |
||||
vstr = str(value) |
||||
else: |
||||
try: |
||||
vstr = '\n'.join([str(v) for v in value]) |
||||
except TypeError: |
||||
vstr = str(value) |
||||
return vstr.splitlines() |
||||
|
||||
    def adjustColWidth(self, cols, width):
        total = sum([c.minwidth for c in cols])
        if total + len(cols) - 1 >= width:
            return
        budget = width - len(cols) + 1 - total
        spent = 0
        s = 0
        for col in cols:
            s += col.minwidth
            addition = s * budget / total - spent
            spent += addition
            col.minwidth += addition
||||
|
||||
def getValue(self, name, *elements): |
||||
for el in elements: |
||||
try: |
||||
return getattr(el, name) |
||||
except AttributeError: |
||||
pass |
||||
try: |
||||
val = el.props[name] |
||||
if val: |
||||
return val |
||||
except AttributeError: |
||||
pass |
||||
except KeyError: |
||||
pass |
||||
try: |
||||
return getattr(self.__class__, "def_" + name) |
||||
except AttributeError: |
||||
return None |
||||
|
||||
def consolePrintTable(self, out): |
||||
columns = self.layoutTable() |
||||
colrizer = getColorizer(out) |
||||
|
||||
if self.caption: |
||||
out.write("%s%s%s" % ( os.linesep, os.linesep.join(self.reformatTextValue(self.caption)), os.linesep * 2)) |
||||
|
||||
headerRow = tblRow(len(columns), {"align": "center", "valign": "top", "bold": True, "header": True}) |
||||
headerRow.cells = columns |
||||
headerRow.minheight = self.headerHeight |
||||
|
||||
self.consolePrintRow2(colrizer, headerRow, columns) |
||||
|
||||
for i in range(0, len(self.rows)): |
||||
self.consolePrintRow2(colrizer, i, columns) |
||||
|
||||
def consolePrintRow2(self, out, r, columns): |
||||
if isinstance(r, tblRow): |
||||
row = r |
||||
r = -1 |
||||
else: |
||||
row = self.rows[r] |
||||
|
||||
#evaluate initial values for line numbers |
||||
i = 0 |
||||
while i < len(row.cells): |
||||
cell = row.cells[i] |
||||
colspan = self.getValue("colspan", cell) |
||||
if cell is not None: |
||||
cell.wspace = sum([col.minwidth for col in columns[i:i + colspan]]) + colspan - 1 |
||||
if cell.line is None: |
||||
if r < 0: |
||||
rows = [row] |
||||
else: |
||||
rows = self.rows[r:r + self.getValue("rowspan", cell)] |
||||
cell.line = self.evalLine(cell, rows, columns[i]) |
||||
if len(rows) > 1: |
||||
for rw in rows: |
||||
rw.cells[i] = cell |
||||
i += colspan |
||||
|
||||
#print content |
||||
for ln in range(row.minheight): |
||||
i = 0 |
||||
while i < len(row.cells): |
||||
if i > 0: |
||||
out.write(" ") |
||||
cell = row.cells[i] |
||||
column = columns[i] |
||||
if cell is None: |
||||
out.write(" " * column.minwidth) |
||||
i += 1 |
||||
else: |
||||
self.consolePrintLine(cell, row, column, out) |
||||
i += self.getValue("colspan", cell) |
||||
out.write(os.linesep) |
||||
|
||||
def consolePrintLine(self, cell, row, column, out): |
||||
if cell.line < 0 or cell.line >= cell.height: |
||||
line = "" |
||||
else: |
||||
line = cell.text[cell.line] |
||||
width = cell.wspace |
||||
align = self.getValue("align", ((None, cell)[isinstance(cell, tblCell)]), row, column) |
||||
|
||||
if align == "right": |
||||
pattern = "%" + str(width) + "s" |
||||
elif align == "center": |
||||
pattern = "%" + str((width - len(line)) / 2 + len(line)) + "s" + " " * (width - len(line) - (width - len(line)) / 2) |
||||
else: |
||||
pattern = "%-" + str(width) + "s" |
||||
|
||||
out.write(pattern % line, color = self.getValue("color", cell, row, column)) |
||||
cell.line += 1 |
||||
|
||||
def evalLine(self, cell, rows, column): |
||||
height = cell.height |
||||
valign = self.getValue("valign", cell, rows[0], column) |
||||
space = sum([row.minheight for row in rows]) |
||||
if valign == "bottom": |
||||
return height - space |
||||
if valign == "middle": |
||||
return (height - space + 1) / 2 |
||||
return 0 |
||||
|
||||
def htmlPrintTable(self, out): |
||||
columns = self.layoutTable() |
||||
|
||||
out.write("<div class=\"tableFormatter\">\n<table class=\"tbl\">\n") |
||||
if self.caption: |
||||
out.write(" <caption>%s</caption>\n" % htmlEncode(self.reformatTextValue(self.caption))) |
||||
out.write(" <thead>\n") |
||||
|
||||
headerRow = tblRow(len(columns), {"align": "center", "valign": "top", "bold": True, "header": True}) |
||||
headerRow.cells = columns |
||||
|
||||
header_rows = [headerRow] |
||||
|
||||
        header_rows.extend([row for row in self.rows if self.getValue("header", row)])
||||
|
||||
for row in header_rows: |
||||
out.write(" <tr>\n") |
||||
for th in row.cells: |
||||
align = self.getValue("align", ((None, th)[isinstance(th, tblCell)]), row, row) |
||||
valign = self.getValue("valign", th, row) |
||||
attr = "" |
||||
if align: |
||||
attr += " align=\"%s\"" % align |
||||
if valign: |
||||
attr += " valign=\"%s\"" % valign |
||||
out.write(" <th%s>\n" % attr) |
||||
if th is not None: |
||||
out.write(" %s\n" % htmlEncode(th.text)) |
||||
out.write(" </th>\n") |
||||
out.write(" </tr>\n") |
||||
|
||||
out.write(" </thead>\n <tbody>\n") |
||||
|
||||
        rows = [row for row in self.rows if not self.getValue("header", row)]
||||
for r in range(len(rows)): |
||||
row = rows[r] |
||||
out.write(" <tr>\n") |
||||
i = 0 |
||||
while i < len(row.cells): |
||||
column = columns[i] |
||||
td = row.cells[i] |
||||
if isinstance(td, int): |
||||
i += td |
||||
continue |
||||
colspan = self.getValue("colspan", td) |
||||
rowspan = self.getValue("rowspan", td) |
||||
align = self.getValue("align", td, row, column) |
||||
valign = self.getValue("valign", td, row, column) |
||||
color = self.getValue("color", td, row, column) |
||||
bold = self.getValue("bold", td, row, column) |
||||
italic = self.getValue("italic", td, row, column) |
||||
style = "" |
||||
attr = "" |
||||
if color: |
||||
style += "color:%s;" % color |
||||
if bold: |
||||
style += "font-weight: bold;" |
||||
if italic: |
||||
style += "font-style: italic;" |
||||
if align and align != "left": |
||||
attr += " align=\"%s\"" % align |
||||
if valign and valign != "middle": |
||||
attr += " valign=\"%s\"" % valign |
||||
if colspan > 1: |
||||
attr += " colspan=\"%s\"" % colspan |
||||
if rowspan > 1: |
||||
attr += " rowspan=\"%s\"" % rowspan |
||||
for q in range(r+1, min(r+rowspan, len(rows))): |
||||
rows[q].cells[i] = colspan |
||||
if style: |
||||
attr += " style=\"%s\"" % style |
||||
out.write(" <td%s>\n" % attr) |
||||
                if td is not None:
||||
out.write(" %s\n" % htmlEncode(td.text)) |
||||
out.write(" </td>\n") |
||||
i += colspan |
||||
out.write(" </tr>\n") |
||||
|
||||
out.write(" </tbody>\n</table>\n</div>\n") |
||||
|
||||
def htmlPrintHeader(out, title = None): |
||||
if title: |
||||
titletag = "<title>%s</title>\n" % htmlEncode([str(title)]) |
||||
else: |
||||
titletag = "" |
||||
out.write("""<!DOCTYPE HTML> |
||||
<html> |
||||
<head> |
||||
<meta http-equiv="Content-Type" content="text/html; charset=us-ascii"> |
||||
%s<style type="text/css"> |
||||
html, body {font-family: Lucida Console, Courier New, Courier;font-size: 16px;color:#3e4758;} |
||||
.tbl{background:none repeat scroll 0 0 #FFFFFF;border-collapse:collapse;font-family:"Lucida Sans Unicode","Lucida Grande",Sans-Serif;font-size:14px;margin:20px;text-align:left;width:480px;margin-left: auto;margin-right: auto;white-space:nowrap;} |
||||
.tbl span{display:block;white-space:nowrap;} |
||||
.tbl thead tr:last-child th {padding-bottom:5px;} |
||||
.tbl tbody tr:first-child td {border-top:2px solid #6678B1;} |
||||
.tbl th{color:#003399;font-size:16px;font-weight:normal;white-space:nowrap;padding:3px 10px;} |
||||
.tbl td{border-bottom:1px solid #CCCCCC;color:#666699;padding:6px 8px;white-space:nowrap;} |
||||
.tbl tbody tr:hover td{color:#000099;} |
||||
.tbl caption{font:italic 16px "Trebuchet MS",Verdana,Arial,Helvetica,sans-serif;padding:0 0 5px;text-align:right;} |
||||
</style> |
||||
</head> |
||||
<body> |
||||
""" % titletag) |
||||
|
||||
def htmlPrintFooter(out): |
||||
out.write("</body>\n</html>") |
||||
|
||||
def getStdoutFilename(): |
||||
try: |
||||
if os.name == "nt": |
||||
import msvcrt, ctypes |
||||
handle = msvcrt.get_osfhandle(sys.stdout.fileno()) |
||||
size = ctypes.c_ulong(1024) |
||||
nameBuffer = ctypes.create_string_buffer(size.value) |
||||
ctypes.windll.kernel32.GetFinalPathNameByHandleA(handle, nameBuffer, size, 4) |
||||
return nameBuffer.value |
||||
else: |
||||
return os.readlink('/proc/self/fd/1') |
||||
    except Exception:
||||
return "" |
||||
|
||||
def detectHtmlOutputType(requestedType): |
||||
if requestedType == "txt": |
||||
return False |
||||
elif requestedType == "html": |
||||
return True |
||||
else: |
||||
if sys.stdout.isatty(): |
||||
return False |
||||
else: |
||||
outname = getStdoutFilename() |
||||
if outname: |
||||
if outname.endswith(".htm") or outname.endswith(".html"): |
||||
return True |
||||
else: |
||||
return False |
||||
else: |
||||
return False |
||||
|
||||
def getRelativeVal(test, test0, metric): |
||||
if not test or not test0: |
||||
return None |
||||
val0 = test0.get(metric, "s") |
||||
if not val0 or val0 == 0: |
||||
return None |
||||
val = test.get(metric, "s") |
||||
if not val: |
||||
return None |
||||
return float(val)/val0 |
||||
|
||||
|
||||
metrix_table = \ |
||||
{ |
||||
"name": ("Name of Test", lambda test,test0,units: str(test)), |
||||
|
||||
"samples": ("Number of\ncollected samples", lambda test,test0,units: test.get("samples", units)), |
||||
"outliers": ("Number of\noutliers", lambda test,test0,units: test.get("outliers", units)), |
||||
|
||||
"gmean": ("Geometric mean", lambda test,test0,units: test.get("gmean", units)), |
||||
"mean": ("Mean", lambda test,test0,units: test.get("mean", units)), |
||||
"min": ("Min", lambda test,test0,units: test.get("min", units)), |
||||
"median": ("Median", lambda test,test0,units: test.get("median", units)), |
||||
"stddev": ("Standard deviation", lambda test,test0,units: test.get("stddev", units)), |
||||
"gstddev": ("Standard deviation of Ln(time)", lambda test,test0,units: test.get("gstddev")), |
||||
|
||||
"gmean%": ("Geometric mean (relative)", lambda test,test0,units: getRelativeVal(test, test0, "gmean")), |
||||
"mean%": ("Mean (relative)", lambda test,test0,units: getRelativeVal(test, test0, "mean")), |
||||
"min%": ("Min (relative)", lambda test,test0,units: getRelativeVal(test, test0, "min")), |
||||
"median%": ("Median (relative)", lambda test,test0,units: getRelativeVal(test, test0, "median")), |
||||
"stddev%": ("Standard deviation (relative)", lambda test,test0,units: getRelativeVal(test, test0, "stddev")), |
||||
"gstddev%": ("Standard deviation of Ln(time) (relative)", lambda test,test0,units: getRelativeVal(test, test0, "gstddev")), |
||||
} |
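# Each entry maps a metric key to a (column caption, getter) pair; for
# instance, metrix_table["gmean"][1](test, None, "ms") yields the geometric
# mean of `test` in milliseconds, while the "%"-suffixed getters return the
# ratio against the baseline run passed as test0.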
||||
|
||||
if __name__ == "__main__": |
||||
if len(sys.argv) < 2: |
||||
print "Usage:\n", os.path.basename(sys.argv[0]), "<log_name>.xml" |
||||
exit(0) |
||||
|
||||
parser = OptionParser() |
||||
parser.add_option("-o", "--output", dest="format", help="output results in text format (can be 'txt', 'html' or 'auto' - default)", metavar="FMT", default="auto") |
||||
parser.add_option("-m", "--metric", dest="metric", help="output metric", metavar="NAME", default="gmean") |
||||
parser.add_option("-u", "--units", dest="units", help="units for output values (s, ms (default), mks, ns or ticks)", metavar="UNITS", default="ms") |
||||
(options, args) = parser.parse_args() |
||||
|
||||
options.generateHtml = detectHtmlOutputType(options.format) |
||||
if options.metric not in metrix_table: |
||||
options.metric = "gmean" |
||||
|
||||
#print options |
||||
#print args |
||||
|
||||
# tbl = table() |
||||
# tbl.newColumn("first", "qqqq", align = "left") |
||||
# tbl.newColumn("second", "wwww\nz\nx\n") |
||||
# tbl.newColumn("third", "wwasdas") |
||||
# |
||||
# tbl.newCell(0, "ccc111", align = "right") |
||||
# tbl.newCell(1, "dddd1") |
||||
# tbl.newCell(2, "8768756754") |
||||
# tbl.newRow() |
||||
# tbl.newCell(0, "1\n2\n3\n4\n5\n6\n7", align = "center", colspan = 2, rowspan = 2) |
||||
# tbl.newCell(2, "xxx\nqqq", align = "center", colspan = 1, valign = "middle") |
||||
# tbl.newRow() |
||||
# tbl.newCell(2, "+", align = "center", colspan = 1, valign = "middle") |
||||
# tbl.newRow() |
||||
# tbl.newCell(0, "vcvvbasdsadassdasdasv", align = "right", colspan = 2) |
||||
# tbl.newCell(2, "dddd1") |
||||
# tbl.newRow() |
||||
# tbl.newCell(0, "vcvvbv") |
||||
# tbl.newCell(1, "3445324", align = "right") |
||||
# tbl.newCell(2, None) |
||||
# tbl.newCell(1, "0000") |
||||
# if sys.stdout.isatty(): |
||||
# tbl.consolePrintTable(sys.stdout) |
||||
# else: |
||||
# htmlPrintHeader(sys.stdout) |
||||
# tbl.htmlPrintTable(sys.stdout) |
||||
# htmlPrintFooter(sys.stdout) |
||||
|
||||
import testlog_parser |
||||
|
||||
if options.generateHtml: |
||||
htmlPrintHeader(sys.stdout, "Tables demo") |
||||
|
||||
getter = metrix_table[options.metric][1] |
||||
|
||||
for arg in args: |
||||
tests = testlog_parser.parseLogFile(arg) |
||||
tbl = table(arg) |
||||
tbl.newColumn("name", "Name of Test", align = "left") |
||||
tbl.newColumn("value", metrix_table[options.metric][0], align = "center", bold = "true") |
||||
|
||||
for t in sorted(tests): |
||||
tbl.newRow() |
||||
tbl.newCell("name", str(t)) |
||||
|
||||
status = t.get("status") |
||||
if status != "run": |
||||
tbl.newCell("value", status) |
||||
else: |
||||
val = getter(t, None, options.units) |
||||
if val: |
||||
if options.metric.endswith("%"): |
||||
tbl.newCell("value", "%.2f" % val, val) |
||||
else: |
||||
tbl.newCell("value", "%.3f %s" % (val, options.units), val) |
||||
else: |
||||
tbl.newCell("value", "-") |
||||
|
||||
if options.generateHtml: |
||||
tbl.htmlPrintTable(sys.stdout) |
||||
else: |
||||
tbl.consolePrintTable(sys.stdout) |
||||
|
||||
if options.generateHtml: |
||||
htmlPrintFooter(sys.stdout) |
@ -0,0 +1,171 @@ |
||||
import sys, re, os.path |
||||
from xml.dom.minidom import parse |
||||
|
||||
class TestInfo(object): |
||||
|
||||
def __init__(self, xmlnode): |
||||
self.fixture = xmlnode.getAttribute("classname") |
||||
self.name = xmlnode.getAttribute("name") |
||||
self.value_param = xmlnode.getAttribute("value_param") |
||||
self.type_param = xmlnode.getAttribute("type_param") |
||||
if xmlnode.getElementsByTagName("failure"): |
||||
self.status = "failed" |
||||
else: |
||||
self.status = xmlnode.getAttribute("status") |
||||
if self.name.startswith("DISABLED_"): |
||||
self.status = "disabled" |
||||
self.fixture = self.fixture.replace("DISABLED_", "") |
||||
self.name = self.name.replace("DISABLED_", "") |
||||
self.metrix = {} |
||||
self.parseLongMetric(xmlnode, "bytesIn"); |
||||
self.parseLongMetric(xmlnode, "bytesOut"); |
||||
self.parseIntMetric(xmlnode, "samples"); |
||||
self.parseIntMetric(xmlnode, "outliers"); |
||||
self.parseFloatMetric(xmlnode, "frequency", 1); |
||||
self.parseLongMetric(xmlnode, "min"); |
||||
self.parseLongMetric(xmlnode, "median"); |
||||
self.parseLongMetric(xmlnode, "gmean"); |
||||
self.parseLongMetric(xmlnode, "mean"); |
||||
self.parseLongMetric(xmlnode, "stddev"); |
||||
self.parseFloatMetric(xmlnode, "gstddev"); |
||||
|
||||
def parseLongMetric(self, xmlnode, name, default = 0): |
||||
if xmlnode.hasAttribute(name): |
||||
tmp = xmlnode.getAttribute(name) |
||||
val = long(tmp) |
||||
self.metrix[name] = val |
||||
else: |
||||
self.metrix[name] = default |
||||
|
||||
def parseIntMetric(self, xmlnode, name, default = 0): |
||||
if xmlnode.hasAttribute(name): |
||||
tmp = xmlnode.getAttribute(name) |
||||
val = int(tmp) |
||||
self.metrix[name] = val |
||||
else: |
||||
self.metrix[name] = default |
||||
|
||||
def parseFloatMetric(self, xmlnode, name, default = 0): |
||||
if xmlnode.hasAttribute(name): |
||||
tmp = xmlnode.getAttribute(name) |
||||
val = float(tmp) |
||||
self.metrix[name] = val |
||||
else: |
||||
self.metrix[name] = default |
||||
|
||||
def parseStringMetric(self, xmlnode, name, default = None): |
||||
if xmlnode.hasAttribute(name): |
||||
tmp = xmlnode.getAttribute(name) |
||||
self.metrix[name] = tmp.strip() |
||||
else: |
||||
self.metrix[name] = default |
||||
|
||||
def get(self, name, units="ms"): |
||||
if name == "classname": |
||||
return self.fixture |
||||
if name == "name": |
||||
return self.name |
||||
if name == "fullname": |
||||
return self.__str__() |
||||
if name == "value_param": |
||||
return self.value_param |
||||
if name == "type_param": |
||||
return self.type_param |
||||
if name == "status": |
||||
return self.status |
||||
val = self.metrix.get(name, None) |
||||
if not val: |
||||
return val |
||||
if name in ["gmean", "min", "mean", "median", "stddev"]: |
||||
scale = 1.0 |
||||
frequency = self.metrix.get("frequency", 1.0) or 1.0 |
||||
if units == "ms": |
||||
scale = 1000.0 |
||||
if units == "mks": |
||||
scale = 1000000.0 |
||||
if units == "ns": |
||||
scale = 1000000000.0 |
||||
if units == "ticks": |
||||
frequency = long(1) |
||||
scale = long(1) |
||||
return val * scale / frequency |
||||
return val |
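        # Example: with gmean stored as 3.0e6 ticks at frequency 3.0e9 Hz,
        # get("gmean", "ms") returns 3.0e6 * 1000.0 / 3.0e9 = 1.0 (milliseconds).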
||||
|
||||
|
||||
def dump(self, units="ms"): |
||||
print "%s ->\t\033[1;31m%s\033[0m = \t%.2f%s" % (str(self), self.status, self.get("gmean", units), units) |
||||
|
||||
def shortName(self): |
||||
pos = self.name.find("/") |
||||
if pos > 0: |
||||
name = self.name[:pos] |
||||
else: |
||||
name = self.name |
||||
if self.fixture.endswith(name): |
||||
fixture = self.fixture[:-len(name)] |
||||
else: |
||||
fixture = self.fixture |
||||
if fixture.endswith("_"): |
||||
fixture = fixture[:-1] |
||||
return '::'.join(filter(None, [name, fixture])) |
||||
|
||||
def __str__(self): |
||||
pos = self.name.find("/") |
||||
if pos > 0: |
||||
name = self.name[:pos] |
||||
else: |
||||
name = self.name |
||||
if self.fixture.endswith(name): |
||||
fixture = self.fixture[:-len(name)] |
||||
else: |
||||
fixture = self.fixture |
||||
if fixture.endswith("_"): |
||||
fixture = fixture[:-1] |
||||
return '::'.join(filter(None, [name, fixture, self.type_param, self.value_param])) |
||||
|
||||
def __cmp__(self, other): |
||||
        r = cmp(self.fixture, other.fixture)
||||
if r != 0: |
||||
return r |
||||
if self.type_param: |
||||
if other.type_param: |
||||
                r = cmp(self.type_param, other.type_param)
||||
if r != 0: |
||||
return r |
||||
else: |
||||
return -1 |
||||
else: |
||||
if other.type_param: |
||||
return 1 |
||||
if self.value_param: |
||||
if other.value_param: |
||||
                r = cmp(self.value_param, other.value_param)
||||
if r != 0: |
||||
return r |
||||
else: |
||||
return -1 |
||||
else: |
||||
if other.value_param: |
||||
return 1 |
||||
return 0 |
||||
|
||||
def parseLogFile(filename): |
||||
tests = [] |
||||
log = parse(filename) |
||||
for case in log.getElementsByTagName("testcase"): |
||||
tests.append(TestInfo(case)) |
||||
return tests |
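# Example (the file name is illustrative):
#   for t in sorted(parseLogFile("opencv_perf_core.xml")):
#       t.dump(units = "mks")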
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
if len(sys.argv) < 2: |
||||
print "Usage:\n", os.path.basename(sys.argv[0]), "<log_name>.xml" |
||||
exit(0) |
||||
|
||||
for arg in sys.argv[1:]: |
||||
print "Tests found in", arg |
||||
tests = parseLogFile(arg) |
||||
for t in sorted(tests): |
||||
t.dump() |
||||
print |
@ -1,12 +1,10 @@ |
||||
#define GTEST_CREATE_AS_SHARED_LIBRARY 1 |
||||
|
||||
#if _MSC_VER >= 1200 |
||||
#pragma warning( disable: 4127 4251) |
||||
#endif |
||||
|
||||
#include "opencv2/ts/ts.hpp" |
||||
#include "opencv2/core/core_c.h" |
||||
|
||||
#if ANDROID |
||||
int wcscasecmp(const wchar_t* lhs, const wchar_t* rhs); |
||||
#endif |
||||
#if GTEST_LINKED_AS_SHARED_LIBRARY |
||||
#error ts module should not have GTEST_LINKED_AS_SHARED_LIBRARY defined |
||||
#endif |
||||
|
File diff suppressed because it is too large
@ -0,0 +1,930 @@ |
||||
#include "precomp.hpp" |
||||
|
||||
using namespace perf; |
||||
|
||||
void randu(cv::Mat& m) |
||||
{ |
||||
const int bigValue = 0x00000FFF; |
||||
if (m.depth() < CV_32F) |
||||
{ |
||||
int minmax[] = {0, 256}; |
||||
cv::Mat mr = cv::Mat(m.rows, m.cols * m.elemSize(), CV_8U, m.ptr(), m.step[0]); |
||||
cv::randu(mr, cv::Mat(1, 1, CV_32S, minmax), cv::Mat(1, 1, CV_32S, minmax + 1)); |
||||
} |
||||
else if (m.depth() == CV_32F) |
||||
{ |
||||
        //float minmax[] = {-FLT_MAX, FLT_MAX};
||||
float minmax[] = {-bigValue, bigValue}; |
||||
cv::Mat mr = m.reshape(1); |
||||
cv::randu(mr, cv::Mat(1, 1, CV_32F, minmax), cv::Mat(1, 1, CV_32F, minmax + 1)); |
||||
} |
||||
else |
||||
{ |
||||
        //double minmax[] = {-DBL_MAX, DBL_MAX};
||||
double minmax[] = {-bigValue, bigValue}; |
||||
cv::Mat mr = m.reshape(1); |
||||
cv::randu(mr, cv::Mat(1, 1, CV_64F, minmax), cv::Mat(1, 1, CV_64F, minmax + 1)); |
||||
} |
||||
} |
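// Usage sketch (matrix size/type are illustrative): fill inputs with bounded
// random values before the timed section, e.g.
//     cv::Mat src(1080, 1920, CV_32FC1);
//     randu(src);  // floats fall within [-0x00000FFF, 0x00000FFF]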
||||
|
||||
|
||||
/*****************************************************************************************\
||||
* ::perf::Regression |
||||
\*****************************************************************************************/ |
||||
|
||||
Regression& Regression::instance() |
||||
{ |
||||
static Regression single; |
||||
return single; |
||||
} |
||||
|
||||
Regression& Regression::add(const std::string& name, cv::InputArray array, double eps) |
||||
{ |
||||
return instance()(name, array, eps); |
||||
} |
||||
|
||||
void Regression::Init(const std::string& testSuitName, const std::string& ext) |
||||
{ |
||||
instance().init(testSuitName, ext); |
||||
} |
||||
|
||||
void Regression::init(const std::string& testSuitName, const std::string& ext) |
||||
{ |
||||
if (!storageInPath.empty()) |
||||
{ |
||||
LOGE("Subsequent initialisation of Regression utility is not allowed."); |
||||
return; |
||||
} |
||||
|
||||
const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH"); |
||||
const char *path_separator = "/"; |
||||
|
||||
if (data_path_dir) |
||||
{ |
||||
int len = strlen(data_path_dir)-1; |
||||
if (len < 0) len = 0; |
||||
std::string path_base = (data_path_dir[0] == 0 ? std::string(".") : std::string(data_path_dir)) |
||||
+ (data_path_dir[len] == '/' || data_path_dir[len] == '\\' ? "" : path_separator) |
||||
+ "perf" |
||||
+ path_separator; |
||||
|
||||
storageInPath = path_base + testSuitName + ext; |
||||
storageOutPath = path_base + testSuitName; |
||||
} |
||||
else |
||||
{ |
||||
storageInPath = testSuitName + ext; |
||||
storageOutPath = testSuitName; |
||||
} |
||||
|
||||
if (storageIn.open(storageInPath, cv::FileStorage::READ)) |
||||
{ |
||||
rootIn = storageIn.root(); |
||||
if (storageInPath.length() > 3 && storageInPath.substr(storageInPath.length()-3) == ".gz") |
||||
storageOutPath += "_new"; |
||||
storageOutPath += ext; |
||||
} |
||||
else |
||||
storageOutPath = storageInPath; |
||||
} |
||||
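// Editorial note: reference data is resolved as
//   $OPENCV_TEST_DATA_PATH/perf/<testSuitName><ext>
// (falling back to the current directory when the variable is unset); when the
// existing storage is a .gz archive, which cannot be appended to, refreshed
// results go to a sibling file with a "_new" suffix instead.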

Regression::Regression() : regRNG(cv::getTickCount()) //this rng should be really random
{
}

Regression::~Regression()
{
    if (storageIn.isOpened())
        storageIn.release();
    if (storageOut.isOpened())
    {
        if (!currentTestNodeName.empty())
            storageOut << "}";
        storageOut.release();
    }
}

cv::FileStorage& Regression::write()
{
    if (!storageOut.isOpened() && !storageOutPath.empty())
    {
        int mode = (storageIn.isOpened() && storageInPath == storageOutPath)
                ? cv::FileStorage::APPEND : cv::FileStorage::WRITE;
        storageOut.open(storageOutPath, mode);
        if (!storageOut.isOpened())
        {
            LOGE("Could not open \"%s\" file for writing", storageOutPath.c_str());
            storageOutPath.clear();
        }
        else if (mode == cv::FileStorage::WRITE && !rootIn.empty())
        {
            //TODO: write content of rootIn node into the storageOut
        }
    }
    return storageOut;
}

std::string Regression::getCurrentTestNodeName()
{
    const ::testing::TestInfo* const test_info =
        ::testing::UnitTest::GetInstance()->current_test_info();

    if (test_info == 0)
        return "undefined";

    std::string nodename = std::string(test_info->test_case_name()) + "--" + test_info->name();
    size_t idx = nodename.find_first_of('/');
    if (idx != std::string::npos)
        nodename.erase(idx);

    const char* type_param = test_info->type_param();
    if (type_param != 0)
        (nodename += "--") += type_param;

    const char* value_param = test_info->value_param();
    if (value_param != 0)
        (nodename += "--") += value_param;

    for(size_t i = 0; i < nodename.length(); ++i)
        if (!isalnum(nodename[i]) && '_' != nodename[i])
            nodename[i] = '-';

    return nodename;
}
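// Editorial note: for example, a test "Size_MatType.abs/3" with value_param
// "(640x480, 8UC1)" would produce a node name along the lines of
// "Size_MatType--abs---640x480--8UC1-": the "/3" suffix is dropped and every
// character that is not alphanumeric or '_' becomes '-'.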

bool Regression::isVector(cv::InputArray a)
{
    return a.kind() == cv::_InputArray::STD_VECTOR_MAT || a.kind() == cv::_InputArray::STD_VECTOR_VECTOR;
}

double Regression::getElem(cv::Mat& m, int y, int x, int cn)
{
    switch (m.depth())
    {
    case CV_8U: return *(m.ptr<unsigned char>(y, x) + cn);
    case CV_8S: return *(m.ptr<signed char>(y, x) + cn);
    case CV_16U: return *(m.ptr<unsigned short>(y, x) + cn);
    case CV_16S: return *(m.ptr<signed short>(y, x) + cn);
    case CV_32S: return *(m.ptr<signed int>(y, x) + cn);
    case CV_32F: return *(m.ptr<float>(y, x) + cn);
    case CV_64F: return *(m.ptr<double>(y, x) + cn);
    default: return 0;
    }
}

void Regression::write(cv::Mat m)
{
    double min, max;
    cv::minMaxLoc(m, &min, &max);
    write() << "min" << min << "max" << max;

    write() << "last" << "{" << "x" << m.cols-1 << "y" << m.rows-1
        << "val" << getElem(m, m.rows-1, m.cols-1, m.channels()-1) << "}";

    int x, y, cn;
    x = regRNG.uniform(0, m.cols);
    y = regRNG.uniform(0, m.rows);
    cn = regRNG.uniform(0, m.channels());
    write() << "rng1" << "{" << "x" << x << "y" << y;
    if (cn > 0) write() << "cn" << cn;
    write() << "val" << getElem(m, y, x, cn) << "}";

    x = regRNG.uniform(0, m.cols);
    y = regRNG.uniform(0, m.rows);
    cn = regRNG.uniform(0, m.channels());
    write() << "rng2" << "{" << "x" << x << "y" << y;
    if (cn > 0) write() << "cn" << cn;
    write() << "val" << getElem(m, y, x, cn) << "}";
}

void Regression::verify(cv::FileNode node, cv::Mat actual, double eps, std::string argname)
{
    double actualmin, actualmax;
    cv::minMaxLoc(actual, &actualmin, &actualmax);

    ASSERT_NEAR((double)node["min"], actualmin, eps)
        << " " << argname << " has unexpected minimal value";
    ASSERT_NEAR((double)node["max"], actualmax, eps)
        << " " << argname << " has unexpected maximal value";

    cv::FileNode last = node["last"];
    double actualLast = getElem(actual, actual.rows - 1, actual.cols - 1, actual.channels() - 1);
    ASSERT_EQ((int)last["x"], actual.cols - 1)
        << " " << argname << " has unexpected number of columns";
    ASSERT_EQ((int)last["y"], actual.rows - 1)
        << " " << argname << " has unexpected number of rows";
    ASSERT_NEAR((double)last["val"], actualLast, eps)
        << " " << argname << " has unexpected value of last element";

    cv::FileNode rng1 = node["rng1"];
    int x1 = rng1["x"];
    int y1 = rng1["y"];
    int cn1 = rng1["cn"];
    ASSERT_NEAR((double)rng1["val"], getElem(actual, y1, x1, cn1), eps)
        << " " << argname << " has unexpected value of [" << x1 << ":" << y1 << ":" << cn1 << "] element";

    cv::FileNode rng2 = node["rng2"];
    int x2 = rng2["x"];
    int y2 = rng2["y"];
    int cn2 = rng2["cn"];
    ASSERT_NEAR((double)rng2["val"], getElem(actual, y2, x2, cn2), eps)
        << " " << argname << " has unexpected value of [" << x2 << ":" << y2 << ":" << cn2 << "] element";
}

void Regression::write(cv::InputArray array)
{
    write() << "kind" << array.kind();
    write() << "type" << array.type();
    if (isVector(array))
    {
        int total = array.total();
        int idx = regRNG.uniform(0, total);
        write() << "len" << total;
        write() << "idx" << idx;

        cv::Mat m = array.getMat(idx);

        if (m.total() * m.channels() < 26) //5x5 or smaller
            write() << "val" << m;
        else
            write(m);
    }
    else
    {
        if (array.total() * array.channels() < 26) //5x5 or smaller
            write() << "val" << array.getMat();
        else
            write(array.getMat());
    }
}
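// Editorial note: each checked argument is therefore stored as a compact
// fingerprint rather than a full dump -- roughly:
//   <arg>: { kind, type, [len, idx,]
//            min, max,
//            last: { x, y, val },
//            rng1: { x, y, [cn,] val },
//            rng2: { x, y, [cn,] val } }
// with arrays of fewer than 26 elements written verbatim under "val" instead.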

void Regression::verify(cv::FileNode node, cv::InputArray array, double eps)
{
    ASSERT_EQ((int)node["kind"], array.kind()) << " Argument " << node.name() << " has unexpected kind";
    ASSERT_EQ((int)node["type"], array.type()) << " Argument " << node.name() << " has unexpected type";

    cv::FileNode valnode = node["val"];
    if (isVector(array))
    {
        ASSERT_EQ((int)node["len"], (int)array.total()) << " Vector " << node.name() << " has unexpected length";
        int idx = node["idx"];

        cv::Mat actual = array.getMat(idx);

        if (valnode.isNone())
        {
            ASSERT_LE((size_t)26, actual.total() * (size_t)actual.channels())
                << " " << node.name() << "[" << idx << "] has unexpected number of elements";
            verify(node, actual, eps, cv::format("%s[%d]", node.name().c_str(), idx));
        }
        else
        {
            cv::Mat expected;
            valnode >> expected;

            ASSERT_EQ(expected.size(), actual.size())
                << " " << node.name() << "[" << idx << "] has unexpected size";

            cv::Mat diff;
            cv::absdiff(expected, actual, diff);
            if (!cv::checkRange(diff, true, 0, 0, eps))
                FAIL() << " Difference between argument "
                    << node.name() << "[" << idx << "] and expected value is bigger than " << eps;
        }
    }
    else
    {
        if (valnode.isNone())
        {
            ASSERT_LE((size_t)26, array.total() * (size_t)array.channels())
                << " Argument " << node.name() << " has unexpected number of elements";
            verify(node, array.getMat(), eps, "Argument " + node.name());
        }
        else
        {
            cv::Mat expected;
            valnode >> expected;
            cv::Mat actual = array.getMat();

            ASSERT_EQ(expected.size(), actual.size())
                << " Argument " << node.name() << " has unexpected size";

            cv::Mat diff;
            cv::absdiff(expected, actual, diff);
            if (!cv::checkRange(diff, true, 0, 0, eps))
                FAIL() << " Difference between argument " << node.name()
                    << " and expected value is bigger than " << eps;
        }
    }
}

Regression& Regression::operator() (const std::string& name, cv::InputArray array, double eps)
{
    std::string nodename = getCurrentTestNodeName();

    cv::FileNode n = rootIn[nodename];
    if (n.isNone())
    {
        if (nodename != currentTestNodeName)
        {
            if (!currentTestNodeName.empty())
                write() << "}";
            currentTestNodeName = nodename;

            write() << nodename << "{";
        }
        write() << name << "{";
        write(array);
        write() << "}";
    }
    else
    {
        cv::FileNode this_arg = n[name];
        if (!this_arg.isMap())
            ADD_FAILURE() << " No regression data for " << name << " argument";
        else
            verify(this_arg, array, eps);
    }
    return *this;
}
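// Usage sketch (illustrative, not part of this diff): inside a performance
// test body, after the timed loop, a call like
//     SANITY_CHECK(c);   // presumably a thin wrapper over ::perf::Regression::add("c", c, eps)
// records the fingerprint on the first run and verifies against it on
// subsequent runs via the operator() above.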


/*****************************************************************************************\
*                                ::perf::performance_metrics
\*****************************************************************************************/
performance_metrics::performance_metrics()
{
    bytesIn = 0;
    bytesOut = 0;
    samples = 0;
    outliers = 0;
    gmean = 0;
    gstddev = 0;
    mean = 0;
    stddev = 0;
    median = 0;
    min = 0;
    frequency = 0;
    terminationReason = TERM_UNKNOWN;
}


/*****************************************************************************************\
*                                   ::perf::TestBase
\*****************************************************************************************/
int64 TestBase::timeLimitDefault = 0;
int64 TestBase::_timeadjustment = 0;

const char *command_line_keys =
{
    "{!!bugbugbugbug!! |perf_max_outliers |8 |percent of allowed outliers}"
    "{!!bugbugbugbug!! |perf_min_samples |10 |minimal required number of samples}"
    "{!!bugbugbugbug!! |perf_seed |809564 |seed for random numbers generator}"
#if ANDROID
    "{!!bugbugbugbug!! |perf_time_limit |2.0 |default time limit for a single test (in seconds)}"
#else
    "{!!bugbugbugbug!! |perf_time_limit |1.0 |default time limit for a single test (in seconds)}"
#endif
    "{!!bugbugbugbug!! |perf_max_deviation |1.0 |}"
    "{h |help |false |}"
};

double param_max_outliers;
double param_max_deviation;
unsigned int param_min_samples;
uint64 param_seed;
double param_time_limit;

void TestBase::Init(int argc, const char* const argv[])
{
    cv::CommandLineParser args(argc, argv, command_line_keys);
    param_max_outliers = std::min(100., std::max(0., args.get<double>("perf_max_outliers")));
    param_min_samples = std::max(1u, args.get<unsigned int>("perf_min_samples"));
    param_max_deviation = std::max(0., args.get<double>("perf_max_deviation"));
    param_seed = args.get<uint64>("perf_seed");
    param_time_limit = std::max(0., args.get<double>("perf_time_limit"));

    if (args.get<bool>("help"))
    {
        args.printParams();
        printf("\n\n");
        return;
    }

    //LOGD("!!!!!!!!!!!! %f !!!!!!", param_time_limit);

    timeLimitDefault = param_time_limit == 0.0 ? 1 : (int64)(param_time_limit * cv::getTickFrequency());
    _timeadjustment = _calibrate();
}
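// Editorial note: a perf binary built on this framework should therefore
// accept something like (exact flag syntax depends on this CommandLineParser
// version):
//   ./opencv_perf_core --perf_min_samples=20 --perf_time_limit=2.0 --perf_seed=809564
// with perf_max_outliers clamped to [0, 100] percent and at least one sample
// always required; the "!!bugbugbugbug!!" short-name placeholders presumably
// work around the parser's handling of empty short names.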

int64 TestBase::_calibrate()
{
    class _helper : public ::perf::TestBase
    {
    public:
        performance_metrics& getMetrics() { return calcMetrics(); }
        virtual void TestBody() {}
        virtual void PerfTestBody()
        {
            //the whole system warmup
            SetUp();
            cv::Mat a(2048, 2048, CV_32S, cv::Scalar(1));
            cv::Mat b(2048, 2048, CV_32S, cv::Scalar(2));
            declare.time(30);
            double s = 0;
            for(declare.iterations(20); startTimer(), next(); stopTimer())
                s += a.dot(b);
            declare.time(s);

            //self calibration
            SetUp();
            for(declare.iterations(1000); startTimer(), next(); stopTimer()) {}
        }
    };

    _timeadjustment = 0;
    _helper h;
    h.PerfTestBody();
    double compensation = h.getMetrics().min;
    LOGD("Time compensation is %.0f", compensation);
    return (int64)compensation;
}
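// Editorial note: _calibrate() measures the minimum time of an empty timed
// body after warming the system up; stopTimer() below subtracts this
// _timeadjustment from every sample, so the reported times approximate the
// payload alone, without the timing-loop overhead.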

TestBase::TestBase(): declare(this)
{
}

void TestBase::declareArray(SizeVector& sizes, cv::InputOutputArray a, int wtype)
{
    if (!a.empty())
    {
        sizes.push_back(std::pair<int, cv::Size>(getSizeInBytes(a), getSize(a)));
        warmup(a, wtype);
    }
    else if (a.kind() != cv::_InputArray::NONE)
        ADD_FAILURE() << " Uninitialized input/output parameters are not allowed for performance tests";
}

void TestBase::warmup(cv::InputOutputArray a, int wtype)
{
    if (a.empty()) return;
    if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
        warmup(a.getMat(), wtype);
    else
    {
        size_t total = a.total();
        for (size_t i = 0; i < total; ++i)
            warmup(a.getMat(i), wtype);
    }
}

int TestBase::getSizeInBytes(cv::InputArray a)
{
    if (a.empty()) return 0;
    int total = (int)a.total();
    if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
        return total * CV_ELEM_SIZE(a.type());

    int size = 0;
    for (int i = 0; i < total; ++i)
        size += (int)a.total(i) * CV_ELEM_SIZE(a.type(i));

    return size;
}

cv::Size TestBase::getSize(cv::InputArray a)
{
    if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
        return a.size();
    return cv::Size();
}

bool TestBase::next()
{
    return ++currentIter < nIters && totalTime < timeLimit;
}

void TestBase::warmup(cv::Mat m, int wtype)
{
    switch(wtype)
    {
    case WARMUP_READ:
        cv::sum(m.reshape(1));
        return;
    case WARMUP_WRITE:
        m.reshape(1).setTo(cv::Scalar::all(0));
        return;
    case WARMUP_RNG:
        randu(m);
        return;
    default:
        return;
    }
}

unsigned int TestBase::getTotalInputSize() const
{
    unsigned int res = 0;
    for (SizeVector::const_iterator i = inputData.begin(); i != inputData.end(); ++i)
        res += i->first;
    return res;
}

unsigned int TestBase::getTotalOutputSize() const
{
    unsigned int res = 0;
    for (SizeVector::const_iterator i = outputData.begin(); i != outputData.end(); ++i)
        res += i->first;
    return res;
}

void TestBase::startTimer()
{
    lastTime = cv::getTickCount();
}

void TestBase::stopTimer()
{
    int64 time = cv::getTickCount();
    if (lastTime == 0)
        ADD_FAILURE() << " stopTimer() is called before startTimer()";
    lastTime = time - lastTime;
    totalTime += lastTime;
    lastTime -= _timeadjustment;
    if (lastTime < 0) lastTime = 0;
    times.push_back(lastTime);
    lastTime = 0;
}

performance_metrics& TestBase::calcMetrics()
{
    if ((metrics.samples == (unsigned int)currentIter) || times.size() == 0)
        return metrics;

    metrics.bytesIn = getTotalInputSize();
    metrics.bytesOut = getTotalOutputSize();
    metrics.frequency = cv::getTickFrequency();
    metrics.samples = (unsigned int)times.size();
    metrics.outliers = 0;

    if (currentIter == nIters)
        metrics.terminationReason = performance_metrics::TERM_ITERATIONS;
    else if (totalTime >= timeLimit)
        metrics.terminationReason = performance_metrics::TERM_TIME;
    else
        metrics.terminationReason = performance_metrics::TERM_UNKNOWN;

    std::sort(times.begin(), times.end());

    //estimate mean and stddev for log(time)
    double gmean = 0;
    double gstddev = 0;
    int n = 0;
    for(TimeVector::const_iterator i = times.begin(); i != times.end(); ++i)
    {
        double x = (double)*i;
        if (x < DBL_EPSILON) continue;
        double lx = log(x);

        ++n;
        double delta = lx - gmean;
        gmean += delta / n;
        gstddev += delta * (lx - gmean);
    }

    gstddev = n > 1 ? sqrt(gstddev / (n - 1)) : 0;

    TimeVector::const_iterator start = times.begin();
    TimeVector::const_iterator end = times.end();

    //filter outliers assuming log-normal distribution
    //http://stackoverflow.com/questions/1867426/modeling-distribution-of-performance-measurements
    int offset = 0;
    if (gstddev > DBL_EPSILON)
    {
        double minout = exp(gmean - 3 * gstddev);
        double maxout = exp(gmean + 3 * gstddev);
        while(*start < minout) ++start, ++metrics.outliers, ++offset;
        do --end, ++metrics.outliers; while(*end > maxout);
        ++end, --metrics.outliers;
    }

    metrics.min = (double)*start;
    //calc final metrics
    n = 0;
    gmean = 0;
    gstddev = 0;
    double mean = 0;
    double stddev = 0;
    int m = 0;
    for(; start != end; ++start)
    {
        double x = (double)*start;
        if (x > DBL_EPSILON)
        {
            double lx = log(x);
            ++m;
            double gdelta = lx - gmean;
            gmean += gdelta / m;
            gstddev += gdelta * (lx - gmean);
        }
        ++n;
        double delta = x - mean;
        mean += delta / n;
        stddev += delta * (x - mean);
    }

    metrics.mean = mean;
    metrics.gmean = exp(gmean);
    metrics.gstddev = m > 1 ? sqrt(gstddev / (m - 1)) : 0;
    metrics.stddev = n > 1 ? sqrt(stddev / (n - 1)) : 0;
    metrics.median = n % 2
            ? (double)times[offset + n / 2]
            : 0.5 * (times[offset + n / 2] + times[offset + n / 2 - 1]);

    return metrics;
}
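// Editorial note: both passes above are Welford's online algorithm
// (mean += delta/n; M2 += delta*(x - mean)), applied first to log(t) to fit a
// log-normal model and then to the samples surviving the
// exp(gmean +/- 3*gstddev) outlier cut; gmean/gstddev are thus the geometric
// mean and the standard deviation of the log-times.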

void TestBase::validateMetrics()
{
    performance_metrics& m = calcMetrics();

    if (HasFailure()) return;

    ASSERT_GE(m.samples, 1u)
        << " No time measurements were performed.\nstartTimer() and stopTimer() commands are required for performance tests.";

    EXPECT_GE(m.samples, param_min_samples)
        << " Only a few samples are collected.\nPlease increase number of iterations or/and time limit to get reliable performance measurements.";

    if (m.gstddev > DBL_EPSILON)
    {
        EXPECT_GT(/*m.gmean * */1., /*m.gmean * */ 2 * sinh(m.gstddev * param_max_deviation))
            << " Test results are not reliable ((mean-sigma,mean+sigma) deviation interval is bigger than measured time interval).";
    }

    EXPECT_LE(m.outliers, std::max((unsigned int)cvCeil(m.samples * param_max_outliers / 100.), 1u))
        << " Test results are not reliable (too many outliers).";
}
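// Editorial note: if log(t) ~ N(mu, sigma^2), the interval
// (exp(mu - k*sigma), exp(mu + k*sigma)) has width gmean * 2*sinh(k*sigma),
// which is why the check above (with the gmean factors cancelled out) rejects
// a run whose relative spread exceeds 1 for k = perf_max_deviation, i.e. whose
// spread is wider than the geometric mean itself.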

void TestBase::reportMetrics(bool toJUnitXML)
{
    performance_metrics& m = calcMetrics();

    if (toJUnitXML)
    {
        RecordProperty("bytesIn", (int)m.bytesIn);
        RecordProperty("bytesOut", (int)m.bytesOut);
        RecordProperty("term", m.terminationReason);
        RecordProperty("samples", (int)m.samples);
        RecordProperty("outliers", (int)m.outliers);
        RecordProperty("frequency", cv::format("%.0f", m.frequency).c_str());
        RecordProperty("min", cv::format("%.0f", m.min).c_str());
        RecordProperty("median", cv::format("%.0f", m.median).c_str());
        RecordProperty("gmean", cv::format("%.0f", m.gmean).c_str());
        RecordProperty("gstddev", cv::format("%.6f", m.gstddev).c_str());
        RecordProperty("mean", cv::format("%.0f", m.mean).c_str());
        RecordProperty("stddev", cv::format("%.0f", m.stddev).c_str());
    }
    else
    {
        const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
        const char* type_param = test_info->type_param();
        const char* value_param = test_info->value_param();

#if defined(ANDROID) && defined(USE_ANDROID_LOGGING)
        LOGD("[ FAILED ] %s.%s", test_info->test_case_name(), test_info->name());
#endif

        if (type_param) LOGD("type = %11s", type_param);
        if (value_param) LOGD("param = %11s", value_param);

        switch (m.terminationReason)
        {
        case performance_metrics::TERM_ITERATIONS:
            LOGD("termination reason: reached maximum number of iterations");
            break;
        case performance_metrics::TERM_TIME:
            LOGD("termination reason: reached time limit");
            break;
        case performance_metrics::TERM_UNKNOWN:
        default:
            LOGD("termination reason: unknown");
            break;
        };

        LOGD("bytesIn =%11lu", m.bytesIn);
        LOGD("bytesOut =%11lu", m.bytesOut);
        if (nIters == (unsigned int)-1 || m.terminationReason == performance_metrics::TERM_ITERATIONS)
            LOGD("samples =%11u", m.samples);
        else
            LOGD("samples =%11u of %u", m.samples, nIters);
        LOGD("outliers =%11u", m.outliers);
        LOGD("frequency =%11.0f", m.frequency);
        LOGD("min =%11.0f = %.2fms", m.min, m.min * 1e3 / m.frequency);
        LOGD("median =%11.0f = %.2fms", m.median, m.median * 1e3 / m.frequency);
        LOGD("gmean =%11.0f = %.2fms", m.gmean, m.gmean * 1e3 / m.frequency);
        LOGD("gstddev =%11.8f = %.2fms for 97%% dispersion interval", m.gstddev, m.gmean * 2 * sinh(m.gstddev * 3) * 1e3 / m.frequency);
        LOGD("mean =%11.0f = %.2fms", m.mean, m.mean * 1e3 / m.frequency);
        LOGD("stddev =%11.0f = %.2fms", m.stddev, m.stddev * 1e3 / m.frequency);
    }
}

void TestBase::SetUp()
{
    lastTime = 0;
    totalTime = 0;
    nIters = (unsigned int)-1;
    currentIter = (unsigned int)-1;
    timeLimit = timeLimitDefault;
    times.clear();
    cv::theRNG().state = param_seed; //this rng should generate same numbers for each run
}

void TestBase::TearDown()
{
    validateMetrics();
    if (HasFailure())
        reportMetrics(false);
    else
    {
        const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
        const char* type_param = test_info->type_param();
        const char* value_param = test_info->value_param();
        if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
        if (type_param) printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
        reportMetrics(true);
    }
}

std::string TestBase::getDataPath(const std::string& relativePath)
{
    if (relativePath.empty())
    {
        ADD_FAILURE() << " Bad path to test resource";
        return std::string();
    }

    const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH");
    const char *path_separator = "/";

    std::string path;
    if (data_path_dir)
    {
        int len = strlen(data_path_dir) - 1;
        if (len < 0) len = 0;
        path = (data_path_dir[0] == 0 ? std::string(".") : std::string(data_path_dir))
                + (data_path_dir[len] == '/' || data_path_dir[len] == '\\' ? "" : path_separator);
    }
    else
    {
        path = ".";
        path += path_separator;
    }

    if (relativePath[0] == '/' || relativePath[0] == '\\')
        path += relativePath.substr(1);
    else
        path += relativePath;

    FILE* fp = fopen(path.c_str(), "r");
    if (fp)
        fclose(fp);
    else
        ADD_FAILURE() << " Requested file \"" << path << "\" does not exist.";
    return path;
}

/*****************************************************************************************\
*                                ::perf::TestBase::_declareHelper
\*****************************************************************************************/
TestBase::_declareHelper& TestBase::_declareHelper::iterations(int n)
{
    test->times.clear();
    test->times.reserve(n);
    test->nIters = n;
    test->currentIter = (unsigned int)-1;
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::time(double timeLimitSecs)
{
    test->times.clear();
    test->currentIter = (unsigned int)-1;
    test->timeLimit = (int64)(timeLimitSecs * cv::getTickFrequency());
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    TestBase::declareArray(test->inputData, a2, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    TestBase::declareArray(test->inputData, a2, wtype);
    TestBase::declareArray(test->inputData, a3, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->inputData, a1, wtype);
    TestBase::declareArray(test->inputData, a2, wtype);
    TestBase::declareArray(test->inputData, a3, wtype);
    TestBase::declareArray(test->inputData, a4, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    TestBase::declareArray(test->outputData, a2, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    TestBase::declareArray(test->outputData, a2, wtype);
    TestBase::declareArray(test->outputData, a3, wtype);
    return *this;
}

TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype)
{
    if (!test->times.empty()) return *this;
    TestBase::declareArray(test->outputData, a1, wtype);
    TestBase::declareArray(test->outputData, a2, wtype);
    TestBase::declareArray(test->outputData, a3, wtype);
    TestBase::declareArray(test->outputData, a4, wtype);
    return *this;
}

TestBase::_declareHelper::_declareHelper(TestBase* t) : test(t)
{
}

/*****************************************************************************************\
*                                    ::perf::PrintTo
\*****************************************************************************************/
namespace perf
{

void PrintTo(const MatType& t, ::std::ostream* os)
{
    switch( CV_MAT_DEPTH((int)t) )
    {
    case CV_8U: *os << "8U"; break;
    case CV_8S: *os << "8S"; break;
    case CV_16U: *os << "16U"; break;
    case CV_16S: *os << "16S"; break;
    case CV_32S: *os << "32S"; break;
    case CV_32F: *os << "32F"; break;
    case CV_64F: *os << "64F"; break;
    case CV_USRTYPE1: *os << "USRTYPE1"; break;
    default: *os << "INVALID_TYPE"; break;
    }
    *os << 'C' << CV_MAT_CN((int)t);
}
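// Editorial note: e.g. CV_32FC1 prints as "32FC1" and CV_8UC4 as "8UC4", which
// is what keeps the value-parameterized test names readable.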

} //namespace perf


/*****************************************************************************************\
*                                     ::cv::PrintTo
\*****************************************************************************************/
namespace cv {

void PrintTo(const Size& sz, ::std::ostream* os)
{
    *os << /*"Size:" << */sz.width << "x" << sz.height;
}

} // namespace cv