@@ -140,10 +140,6 @@ TEST_P(Test_TensorFlow_layers, padding)

TEST_P(Test_TensorFlow_layers, padding_same)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("Test is disabled for DLIE");
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
@@ -251,10 +247,6 @@ TEST_P(Test_TensorFlow_layers, reshape)

TEST_P(Test_TensorFlow_layers, flatten)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("Test is disabled for DLIE");
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2
@@ -267,11 +259,6 @@ TEST_P(Test_TensorFlow_layers, flatten)

TEST_P(Test_TensorFlow_layers, unfused_flatten)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("Test is disabled for DLIE");
#endif

    runTensorFlowNet("unfused_flatten");
    runTensorFlowNet("unfused_flatten_unknown_batch");
}
@@ -320,11 +307,14 @@ class Test_TensorFlow_nets : public DNNTestLayer {};

TEST_P(Test_TensorFlow_nets, MobileNet_SSD)
{
    checkBackend();
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) ||
        (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16))
        throw SkipTestException("");
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        throw SkipTestException("Test is disabled for MyriadX");
#endif

    checkBackend();
    std::string netPath = findDataFile("dnn/ssd_mobilenet_v1_coco.pb", false);
    std::string netConfig = findDataFile("dnn/ssd_mobilenet_v1_coco.pbtxt", false);
    std::string imgPath = findDataFile("dnn/street.png", false);
@@ -333,30 +323,18 @@ TEST_P(Test_TensorFlow_nets, MobileNet_SSD)
    resize(imread(imgPath), inp, Size(300, 300));
    inp = blobFromImage(inp, 1.0f / 127.5, Size(), Scalar(127.5, 127.5, 127.5), true);

    std::vector<String> outNames(3);
    outNames[0] = "concat";
    outNames[1] = "concat_1";
    outNames[2] = "detection_out";

    std::vector<Mat> refs(outNames.size());
    for (int i = 0; i < outNames.size(); ++i)
    {
        std::string path = findDataFile("dnn/tensorflow/ssd_mobilenet_v1_coco." + outNames[i] + ".npy", false);
        refs[i] = blobFromNPY(path);
    }
    Mat ref = blobFromNPY(findDataFile("dnn/tensorflow/ssd_mobilenet_v1_coco.detection_out.npy", false));

    Net net = readNetFromTensorflow(netPath, netConfig);
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    net.setInput(inp);
    Mat out = net.forward();

    std::vector<Mat> output;
    net.forward(output, outNames);

    normAssert(refs[0].reshape(1, 1), output[0].reshape(1, 1), "", 1e-5, 1.5e-4);
    normAssert(refs[1].reshape(1, 1), output[1].reshape(1, 1), "", 1e-5, 3e-4);
    normAssertDetections(refs[2], output[2], "", 0.2);
    double scoreDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.0043 : default_l1;
    double iouDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.037 : default_lInf;
    normAssertDetections(ref, out, "", 0.2, scoreDiff, iouDiff);
}

TEST_P(Test_TensorFlow_nets, Inception_v2_SSD)
@@ -597,10 +575,6 @@ TEST_P(Test_TensorFlow_layers, fp16_weights)

TEST_P(Test_TensorFlow_layers, fp16_padding_same)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        throw SkipTestException("Test is disabled for DLIE");
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X