diff --git a/modules/dnn/test/test_onnx_importer.cpp b/modules/dnn/test/test_onnx_importer.cpp
index b8615912c5..ef8c494131 100644
--- a/modules/dnn/test/test_onnx_importer.cpp
+++ b/modules/dnn/test/test_onnx_importer.cpp
@@ -1287,6 +1287,48 @@ TEST_P(Test_ONNX_layers, GRU)
     testONNXModels("gru", npy, 0, 0, false, false);
 }
 
+TEST_P(Test_ONNX_layers, gru_cell_batchsize_50_seqlen_1)
+{
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
+    // IE exception: Node GRU_22 was not assigned on any pointed device
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
+                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
+        );
+#endif
+    if(backend == DNN_BACKEND_CUDA)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
+    testONNXModels("gru_cell_batchsize_50_seqlen_1", npy, 0, 0, false, false);
+}
+
+TEST_P(Test_ONNX_layers, gru_cell_batchsize_5_seqlen_5)
+{
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
+    // IE exception: Node GRU_22 was not assigned on any pointed device
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
+                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
+        );
+#endif
+    if(backend == DNN_BACKEND_CUDA)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
+    testONNXModels("gru_cell_batchsize_5_seqlen_5", npy, 0, 0, false, false);
+}
+
+TEST_P(Test_ONNX_layers, gru_cell_batchsize_1_seqlen_50)
+{
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2022010000)
+    // IE exception: Node GRU_22 was not assigned on any pointed device
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16,
+                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION
+        );
+#endif
+    if(backend == DNN_BACKEND_CUDA)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
+    testONNXModels("gru_cell_batchsize_1_seqlen_50", npy, 0, 0, false, false);
+}
+
 TEST_P(Test_ONNX_layers, GRU_bidirectional)
 {
     testONNXModels("gru_bi", npy, 0, 0, false, false);
@@ -1323,6 +1365,27 @@ TEST_P(Test_ONNX_layers, LSTM_cell_with_peepholes)
     testONNXModels("lstm_cell_with_peepholes", npy, 0, 0, false, false);
 }
 
+TEST_P(Test_ONNX_layers, LSTM_cell_batchsize_50_seqlen_1)
+{
+    if(backend == DNN_BACKEND_CUDA)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
+    testONNXModels("lstm_cell_batchsize_50_seqlen_1", npy, 0, 0, false, false);
+}
+
+TEST_P(Test_ONNX_layers, LSTM_cell_batchsize_1_seqlen_50)
+{
+    if(backend == DNN_BACKEND_CUDA)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
+    testONNXModels("lstm_cell_batchsize_1_seqlen_50", npy, 0, 0, false, false);
+}
+
+TEST_P(Test_ONNX_layers, LSTM_cell_batchsize_5_seqlen_5)
+{
+    if(backend == DNN_BACKEND_CUDA)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
+    testONNXModels("lstm_cell_batchsize_5_seqlen_5", npy, 0, 0, false, false);
+}
+
 TEST_P(Test_ONNX_layers, Pad2d_Unfused)
 {
     testONNXModels("ReflectionPad2d");