@@ -2228,7 +2228,7 @@ public:
 
     static testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsForFusionTests()
     {
-        return dnnBackendsAndTargets(false, false, true, false, false, false); // OCV OpenCL + OCV CPU
+        return dnnBackendsAndTargets(false, false, true, false, true, false); // OCV OpenCL + OCV CPU + CUDA
     }
 };
@@ -2280,7 +2280,12 @@ TEST_P(ConvolutionActivationFusion, Accuracy)
             expectedFusedLayers.push_back(activId);
         }
     }
-
+    else if (backendId == DNN_BACKEND_CUDA)
+    {
+        if (actType == "ReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Swish" ||
+            actType == "Mish" || actType == "Sigmoid" || actType == "Power")
+            expectedFusedLayers.push_back(activId);
+    }
     TestLayerFusion::test(input, net, backendId, targetId, expectedFusedLayers);
 }
 INSTANTIATE_TEST_CASE_P(TestLayerFusion, ConvolutionActivationFusion, Combine(
@@ -2319,7 +2324,7 @@ TEST_P(ConvolutionEltwiseFusion, Accuracy)
     std::string eltwiseOp = get<1>(GetParam());
     bool weightedEltwise = get<2>(GetParam());
     if (eltwiseOp != "sum" && weightedEltwise)
-            throw SkipTestException("weighted eltwise not supported");
+        throw SkipTestException("weighted eltwise not supported");
 
     LayerParams eltwiseParams;
     TestLayerFusion::makeDefaultTestEltwiseLayer(eltwiseParams, eltwiseOp, weightedEltwise);
@@ -2332,7 +2337,11 @@ TEST_P(ConvolutionEltwiseFusion, Accuracy)
 
     Backend backendId = get<0>(get<3>(GetParam()));
     Target targetId = get<1>(get<3>(GetParam()));
-    TestLayerFusion::test(input, net, backendId, targetId);
+
+    std::vector<int> expectedFusedLayers;
+    if (backendId == DNN_BACKEND_CUDA && eltwiseOp == "sum" && !weightedEltwise)
+        expectedFusedLayers.push_back(eltwiseId);
+    TestLayerFusion::test(input, net, backendId, targetId, expectedFusedLayers);
 }
 INSTANTIATE_TEST_CASE_P(TestLayerFusion, ConvolutionEltwiseFusion, Combine(
     /* bias */ testing::Bool(),
@@ -2411,7 +2420,16 @@ TEST_P(ConvolutionEltwiseActivationFusion, Accuracy)
             }
         }
     }
-
+    else if(backendId == DNN_BACKEND_CUDA)
+    {
+        if (eltwiseOp == "sum" && !weightedEltwise)
+        {
+            expectedFusedLayers.push_back(eltwiseId);
+            if (actType == "ReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Swish" ||
+                actType == "Mish" || actType == "Sigmoid" || actType == "Power")
+                expectedFusedLayers.push_back(activId);
+        }
+    }
     TestLayerFusion::test(input, net, backendId, targetId, expectedFusedLayers);
 }
 INSTANTIATE_TEST_CASE_P(TestLayerFusion, ConvolutionEltwiseActivationFusion, Combine(
@@ -2486,7 +2504,16 @@ TEST_P(ConvolutionActivationEltwiseFusion, Accuracy)
             expectedFusedLayers.push_back(activId); // activation fused with convolution
         }
     }
-
+    else if(backendId == DNN_BACKEND_CUDA)
+    {
+        if (actType == "ReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Swish" ||
+            actType == "Mish" || actType == "Sigmoid" || actType == "Power")
+        {
+            expectedFusedLayers.push_back(activId);
+            if (eltwiseOp == "sum" && !weightedEltwise)
+                expectedFusedLayers.push_back(eltwiseId);
+        }
+    }
     TestLayerFusion::test(input, net, backendId, targetId, expectedFusedLayers);
 }
 INSTANTIATE_TEST_CASE_P(TestLayerFusion, ConvolutionActivationEltwiseFusion, Combine(