Merge pull request #14899 from alalek:dnn_fix_bnll_layer

* dnn: fix BNLLLayer implementation

details: https://github.com/BVLC/caffe/blame/1.0/src/caffe/layers/bnll_layer.cpp#L17

* dnn: enable OCV/OpenCL BNLL layer
Author: Alexander Alekhin (committed by GitHub)
commit 24790e4061 (parent c251915198)
3 files changed:
  modules/dnn/src/layers/elementwise_layers.cpp (30)
  modules/dnn/src/opencl/activations.cl (3)
  modules/dnn/test/test_halide_layers.cpp (5)
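For context: BNLL is the softplus function f(x) = log(1 + e^x). The old CPU path computed only log(1 + exp(-|x|)), which equals softplus only for x <= 0, so every positive input came out wrong. A minimal standalone sketch contrasting the naive reference, the old formula, and the fixed one (not part of the patch; the bnll_* helper names are mine):

    #include <cmath>
    #include <cstdio>

    // Naive softplus reference: log(1 + e^x). Overflows to inf for large x.
    static float bnll_reference(float x) { return std::log(1.f + std::exp(x)); }

    // Old OpenCV CPU formula: keeps only log(1 + e^-|x|) and drops the
    // max(x, 0) term, so it is wrong for every x > 0.
    static float bnll_old(float x) { return std::log(1.f + std::exp(-std::abs(x))); }

    // Fixed formula from this patch: exp() never sees a positive argument,
    // so it matches the reference without overflow.
    static float bnll_fixed(float x)
    {
        return x > 0 ? x + std::log(1.f + std::exp(-x)) : std::log(1.f + std::exp(x));
    }

    int main()
    {
        const float xs[] = { -5.f, -1.f, 0.f, 1.f, 5.f, 100.f };
        for (float x : xs)
            std::printf("x=%7.1f  reference=%10.4f  old=%8.4f  fixed=%10.4f\n",
                        x, bnll_reference(x), bnll_old(x), bnll_fixed(x));
        // x = 5:   reference ~ 5.0067, old ~ 0.0067 (wrong), fixed ~ 5.0067
        // x = 100: reference overflows to inf, fixed returns exactly 100
        return 0;
    }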

modules/dnn/src/layers/elementwise_layers.cpp
@@ -771,7 +771,8 @@ struct BNLLFunctor
         for( int i = 0; i < len; i++ )
         {
             float x = srcptr[i];
-            dstptr[i] = log(1.f + exp(-abs(x)));
+            // https://github.com/BVLC/caffe/blame/1.0/src/caffe/layers/bnll_layer.cpp#L17
+            dstptr[i] = x > 0 ? x + log(1. + exp(-x)) : log(1. + exp(x));
         }
     }
 }
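Why this form: for x > 0, log(1 + e^x) = log(e^x * (1 + e^-x)) = x + log(1 + e^-x), so the identity log(1 + e^x) = max(x, 0) + log(1 + e^-|x|) holds for all x. The old code kept only the second term, returning values near 0 instead of near x for large positive inputs; the new ternary keeps both terms and never passes a positive argument to exp(), so nothing overflows.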
@@ -779,8 +780,28 @@ struct BNLLFunctor
 #ifdef HAVE_OPENCL
     bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
     {
-        // TODO: implement OCL version
-        return false;
+        std::vector<UMat> inputs;
+        std::vector<UMat> outputs;
+
+        inps.getUMatVector(inputs);
+        outs.getUMatVector(outputs);
+        String buildopt = oclGetTMacro(inputs[0]);
+
+        for (size_t i = 0; i < inputs.size(); i++)
+        {
+            UMat& src = inputs[i];
+            UMat& dst = outputs[i];
+
+            ocl::Kernel kernel("BNLLForward", ocl::dnn::activations_oclsrc, buildopt);
+            kernel.set(0, (int)src.total());
+            kernel.set(1, ocl::KernelArg::PtrReadOnly(src));
+            kernel.set(2, ocl::KernelArg::PtrWriteOnly(dst));
+
+            size_t gSize = src.total();
+            CV_Assert(kernel.run(1, &gSize, NULL, false));
+        }
+
+        return true;
     }
 #endif
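A note on the OCL path: it mirrors the other activation functors in this file, launching one work item per element (gSize = src.total()) and building the kernel per input type; oclGetTMacro presumably supplies the -DT=... build option that the BNLLForward kernel in activations.cl depends on.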
@@ -788,7 +809,8 @@ struct BNLLFunctor
     void attachHalide(const Halide::Expr& input, Halide::Func& top)
     {
         Halide::Var x("x"), y("y"), c("c"), n("n");
-        top(x, y, c, n) = log(1.0f + exp(-abs(input)));
+        // https://github.com/BVLC/caffe/blame/1.0/src/caffe/layers/bnll_layer.cpp#L17
+        top(x, y, c, n) = max(input, 0) + log(1.0f + exp(-abs(input)));
     }
 #endif  // HAVE_HALIDE
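The Halide expression uses the branch-free spelling of the same identity, max(input, 0) + log(1 + exp(-abs(input))); it is numerically equivalent to the ternary in the CPU and OpenCL paths but avoids a data-dependent branch, which suits Halide's vectorized code generation.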

modules/dnn/src/opencl/activations.cl
@@ -98,7 +98,8 @@ __kernel void SigmoidForward(const int count, __global const T* in, __global T*
 __kernel void BNLLForward(const int n, __global const T* in, __global T* out) {
   int index = get_global_id(0);
   if (index < n) {
-    out[index] = in[index] > 0 ? in[index] + log(1.0f + exp(-in[index])) : log(1.0f + exp(in[index]));
+    T x = in[index];
+    out[index] = x > 0 ? x + log(1.0f + exp(-x)) : log(1.0f + exp(x));
   }
 }

modules/dnn/test/test_halide_layers.cpp
@@ -34,6 +34,11 @@ static void test(Mat& input, Net& net, Backend backendId, Target targetId, bool
     double l1, lInf;
     DNNTestLayer::getDefaultThresholds(backendId, targetId, &l1, &lInf);
+#if 0
+    std::cout << "l1=" << l1 << " lInf=" << lInf << std::endl;
+    std::cout << outputDefault.reshape(1, outputDefault.total()).t() << std::endl;
+    std::cout << outputHalide.reshape(1, outputDefault.total()).t() << std::endl;
+#endif
     normAssert(outputDefault, outputHalide, "", l1, lInf);
 }
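The #if 0 block is a debugging aid only: flipping it to #if 1 prints the comparison thresholds and both output blobs, which helps diagnose backend mismatches like the one fixed here; compiled out by default, it leaves the test's behavior unchanged.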
