@@ -1202,6 +1202,105 @@ struct PowerFunctor : public BaseFunctor

    int64 getFLOPSPerElement() const { return power == 1 ? 2 : 10; }
};

struct ExpFunctor : public BaseFunctor
{
    typedef ExpLayer Layer;
    float base, scale, shift;
    float normScale, normShift;

    ExpFunctor(float base_ = -1.f, float scale_ = 1.f, float shift_ = 0.f)
        : base(base_), scale(scale_), shift(shift_)
    {
        // For base > 0 :
        // y = base^(scale * input + shift)
        // ln(y) = ln(base)*(scale * input + shift)
        // y = exp((ln(base)*scale) * input + (ln(base)*shift))
        // y = exp(normalized_scale * input + normalized_shift)
        CV_Check(base, base == -1.f || base > 0.f, "Unsupported 'base' value");
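        // base == -1 is a sentinel for the natural base e; ln(base) is then taken as 1,
        // so scale and shift pass through unchanged.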
        const float ln_base = (base == -1.f) ? 1.f : log(base);
        normScale = scale * ln_base;
        normShift = shift * ln_base;
    }
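
    // Example: base = 2, scale = 3, shift = 0 gives normScale = 3*ln(2) ~= 2.079,
    // so apply() computes exp(2.079f*x) == 2^(3*x), matching the derivation above.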

    bool supportBackend(int backendId, int targetId)
    {
        return backendId == DNN_BACKEND_OPENCV ||
               backendId == DNN_BACKEND_HALIDE || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
    }

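    // CPU path: elementwise y = exp(normScale*x + normShift) over the channel planes [cn0, cn1).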
    void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
    {
        float a = normScale, b = normShift;
        for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
        {
            for( int i = 0; i < len; i++ )
            {
                float x = srcptr[i];
                dstptr[i] = exp(a*x + b);
            }
        }
    }

#ifdef HAVE_OPENCL
    bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
    {
        std::vector<UMat> inputs;
        std::vector<UMat> outputs;

        inps.getUMatVector(inputs);
        outs.getUMatVector(outputs);
        String buildopt = oclGetTMacro(inputs[0]);

        for (size_t i = 0; i < inputs.size(); i++)
        {
            UMat& src = inputs[i];
            UMat& dst = outputs[i];

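            // Argument order must match the ExpForward kernel in activations.cl:
            // (element count, src, dst, normScale, normShift).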
            ocl::Kernel kernel("ExpForward", ocl::dnn::activations_oclsrc, buildopt);
            kernel.set(0, (int)src.total());
            kernel.set(1, ocl::KernelArg::PtrReadOnly(src));
            kernel.set(2, ocl::KernelArg::PtrWriteOnly(dst));
            kernel.set(3, (float)normScale);
            kernel.set(4, (float)normShift);

            size_t gSize = src.total();
            CV_Assert(kernel.run(1, &gSize, NULL, false));
        }
        return true;
    }
#endif

#ifdef HAVE_HALIDE
    void attachHalide(const Halide::Expr& input, Halide::Func& top)
    {
        Halide::Var x("x"), y("y"), c("c"), n("n");
        top(x, y, c, n) = exp(normScale * input + normShift);
    }
#endif  // HAVE_HALIDE

#ifdef HAVE_DNN_IE_NN_BUILDER_2019
    InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
    {
        CV_Error(Error::StsNotImplemented, "");
    }
#endif  // HAVE_DNN_IE_NN_BUILDER_2019

#ifdef HAVE_DNN_NGRAPH
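    // Express y = exp(normScale*x + normShift) as Multiply -> Add -> Exp nodes,
    // broadcasting the two scalar constants NumPy-style.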
    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
    {
        auto scale_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
                                                                 ngraph::Shape{1}, &normScale);
        auto shift_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
                                                                 ngraph::Shape{1}, &normShift);
        auto mul = std::make_shared<ngraph::op::v1::Multiply>(scale_node, node, ngraph::op::AutoBroadcastType::NUMPY);
        auto scale_shift = std::make_shared<ngraph::op::v1::Add>(mul, shift_node, ngraph::op::AutoBroadcastType::NUMPY);
        return std::make_shared<ngraph::op::v0::Exp>(scale_shift);
    }
#endif  // HAVE_DNN_NGRAPH
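    // Rough per-element cost: one multiply, one add, and one exp, each counted once.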
    int64 getFLOPSPerElement() const { return 3; }
};

struct ChannelsPReLUFunctor : public BaseFunctor
{
    typedef ChannelsPReLULayer Layer;

@@ -1419,6 +1518,20 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)

    return l;
}

Ptr<ExpLayer> ExpLayer::create(const LayerParams& params)
{
    float base = params.get<float>("base", -1.0f);
    float scale = params.get<float>("scale", 1.0f);
    float shift = params.get<float>("shift", 0.0f);
    Ptr<ExpLayer> l(new ElementWiseLayer<ExpFunctor>(ExpFunctor(base, scale, shift)));
    l->setParamsFrom(params);
    l->base = base;
    l->scale = scale;
    l->shift = shift;

    return l;
}
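
// Usage sketch (illustrative, not part of the patch): create a standalone
// ExpLayer computing y = 2^(0.5*x + 1) via the "base"/"scale"/"shift"
// parameters read above.
//
//   LayerParams lp;
//   lp.set("base", 2.0f);
//   lp.set("scale", 0.5f);
//   lp.set("shift", 1.0f);
//   Ptr<ExpLayer> layer = ExpLayer::create(lp);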

Ptr<Layer> ChannelsPReLULayer::create(const LayerParams& params)
{
    CV_Assert(params.blobs.size() == 1);