Made some deep learning layers' params public (#1134)

pull/1190/head
Dmitry Kurtaev 8 years ago committed by Vadim Pisarevsky
parent 78fabfe606
commit 2abf45d1ee
  1. 13
      modules/dnn/include/opencv2/dnn/all_layers.hpp
  2. 3
      modules/dnn/src/layers/batch_norm_layer.cpp
  3. 5
      modules/dnn/src/layers/blank_layer.cpp
  4. 4
      modules/dnn/src/layers/elementwise_layers.cpp
  5. 4
      modules/dnn/src/layers/max_unpooling_layer.cpp
  6. 2
      modules/dnn/src/layers/scale_layer.cpp

@@ -329,6 +329,8 @@ namespace dnn
class CV_EXPORTS ReLULayer : public Layer
{
public:
float negativeSlope;
static Ptr<ReLULayer> create(const LayerParams &params);
};
@@ -365,6 +367,8 @@ namespace dnn
class CV_EXPORTS PowerLayer : public Layer
{
public:
float power, scale, shift;
static Ptr<PowerLayer> create(const LayerParams &params);
};
@@ -395,18 +399,27 @@ namespace dnn
class CV_EXPORTS BatchNormLayer : public Layer
{
public:
bool hasWeights, hasBias;
float epsilon;
static Ptr<BatchNormLayer> create(const LayerParams &params);
};
class CV_EXPORTS MaxUnpoolLayer : public Layer
{
public:
Size poolKernel;
Size poolPad;
Size poolStride;
static Ptr<MaxUnpoolLayer> create(const LayerParams &params);
};
class CV_EXPORTS ScaleLayer : public Layer
{
public:
bool hasBias;
static Ptr<ScaleLayer> create(const LayerParams& params);
};

@@ -91,9 +91,6 @@ public:
}
return flops;
}
bool hasWeights, hasBias;
float epsilon;
};
Ptr<BatchNormLayer> BatchNormLayer::create(const LayerParams& params)

@@ -47,7 +47,10 @@ namespace dnn
class BlankLayerImpl : public BlankLayer
{
public:
BlankLayerImpl(const LayerParams&) {}
BlankLayerImpl(const LayerParams& params)
{
setParamsFrom(params);
}
bool getMemoryShapes(const std::vector<MatShape> &inputs,
const int requiredOutputs,

@@ -261,6 +261,7 @@ Ptr<ReLULayer> ReLULayer::create(const LayerParams& params)
float negativeSlope = params.get<float>("negative_slope", 0.f);
Ptr<ReLULayer> l(new ElementWiseLayer<ReLUFunctor>(true, ReLUFunctor(negativeSlope)));
l->setParamsFrom(params);
l->negativeSlope = negativeSlope;
return l;
}
@@ -306,6 +307,9 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
(PowerLayer*)(new ElementWiseLayer<PowerFunctor1>(false, PowerFunctor1(scale, shift))) :
(PowerLayer*)(new ElementWiseLayer<PowerFunctor>(true, PowerFunctor(power, scale, shift))));
l->setParamsFrom(params);
l->power = power;
l->scale = scale;
l->shift = shift;
return l;
}

@@ -81,10 +81,6 @@ public:
}
}
}
Size poolKernel;
Size poolPad;
Size poolStride;
};
Ptr<MaxUnpoolLayer> MaxUnpoolLayer::create(const LayerParams& params)

@@ -67,8 +67,6 @@ public:
}
return flops;
}
bool hasBias;
};

Loading…
Cancel
Save