Merge pull request #9811 from dkurt:prelu_with_shared_channels

Vadim Pisarevsky · 7 years ago
commit 0be1f4a573
1. modules/dnn/include/opencv2/dnn/all_layers.hpp (2 changed lines)
2. modules/dnn/src/caffe/caffe_importer.cpp (6 changed lines)
3. modules/dnn/src/init.cpp (1 changed line)
4. modules/dnn/src/layers/elementwise_layers.cpp (9 changed lines)
5. modules/dnn/test/test_layers.cpp (5 changed lines)

modules/dnn/include/opencv2/dnn/all_layers.hpp
@@ -422,7 +422,7 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
 class CV_EXPORTS ChannelsPReLULayer : public ActivationLayer
 {
 public:
-    static Ptr<ChannelsPReLULayer> create(const LayerParams& params);
+    static Ptr<Layer> create(const LayerParams& params);
 };

 class CV_EXPORTS ELULayer : public ActivationLayer
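The return type here is widened from Ptr<ChannelsPReLULayer> to Ptr<Layer> because create() may now hand back a different concrete layer (a plain ReLU) when the slope is shared across channels; see the elementwise_layers.cpp hunk below. A minimal stand-alone sketch of why the wider type is required (the classes are hypothetical stand-ins, not the OpenCV ones):

    #include <memory>

    struct Layer               { virtual ~Layer() {} };
    struct ReLULayer  : Layer  {};
    struct PReLULayer : Layer  {};

    // A factory typed as shared_ptr<PReLULayer> could not return the sibling
    // ReLULayer; widening to the base type makes the substitution legal.
    std::shared_ptr<Layer> createPReLU(bool channelShared)
    {
        if (channelShared)
            return std::make_shared<ReLULayer>();  // cheaper equivalent layer
        return std::make_shared<PReLULayer>();
    }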

modules/dnn/src/caffe/caffe_importer.cpp
@@ -216,7 +216,7 @@ public:
             shape.push_back((int)_shape.dim(i));
         }
         else
-            CV_Error(Error::StsError, "Unknown shape of input blob");
+            shape.resize(1, 1);  // Is a scalar.
     }

     void blobFromProto(const caffe::BlobProto &pbBlob, cv::Mat &dstBlob)
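This importer change is what lets a shared-slope PReLU weight through: Caffe serializes the single slope as a BlobProto whose shape has no dimensions, which previously hit the CV_Error branch. A sketch of the fallback in isolation (the helper name and plain-vector interface are illustrative, not the importer's actual API):

    #include <vector>

    // A dimension-less proto blob now maps to the shape {1} (a scalar)
    // instead of raising an error.
    std::vector<int> shapeFromProtoDims(const std::vector<int>& dims)
    {
        std::vector<int> shape(dims.begin(), dims.end());
        if (shape.empty())
            shape.resize(1, 1);  // no dims recorded: treat as a scalar
        return shape;
    }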
modules/dnn/src/caffe/caffe_importer.cpp (second hunk)
@@ -274,9 +274,9 @@ public:
     struct BlobNote
     {
         BlobNote(const std::string &_name, int _layerId, int _outNum) :
-            name(_name.c_str()), layerId(_layerId), outNum(_outNum) {}
+            name(_name), layerId(_layerId), outNum(_outNum) {}

-        const char *name;
+        std::string name;
         int layerId, outNum;
     };
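The BlobNote change is a lifetime fix that rides along: caching _name.c_str() stored a pointer into a string the struct did not own, which dangles once that string is destroyed or its buffer reallocates. A self-contained sketch of the bug class (the struct names are illustrative):

    #include <cstdio>
    #include <string>

    struct BadNote  { const char* name; };  // borrows: dangles if the source dies
    struct GoodNote { std::string name; };  // owns its own copy

    int main()
    {
        BadNote bad;
        {
            std::string tmp = "conv1";
            bad.name = tmp.c_str();       // points into tmp's buffer
        }                                 // tmp destroyed: bad.name now dangles
        // printf("%s\n", bad.name);      // undefined behavior if uncommented

        GoodNote good{std::string("conv1")};  // copies the characters
        printf("%s\n", good.name.c_str());    // safe
        return 0;
    }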

modules/dnn/src/init.cpp
@@ -97,6 +97,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(ReLU, ReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(ReLU6, ReLU6Layer);
     CV_DNN_REGISTER_LAYER_CLASS(ChannelsPReLU, ChannelsPReLULayer);
+    CV_DNN_REGISTER_LAYER_CLASS(PReLU, ChannelsPReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(Sigmoid, SigmoidLayer);
     CV_DNN_REGISTER_LAYER_CLASS(TanH, TanHLayer);
     CV_DNN_REGISTER_LAYER_CLASS(ELU, ELULayer);
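Registering the type string PReLU (Caffe's own name for the layer) lets the importer instantiate it directly, with no renaming step. The macro wraps the public LayerFactory registration; a user-level equivalent for a custom alias would look roughly like this sketch (the alias string and wrapper function are assumptions, not part of this PR):

    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    // Constructor callback in the shape LayerFactory expects.
    static Ptr<Layer> createPReLUAlias(LayerParams& params)
    {
        return ChannelsPReLULayer::create(params);
    }

    int main()
    {
        // Map a custom type string onto the existing implementation.
        LayerFactory::registerLayer("MyPReLU", createPReLUAlias);
        return 0;
    }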

modules/dnn/src/layers/elementwise_layers.cpp
@@ -754,8 +754,15 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
     return l;
 }

-Ptr<ChannelsPReLULayer> ChannelsPReLULayer::create(const LayerParams& params)
+Ptr<Layer> ChannelsPReLULayer::create(const LayerParams& params)
 {
+    CV_Assert(params.blobs.size() == 1);
+    if (params.blobs[0].total() == 1)
+    {
+        LayerParams reluParams = params;
+        reluParams.set("negative_slope", params.blobs[0].at<float>(0));
+        return ReLULayer::create(reluParams);
+    }
     Ptr<ChannelsPReLULayer> l(new ElementWiseLayer<ChannelsPReLUFunctor>(ChannelsPReLUFunctor(params.blobs[0])));
     l->setParamsFrom(params);
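The shortcut above rests on a simple identity: PReLU computes f(x) = x for x >= 0 and f(x) = a_c * x otherwise, with one learned slope per channel. When the weight blob holds a single element (Caffe's channel_shared: true), every channel uses the same slope a, which is exactly ReLU with negative_slope = a, so the cheaper layer can be substituted. A stand-alone numeric check of the equivalence:

    #include <algorithm>
    #include <cstdio>

    float prelu(float x, float a)     { return x >= 0.f ? x : a * x; }
    float leakyRelu(float x, float s) { return std::max(x, 0.f) + s * std::min(x, 0.f); }

    int main()
    {
        const float a = 0.25f;  // the single shared slope
        for (float x : {-2.f, -0.5f, 0.f, 1.f, 3.f})
            printf("x=%5.2f  prelu=%6.3f  leaky_relu=%6.3f\n",
                   x, prelu(x, a), leakyRelu(x, a));
        return 0;
    }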

modules/dnn/test/test_layers.cpp
@@ -279,6 +279,11 @@ TEST(Layer_Test_Eltwise, Accuracy)
     testLayerUsingCaffeModels("layer_eltwise");
 }

+TEST(Layer_Test_PReLU, Accuracy)
+{
+    testLayerUsingCaffeModels("layer_prelu", DNN_TARGET_CPU, true);
+}
+
 //template<typename XMat>
 //static void test_Layer_Concat()
 //{
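With the alias registered and the scalar weight blob accepted, a Caffe model using channel-shared PReLU should import and run end to end. A hedged usage sketch (the file names and the 224x224 input size are placeholders, not files from this PR):

    #include <cstdio>
    #include <opencv2/dnn.hpp>
    #include <opencv2/imgcodecs.hpp>
    using namespace cv;
    using namespace cv::dnn;

    int main()
    {
        Net net = readNetFromCaffe("model.prototxt", "model.caffemodel");
        Mat img = imread("image.jpg");
        net.setInput(blobFromImage(img, 1.0, Size(224, 224)));
        Mat out = net.forward();
        printf("output dims: %d\n", out.dims);
        return 0;
    }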
