diff --git a/modules/dnn/src/caffe/layer_loaders.cpp b/modules/dnn/src/caffe/layer_loaders.cpp
index a6edef749..7878b96eb 100644
--- a/modules/dnn/src/caffe/layer_loaders.cpp
+++ b/modules/dnn/src/caffe/layer_loaders.cpp
@@ -72,17 +72,22 @@ Ptr<Layer> createLayerFromCaffe<PoolingLayer>(LayerParams &params)
                            pad.height, pad.width, stride.height, stride.width, padMode);
     //getCaffeConvParams(params, kernel, pad, stride);
 
+    Ptr<PoolingLayer> l;
     if (!globalPooling)
-        return Ptr<Layer>(PoolingLayer::create(type, kernel, stride, pad, padMode));
+        l = PoolingLayer::create(type, kernel, stride, pad, padMode);
     else
-        return Ptr<Layer>(PoolingLayer::createGlobal(type));
+        l = PoolingLayer::createGlobal(type);
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<>
 Ptr<Layer> createLayerFromCaffe<SoftmaxLayer>(LayerParams &params)
 {
     int axis = params.get<int>("axis", 1);
-    return Ptr<Layer>(SoftmaxLayer::create(axis));
+    Ptr<SoftmaxLayer> l(SoftmaxLayer::create(axis));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<> //InnerProduct specialization
@@ -129,17 +134,21 @@ Ptr<Layer> createLayerFromCaffe<LRNLayer>(LayerParams& params)
     double bias = params.get<double>("bias", 1);
     bool normBySize = params.get<bool>("norm_by_size", true);
 
-    return Ptr<Layer>(LRNLayer::create(type, size, alpha, beta, bias, normBySize));
+    Ptr<LRNLayer> l(LRNLayer::create(type, size, alpha, beta, bias, normBySize));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<>
 Ptr<Layer> createLayerFromCaffe<MVNLayer>(LayerParams &params)
 {
-    return Ptr<Layer>(MVNLayer::create(
+    Ptr<MVNLayer> l(MVNLayer::create(
         params.get<bool>("normalize_variance", true),
         params.get<bool>("across_channels", false),
         params.get<double>("eps", 1e-9)
     ));
+    l->setParamsFrom(params);
+    return l;
 }
 
 /* Reshape layers */
@@ -164,13 +173,17 @@ Ptr<Layer> createLayerFromCaffe<ReshapeLayer>(LayerParams &params)
     else
         newShape = Shape::all(0);
 
-    return Ptr<Layer>(ReshapeLayer::create(newShape, applyingRange, enableReordering));
+    Ptr<ReshapeLayer> l(ReshapeLayer::create(newShape, applyingRange, enableReordering));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<>
 Ptr<Layer> createLayerFromCaffe<ConcatLayer>(LayerParams& params)
 {
-    return Ptr<Layer>(ConcatLayer::create(params.get<int>("axis", 1)));
+    Ptr<ConcatLayer> l(ConcatLayer::create(params.get<int>("axis", 1)));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<>
@@ -189,7 +202,9 @@ Ptr<Layer> createLayerFromCaffe<SplitLayer>(LayerParams &params)
         outputsCount = -1;
     }
 
-    return Ptr<Layer>(SplitLayer::create(outputsCount));
+    Ptr<SplitLayer> l(SplitLayer::create(outputsCount));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<>
@@ -197,9 +212,10 @@ Ptr<Layer> createLayerFromCaffe<SliceLayer>(LayerParams& params)
 {
     int axis = params.get<int>("axis", 1);
 
+    Ptr<SliceLayer> l;
     if (!params.has("slice_point"))
     {
-        return Ptr<Layer>(SliceLayer::create(axis));
+        l = SliceLayer::create(axis);
     }
     else
     {
@@ -208,8 +224,10 @@ Ptr<Layer> createLayerFromCaffe<SliceLayer>(LayerParams& params)
         for (int i = 0; i < indicesValue.size(); i++)
             sliceIndices[i] = indicesValue.get<int>(i);
 
-        return Ptr<Layer>(SliceLayer::create(axis, sliceIndices));
+        l = SliceLayer::create(axis, sliceIndices);
     }
+    l->setParamsFrom(params);
+    return l;
 }
 
 /* Activation layers */
@@ -224,7 +242,9 @@ template<> //ReLU specialization
 Ptr<Layer> createLayerFromCaffe<ReLULayer>(LayerParams& params)
 {
     float negative_slope = params.get<float>("negative_slope", 0.f);
-    return Ptr<Layer>(ReLULayer::create(negative_slope));
+    Ptr<ReLULayer> l(ReLULayer::create(negative_slope));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<> //Power specialization
@@ -233,7 +253,9 @@ Ptr<Layer> createLayerFromCaffe<PowerLayer>(LayerParams& params)
     float power = params.get<float>("power", 1.0f);
     float scale = params.get<float>("scale", 1.0f);
     float shift = params.get<float>("shift", 0.0f);
-    return Ptr<Layer>(PowerLayer::create(power, scale, shift));
+    Ptr<PowerLayer> l(PowerLayer::create(power, scale, shift));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<> //CropLayer specialization
@@ -249,7 +271,9 @@ Ptr<Layer> createLayerFromCaffe<CropLayer>(LayerParams& params)
             offset.push_back(paramOffset->get<int>(i));
     }
 
-    return Ptr<Layer>(CropLayer::create(start_axis, offset));
+    Ptr<CropLayer> l(CropLayer::create(start_axis, offset));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<> //Eltwise specialization
@@ -279,7 +303,9 @@ Ptr<Layer> createLayerFromCaffe<EltwiseLayer>(LayerParams& params)
             coeffs[i] = paramCoeff.get<int>(i);
         }
     }
-    return Ptr<Layer>(EltwiseLayer::create(op, coeffs));
+    Ptr<EltwiseLayer> l(EltwiseLayer::create(op, coeffs));
+    l->setParamsFrom(params);
+    return l;
 }
 
 template<> //BatchNormLayer specialization
@@ -313,6 +339,7 @@ Ptr<Layer> createLayerFromCaffe<MaxUnpoolLayer>(LayerParams& params)
 {
     Size outSize(params.get<int>("out_w"), params.get<int>("out_h"));
     Ptr<MaxUnpoolLayer> l = MaxUnpoolLayer::create(outSize);
+    l->setParamsFrom(params);
     return Ptr<Layer>(l);
 }