From 6fd67d44b0349ffe6a441b94288a91e2dcf34417 Mon Sep 17 00:00:00 2001
From: Vitaliy Lyudvichenko
Date: Wed, 1 Jul 2015 12:12:49 +0300
Subject: [PATCH] Updated test. Added and successfully passed AlexNet
 reproducibility test. Small fixes.

---
 modules/dnn/include/opencv2/dnn/dnn.hpp       |   2 +-
 modules/dnn/src/caffe_importer.cpp            |  18 +-
 modules/dnn/src/dnn.cpp                       |  20 +-
 modules/dnn/src/layers/layers_common.cpp      |   2 +-
 modules/dnn/src/layers/layers_common.hpp      |   2 +-
 modules/dnn/src/layers/pooling_layer.cpp      |   6 +-
 modules/dnn/test/npy_blob.hpp                 |  33 +++
 modules/dnn/test/test_alexnet.cpp             |  75 +++++
 modules/dnn/test/test_caffe_importer.cpp      |  31 +-
 modules/dnn/test/test_layers.cpp              |  17 +-
 .../dnn/testdata/dnn/bvlc_alexnet.prototxt    | 276 ++++++++++++++++++
 ...eploy.prototxt => bvlc_googlenet.prototxt} |   0
 12 files changed, 420 insertions(+), 62 deletions(-)
 create mode 100644 modules/dnn/test/npy_blob.hpp
 create mode 100644 modules/dnn/test/test_alexnet.cpp
 create mode 100644 modules/dnn/testdata/dnn/bvlc_alexnet.prototxt
 rename modules/dnn/testdata/dnn/{googlenet_deploy.prototxt => bvlc_googlenet.prototxt} (100%)

diff --git a/modules/dnn/include/opencv2/dnn/dnn.hpp b/modules/dnn/include/opencv2/dnn/dnn.hpp
index b7074f8eb..daf167f57 100644
--- a/modules/dnn/include/opencv2/dnn/dnn.hpp
+++ b/modules/dnn/include/opencv2/dnn/dnn.hpp
@@ -147,7 +147,7 @@ namespace dnn
         Blob getBlob(BlobId outputName);
 
         void setParam(LayerId layer, int numParam, const Blob &blob);
-        void getParam(LayerId layer, int numParam);
+        Blob getParam(LayerId layer, int numParam = 0);
 
     private:

diff --git a/modules/dnn/src/caffe_importer.cpp b/modules/dnn/src/caffe_importer.cpp
index 1318ca8b0..87c94dd65 100644
--- a/modules/dnn/src/caffe_importer.cpp
+++ b/modules/dnn/src/caffe_importer.cpp
@@ -68,32 +68,30 @@ namespace
             bool isRepeated = field->is_repeated();
             const std::string &name = field->name();
 
-            std::cout << field->type_name() << " " << name << ":";
-
             #define GET_FIRST(Type) (isRepeated ? msgRefl->GetRepeated##Type(msg, field, 0) : msgRefl->Get##Type(msg, field))
             switch (type)
             {
             case FieldDescriptor::CPPTYPE_INT32:
-                std::cout << params.set(name, GET_FIRST(Int32));
+                params.set(name, GET_FIRST(Int32));
                 break;
             case FieldDescriptor::CPPTYPE_UINT32:
-                std::cout << params.set(name, GET_FIRST(UInt32));
+                params.set(name, GET_FIRST(UInt32));
                 break;
             case FieldDescriptor::CPPTYPE_DOUBLE:
-                std::cout << params.set(name, GET_FIRST(Double));
+                params.set(name, GET_FIRST(Double));
                 break;
             case FieldDescriptor::CPPTYPE_FLOAT:
-                std::cout << params.set(name, GET_FIRST(Float));
+                params.set(name, GET_FIRST(Float));
                 break;
             case FieldDescriptor::CPPTYPE_ENUM:
-                std::cout << params.set(name, GET_FIRST(Enum)->name());
+                params.set(name, GET_FIRST(Enum)->name());
                 break;
             case FieldDescriptor::CPPTYPE_BOOL:
-                std::cout << params.set(name, GET_FIRST(Bool));
+                params.set(name, GET_FIRST(Bool));
                 break;
             default:
-                std::cout << "unknown";
+                CV_Error(cv::Error::StsError, "Unknown type \"" + String(field->type_name()) + "\" in prototxt");
                 break;
             }
 
         }
@@ -214,8 +212,6 @@ namespace
             tops.assign(layer.top().begin(), layer.top().end());
             bottomsVec[li].assign(layer.bottom().begin(), layer.bottom().end());
 
-            std::cout << std::endl << "LAYER: " << name << std::endl;
-
             extractLayerParams(layer, layerParams);
             extractBinaryLayerParms(layer, layerParams);
 
diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index a5ea58e4b..fe214b705 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -312,10 +312,13 @@ struct Net::Impl
     {
         LayerData& ld = it->second;
 
-        std::cout << ld.name << std::endl;
-        std::cout << "Connected:" << std::endl;
-        for (std::set<int>::iterator j = ld.inputLayersId.begin(); j != ld.inputLayersId.end(); j++)
-            std::cout << layers[*j].name << std::endl;
+        std::cout << "Layer \"" << ld.name << "\"" << std::endl;
+        if (ld.inputBlobsId.size() > 0)
+        {
+            std::cout << "Connected to:" << std::endl;
+            for (std::set<int>::iterator j = ld.inputLayersId.begin(); j != ld.inputLayersId.end(); j++)
+                std::cout << layers[*j].name << std::endl;
+        }
         std::cout << std::endl;
     }
 }
@@ -519,6 +522,15 @@ Blob Net::getBlob(BlobId outputName)
     return ld.outputBlobs[found.oid];
 }
 
+Blob Net::getParam(LayerId layer, int numParam)
+{
+    LayerData &ld = impl->getLayerData(layer);
+
+    std::vector<Blob> &layerBlobs = ld.layerInstance->learnedParams;
+    CV_Assert(numParam < (int)layerBlobs.size());
+    return layerBlobs[numParam];
+}
+
 Importer::~Importer()
 {

diff --git a/modules/dnn/src/layers/layers_common.cpp b/modules/dnn/src/layers/layers_common.cpp
index b4bec3f66..cea9c31cc 100644
--- a/modules/dnn/src/layers/layers_common.cpp
+++ b/modules/dnn/src/layers/layers_common.cpp
@@ -45,4 +45,4 @@ void getKernelParams(LayerParams &params, int &kernelH, int &kernelW, int &padH,
     }
 }
 
-}
\ No newline at end of file
+}

diff --git a/modules/dnn/src/layers/layers_common.hpp b/modules/dnn/src/layers/layers_common.hpp
index 117c3b676..d717ad684 100644
--- a/modules/dnn/src/layers/layers_common.hpp
+++ b/modules/dnn/src/layers/layers_common.hpp
@@ -12,4 +12,4 @@ void getKernelParams(LayerParams &params, int &kernelH, int &kernelW, int &padH,
 }
 }
 
-#endif
\ No newline at end of file
+#endif

diff --git a/modules/dnn/src/layers/pooling_layer.cpp b/modules/dnn/src/layers/pooling_layer.cpp
index 61d5366c9..cadb114c6 100644
--- a/modules/dnn/src/layers/pooling_layer.cpp
+++ b/modules/dnn/src/layers/pooling_layer.cpp
@@ -64,8 +64,8 @@ namespace dnn
     {
         CV_Assert(inputs.size() > 0);
 
-        inH = inputs[0]->cols();
-        inW = inputs[0]->rows();
+        inW = inputs[0]->cols();
+        inH = inputs[0]->rows();
         computeOutputShape(inH, inW);
 
         outputs.resize(inputs.size());
@@ -150,4 +150,4 @@ namespace dnn
         }
     }
 }
-}
\ No newline at end of file
+}

diff --git a/modules/dnn/test/npy_blob.hpp b/modules/dnn/test/npy_blob.hpp
new file mode 100644
index 000000000..997b36fa7
--- /dev/null
+++ b/modules/dnn/test/npy_blob.hpp
@@ -0,0 +1,33 @@
+#ifndef __OPENCV_DNN_TEST_NPY_BLOB_HPP__
+#define __OPENCV_DNN_TEST_NPY_BLOB_HPP__
+#include "test_precomp.hpp"
+
+#ifdef __GNUC__
+# pragma GCC diagnostic ignored "-Wunused-parameter"
+# pragma GCC diagnostic push
+#endif
+
+#include "cnpy.h"
+
+#ifdef __GNUC__
+# pragma GCC diagnostic pop
+#endif
+
+inline cv::dnn::Blob blobFromNPY(const cv::String &path)
+{
+    cnpy::NpyArray npyBlob = cnpy::npy_load(path.c_str());
+
+    cv::dnn::Blob blob;
+    blob.fill((int)npyBlob.shape.size(), (int*)&npyBlob.shape[0], CV_32F, npyBlob.data);
+
+    npyBlob.destruct();
+    return blob;
+}
+
+inline void saveBlobToNPY(cv::dnn::Blob &blob, const cv::String &path)
+{
+    cv::Vec4i shape = blob.shape();
+    cnpy::npy_save(path.c_str(), blob.ptr<float>(), (unsigned*)&shape[0], 4);
+}
+
+#endif

diff --git a/modules/dnn/test/test_alexnet.cpp b/modules/dnn/test/test_alexnet.cpp
new file mode 100644
index 000000000..6c9cab51d
--- /dev/null
+++ b/modules/dnn/test/test_alexnet.cpp
@@ -0,0 +1,75 @@
+#include "test_precomp.hpp"
+#include "npy_blob.hpp"
+
+namespace cvtest
+{
+
+using namespace std;
+using namespace testing;
+using namespace cv;
+using namespace cv::dnn;
+
+static std::string getOpenCVExtraDir()
+{
+    return cvtest::TS::ptr()->get_data_path();
+}
+
+template<typename TStr>
+static std::string getTestFile(TStr filename)
+{
+    return (getOpenCVExtraDir() + "/dnn/") + filename;
+}
+
+inline void normAssert(InputArray ref, InputArray get, const char *comment = "")
+{
+    double normL1 = cvtest::norm(ref, get, NORM_L1) / ref.getMat().total();
+    EXPECT_LE(normL1, 0.0001) << comment;
+
+    double normInf = cvtest::norm(ref, get, NORM_INF);
+    EXPECT_LE(normInf, 0.001) << comment;
+}
+
+inline void normAssert(Blob ref, Blob test, const char *comment = "")
+{
+    normAssert(ref.getMatRef(), test.getMatRef(), comment);
+}
+
+TEST(Reproducibility_AlexNet, Accuracy)
+{
+    Net net;
+    {
+        Ptr<Importer> importer = createCaffeImporter(getTestFile("bvlc_alexnet.prototxt"), getTestFile("bvlc_alexnet.caffemodel"));
+        ASSERT_TRUE(importer != NULL);
+        importer->populateNet(net);
+    }
+
+    std::vector<Mat> inpMats(2);
+    inpMats[0] = imread(getTestFile("alexnet_0.png"));
+    inpMats[1] = imread(getTestFile("alexnet_1.png"));
+    ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());
+
+    inpMats[0].convertTo(inpMats[0], CV_32F);
+    Blob inp(inpMats[0]);
+
+    net.setBlob("data", inp);
+
+    net.forward("conv1");
+    normAssert(blobFromNPY(getTestFile("alexnet_conv1.npy")), net.getBlob("conv1"), "conv1");
+    //saveBlobToNPY(convBlob, getTestFile("alexnet_conv1_my.npy"));
+
+    net.forward("relu1");
+    normAssert(blobFromNPY(getTestFile("alexnet_relu1.npy")), net.getBlob("relu1"), "relu1");
+
+    net.forward("norm1");
+    normAssert(blobFromNPY(getTestFile("alexnet_norm1.npy")), net.getBlob("norm1"), "norm1");
+
+    net.forward();
+    Blob out = net.getBlob("prob");
+    Blob ref = blobFromNPY(getTestFile("alexnet.npy"));
+    std::cout << out.shape() << " vs " << ref.shape() << std::endl;
+    Mat mOut(1, 1000, CV_32F, out.rawPtr());
+    Mat mRef(1, 1000, CV_32F, ref.rawPtr());
+    normAssert(mOut, mRef);
+}
+
+}

diff --git a/modules/dnn/test/test_caffe_importer.cpp b/modules/dnn/test/test_caffe_importer.cpp
index 77536eb40..94ec2cff2 100644
--- a/modules/dnn/test/test_caffe_importer.cpp
+++ b/modules/dnn/test/test_caffe_importer.cpp
@@ -4,7 +4,6 @@ namespace cvtest
 {
 
 using namespace std;
-using namespace std::tr1;
 using namespace testing;
 using namespace cv;
 using namespace cv::dnn;
@@ -25,39 +24,17 @@ TEST(ReadCaffe_GTSRB, Accuracy)
     Net net;
     {
         Ptr<Importer> importer = createCaffeImporter(getTestFile("gtsrb.prototxt"), "");
+        ASSERT_TRUE(importer != NULL);
         importer->populateNet(net);
     }
-
-//    Mat img = imread(getTestFile("sign_50.ppm"));
-//    CV_Assert(!img.empty());
-//    img.convertTo(img, CV_32F, 1.0 / 255);
-//    resize(img, img, cv::Size(48, 48));
-//    Blob imgBlob(img);
-
-//    net.setBlob("input", imgBlob);
-//    net.forward();
-
-//    Blob res = net.getBlob("loss");
-//    for (int n = 0; n < 1; n++)
-//    {
-//        Mat slice = Mat(res.channels() * res.rows(), res.cols(), CV_32F, res.ptr(n));
-
-//        double maxv;
-//        std::vector<int> maxIdx;
-//        minMaxLoc(slice, NULL, &maxv, NULL, &maxIdx);
-
-//        int bestClass = maxIdx[0];
-//        std::cout << "Best class: #" << bestClass << std::endl;
-
-//        //imwrite(getTestFile("vis.png"), slice*(255.0 / maxv));
-//    }
 }
 
-TEST(ReadCaffe_GoogleNet, Accuracy)
+TEST(ReadCaffe_GoogLeNet, Accuracy)
 {
     Net net;
     {
-        Ptr<Importer> importer = createCaffeImporter(getTestFile("googlenet_deploy.prototxt"), "");
+        Ptr<Importer> importer = createCaffeImporter(getTestFile("bvlc_googlenet.prototxt"), "");
+        ASSERT_TRUE(importer != NULL);
         importer->populateNet(net);
     }
 }

diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index a0af9407f..67ae1a0c9 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -1,6 +1,6 @@
 #include "test_precomp.hpp"
 #include
-#include "cnpy.h"
+#include "npy_blob.hpp"
 
 namespace cvtest
 {
@@ -32,21 +32,10 @@ bool isEqual(const cv::Vec &l, const cv::Vec &r)
     return true;
 }
 
-Blob loadNpyBlob(String name)
-{
-    cnpy::NpyArray npyBlob = cnpy::npy_load(getTestFile(name));
-
-    Blob blob;
-    blob.fill((int)npyBlob.shape.size(), (int*)&npyBlob.shape[0], CV_32F, npyBlob.data);
-
-    npyBlob.destruct();
-    return blob;
-}
-
 static void testLayer(String proto, String caffemodel = String())
 {
-    Blob inp = loadNpyBlob("blob.npy");
-    Blob ref = loadNpyBlob(proto + ".caffe.npy");
+    Blob inp = blobFromNPY(getTestFile("blob.npy"));
+    Blob ref = blobFromNPY(getTestFile(proto + ".caffe.npy"));
 
     Net net;
     {

diff --git a/modules/dnn/testdata/dnn/bvlc_alexnet.prototxt b/modules/dnn/testdata/dnn/bvlc_alexnet.prototxt
new file mode 100644
index 000000000..8a565803c
--- /dev/null
+++ b/modules/dnn/testdata/dnn/bvlc_alexnet.prototxt
@@ -0,0 +1,276 @@
+name: "AlexNet"
+input: "data"
+input_dim: 10
+input_dim: 3
+input_dim: 227
+input_dim: 227
+layer {
+  name: "conv1"
+  type: "Convolution"
+  bottom: "data"
+  top: "conv1"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 96
+    kernel_size: 11
+    stride: 4
+  }
+}
+layer {
+  name: "relu1"
+  type: "ReLU"
+  bottom: "conv1"
+  top: "relu1"
+}
+layer {
+  name: "norm1"
+  type: "LRN"
+  bottom: "relu1"
+  top: "norm1"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool1"
+  type: "Pooling"
+  bottom: "norm1"
+  top: "pool1"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv2"
+  type: "Convolution"
+  bottom: "pool1"
+  top: "conv2"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 2
+    kernel_size: 5
+    group: 2
+  }
+}
+layer {
+  name: "relu2"
+  type: "ReLU"
+  bottom: "conv2"
+  top: "conv2"
+}
+layer {
+  name: "norm2"
+  type: "LRN"
+  bottom: "conv2"
+  top: "norm2"
+  lrn_param {
+    local_size: 5
+    alpha: 0.0001
+    beta: 0.75
+  }
+}
+layer {
+  name: "pool2"
+  type: "Pooling"
+  bottom: "norm2"
+  top: "pool2"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "conv3"
+  type: "Convolution"
+  bottom: "pool2"
+  top: "conv3"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+  }
+}
+layer {
+  name: "relu3"
+  type: "ReLU"
+  bottom: "conv3"
+  top: "conv3"
+}
+layer {
+  name: "conv4"
+  type: "Convolution"
+  bottom: "conv3"
+  top: "conv4"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 384
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu4"
+  type: "ReLU"
+  bottom: "conv4"
+  top: "conv4"
+}
+layer {
+  name: "conv5"
+  type: "Convolution"
+  bottom: "conv4"
+  top: "conv5"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  convolution_param {
+    num_output: 256
+    pad: 1
+    kernel_size: 3
+    group: 2
+  }
+}
+layer {
+  name: "relu5"
+  type: "ReLU"
+  bottom: "conv5"
+  top: "conv5"
+}
+layer {
+  name: "pool5"
+  type: "Pooling"
+  bottom: "conv5"
+  top: "pool5"
+  pooling_param {
+    pool: MAX
+    kernel_size: 3
+    stride: 2
+  }
+}
+layer {
+  name: "fc6"
+  type: "InnerProduct"
+  bottom: "pool5"
+  top: "fc6_0"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu6"
+  type: "ReLU"
+  bottom: "fc6_0"
+  top: "fc6_1"
+}
+layer {
+  name: "drop6"
+  type: "Dropout"
+  bottom: "fc6_1"
+  top: "fc6_2"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc7"
+  type: "InnerProduct"
+  bottom: "fc6_2"
+  top: "fc7_0"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 4096
+  }
+}
+layer {
+  name: "relu7"
+  type: "ReLU"
+  bottom: "fc7_0"
+  top: "fc7_1"
+}
+layer {
+  name: "drop7"
+  type: "Dropout"
+  bottom: "fc7_1"
+  top: "fc7_2"
+  dropout_param {
+    dropout_ratio: 0.5
+  }
+}
+layer {
+  name: "fc8"
+  type: "InnerProduct"
+  bottom: "fc7_2"
+  top: "fc8"
+  param {
+    lr_mult: 1
+    decay_mult: 1
+  }
+  param {
+    lr_mult: 2
+    decay_mult: 0
+  }
+  inner_product_param {
+    num_output: 1000
+  }
+}
+layer {
+  name: "prob"
+  type: "Softmax"
+  bottom: "fc8"
+  top: "prob"
+}

diff --git a/modules/dnn/testdata/dnn/googlenet_deploy.prototxt b/modules/dnn/testdata/dnn/bvlc_googlenet.prototxt
similarity index 100%
rename from modules/dnn/testdata/dnn/googlenet_deploy.prototxt
rename to modules/dnn/testdata/dnn/bvlc_googlenet.prototxt