Updated test. Added and successfully passed AlexNet reproducibility test. Small fixes.

pull/265/head
Vitaliy Lyudvichenko 10 years ago
parent 761b037126
commit 6fd67d44b0
  1. modules/dnn/include/opencv2/dnn/dnn.hpp (2 changes)
  2. modules/dnn/src/caffe_importer.cpp (18 changes)
  3. modules/dnn/src/dnn.cpp (20 changes)
  4. modules/dnn/src/layers/layers_common.cpp (2 changes)
  5. modules/dnn/src/layers/layers_common.hpp (2 changes)
  6. modules/dnn/src/layers/pooling_layer.cpp (6 changes)
  7. modules/dnn/test/npy_blob.hpp (33 changes)
  8. modules/dnn/test/test_alexnet.cpp (75 changes)
  9. modules/dnn/test/test_caffe_importer.cpp (31 changes)
  10. modules/dnn/test/test_layers.cpp (17 changes)
  11. modules/dnn/testdata/dnn/bvlc_alexnet.prototxt (276 changes)
  12. modules/dnn/testdata/dnn/bvlc_googlenet.prototxt (0 changes)

@@ -147,7 +147,7 @@ namespace dnn
 Blob getBlob(BlobId outputName);
 void setParam(LayerId layer, int numParam, const Blob &blob);
-void getParam(LayerId layer, int numParam);
+Blob getParam(LayerId layer, int numParam = 0);
 private:

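Note: the hunk above turns getParam from a void declaration into an accessor that returns the parameter blob, with the index defaulting to 0. A minimal usage sketch, assuming LayerId accepts a layer name and following the usual Caffe convention that param 0 holds the weights and param 1 the bias (the layer name is illustrative):

    #include <opencv2/dnn/dnn.hpp>

    void inspectConv1(cv::dnn::Net &net)
    {
        cv::dnn::Blob weights = net.getParam("conv1");    // numParam defaults to 0 (weights)
        cv::dnn::Blob bias    = net.getParam("conv1", 1); // param 1 is the bias, when present
    }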
@@ -68,32 +68,30 @@ namespace
 bool isRepeated = field->is_repeated();
 const std::string &name = field->name();
-std::cout << field->type_name() << " " << name << ":";
 #define GET_FIRST(Type) (isRepeated ? msgRefl->GetRepeated##Type(msg, field, 0) : msgRefl->Get##Type(msg, field))
 switch (type)
 {
 case FieldDescriptor::CPPTYPE_INT32:
-std::cout << params.set(name, GET_FIRST(Int32));
+params.set(name, GET_FIRST(Int32));
 break;
 case FieldDescriptor::CPPTYPE_UINT32:
-std::cout << params.set(name, GET_FIRST(UInt32));
+params.set(name, GET_FIRST(UInt32));
 break;
 case FieldDescriptor::CPPTYPE_DOUBLE:
-std::cout << params.set(name, GET_FIRST(Double));
+params.set(name, GET_FIRST(Double));
 break;
 case FieldDescriptor::CPPTYPE_FLOAT:
-std::cout << params.set(name, GET_FIRST(Float));
+params.set(name, GET_FIRST(Float));
 break;
 case FieldDescriptor::CPPTYPE_ENUM:
-std::cout << params.set(name, GET_FIRST(Enum)->name());
+params.set(name, GET_FIRST(Enum)->name());
 break;
 case FieldDescriptor::CPPTYPE_BOOL:
-std::cout << params.set(name, GET_FIRST(Bool));
+params.set(name, GET_FIRST(Bool));
 break;
 default:
-std::cout << "unknown";
+CV_Error(cv::Error::StsError, "Unknown type \"" + String(field->type_name()) + "\" in prototxt");
 break;
 }
@@ -214,8 +212,6 @@ namespace
 tops.assign(layer.top().begin(), layer.top().end());
 bottomsVec[li].assign(layer.bottom().begin(), layer.bottom().end());
-std::cout << std::endl << "LAYER: " << name << std::endl;
 extractLayerParams(layer, layerParams);
 extractBinaryLayerParms(layer, layerParams);

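With the default branch above now calling CV_Error, an unsupported field type in a prototxt surfaces as a cv::Exception instead of an "unknown" line on stdout. A hedged sketch of catching it at the call site (file names are placeholders):

    #include <iostream>
    #include <opencv2/dnn/dnn.hpp>

    int tryImport()
    {
        try
        {
            cv::dnn::Net net;
            cv::Ptr<cv::dnn::Importer> importer =
                cv::dnn::createCaffeImporter("deploy.prototxt", "weights.caffemodel");
            importer->populateNet(net); // parsing errors propagate from here or from the importer
        }
        catch (const cv::Exception &e)
        {
            std::cerr << "Caffe import failed: " << e.msg << std::endl; // carries the CV_Error text
            return 1;
        }
        return 0;
    }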
@@ -312,10 +312,13 @@ struct Net::Impl
 {
 LayerData& ld = it->second;
-std::cout << ld.name << std::endl;
-std::cout << "Connected:" << std::endl;
-for (std::set<int>::iterator j = ld.inputLayersId.begin(); j != ld.inputLayersId.end(); j++)
-std::cout << layers[*j].name << std::endl;
+std::cout << "Layer \"" << ld.name << "\"" << std::endl;
+if (ld.inputBlobsId.size() > 0)
+{
+std::cout << "Connected to:" << std::endl;
+for (std::set<int>::iterator j = ld.inputLayersId.begin(); j != ld.inputLayersId.end(); j++)
+std::cout << layers[*j].name << std::endl;
+}
 std::cout << std::endl;
 }
 }
@@ -519,6 +522,15 @@ Blob Net::getBlob(BlobId outputName)
 return ld.outputBlobs[found.oid];
 }
+Blob Net::getParam(LayerId layer, int numParam)
+{
+LayerData &ld = impl->getLayerData(layer);
+std::vector<Blob> &layerBlobs = ld.layerInstance->learnedParams;
+CV_Assert(numParam < (int)layerBlobs.size());
+return layerBlobs[numParam];
+}
 Importer::~Importer()
 {

@@ -45,4 +45,4 @@ void getKernelParams(LayerParams &params, int &kernelH, int &kernelW, int &padH,
 }
 }
-}
\ No newline at end of file
+}

@@ -12,4 +12,4 @@ void getKernelParams(LayerParams &params, int &kernelH, int &kernelW, int &padH,
 }
 }
-#endif
\ No newline at end of file
+#endif

@@ -64,8 +64,8 @@ namespace dnn
 {
 CV_Assert(inputs.size() > 0);
-inH = inputs[0]->cols();
-inW = inputs[0]->rows();
+inW = inputs[0]->cols();
+inH = inputs[0]->rows();
 computeOutputShape(inH, inW);
 outputs.resize(inputs.size());
@@ -150,4 +150,4 @@ namespace dnn
 }
 }
 }
-}
\ No newline at end of file
+}

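The swap above fixes a width/height mix-up: in OpenCV a matrix's height is its row count and its width its column count, so inW must come from cols() and inH from rows(). For reference:

    cv::Mat m(480, 640, CV_32F); // Mat(rows, cols, type): height 480, width 640
    // m.rows == 480 -> inH, m.cols == 640 -> inW, matching the corrected assignments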
@@ -0,0 +1,33 @@
#ifndef __OPENCV_DNN_TEST_NPY_BLOB_HPP__
#define __OPENCV_DNN_TEST_NPY_BLOB_HPP__
#include "test_precomp.hpp"
#ifdef __GNUC__
# pragma GCC diagnostic ignored "-Wunused-parameter"
# pragma GCC diagnostic push
#endif
#include "cnpy.h"
#ifdef __GNUC__
# pragma GCC diagnostic pop
#endif
inline cv::dnn::Blob blobFromNPY(const cv::String &path)
{
cnpy::NpyArray npyBlob = cnpy::npy_load(path.c_str());
cv::dnn::Blob blob;
blob.fill((int)npyBlob.shape.size(), (int*)&npyBlob.shape[0], CV_32F, npyBlob.data);
npyBlob.destruct();
return blob;
}
inline void saveBlobToNPY(cv::dnn::Blob &blob, const cv::String &path)
{
cv::Vec4i shape = blob.shape();
cnpy::npy_save(path.c_str(), blob.ptr<float>(), (unsigned*)&shape[0], 4);
}
#endif

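The new header above lets tests round-trip blobs through NumPy's .npy format. A minimal sketch of the intended use, assuming reference activations were exported beforehand with numpy.save (file and layer names mirror the AlexNet test below):

    #include "npy_blob.hpp"

    void dumpConv1(cv::dnn::Net &net)
    {
        cv::dnn::Blob ref = blobFromNPY("alexnet_conv1.npy"); // Caffe-generated reference
        cv::dnn::Blob out = net.getBlob("conv1");             // our computed activations
        saveBlobToNPY(out, "alexnet_conv1_my.npy");           // dump for offline comparison
    }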
@@ -0,0 +1,75 @@
#include "test_precomp.hpp"
#include "npy_blob.hpp"
namespace cvtest
{
using namespace std;
using namespace testing;
using namespace cv;
using namespace cv::dnn;
static std::string getOpenCVExtraDir()
{
return cvtest::TS::ptr()->get_data_path();
}
template<typename TStr>
static std::string getTestFile(TStr filename)
{
return (getOpenCVExtraDir() + "/dnn/") + filename;
}
inline void normAssert(InputArray ref, InputArray get, const char *comment = "")
{
double normL1 = cvtest::norm(ref, get, NORM_L1) / ref.getMat().total();
EXPECT_LE(normL1, 0.0001) << comment;
double normInf = cvtest::norm(ref, get, NORM_INF);
EXPECT_LE(normInf, 0.001) << comment;
}
inline void normAssert(Blob ref, Blob test, const char *comment = "")
{
normAssert(ref.getMatRef(), test.getMatRef(), comment);
}
TEST(Reproducibility_AlexNet, Accuracy)
{
Net net;
{
Ptr<Importer> importer = createCaffeImporter(getTestFile("bvlc_alexnet.prototxt"), getTestFile("bvlc_alexnet.caffemodel"));
ASSERT_TRUE(importer != NULL);
importer->populateNet(net);
}
std::vector<Mat> inpMats(2);
inpMats[0] = imread(getTestFile("alexnet_0.png"));
inpMats[1] = imread(getTestFile("alexnet_1.png"));
ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());
inpMats[0].convertTo(inpMats[0], CV_32F);
Blob inp(inpMats[0]);
net.setBlob("data", inp);
net.forward("conv1");
normAssert(blobFromNPY(getTestFile("alexnet_conv1.npy")), net.getBlob("conv1"), "conv1");
//saveBlobToNPY(convBlob, getTestFile("alexnet_conv1_my.npy"));
net.forward("relu1");
normAssert(blobFromNPY(getTestFile("alexnet_relu1.npy")), net.getBlob("relu1"), "relu1");
net.forward("norm1");
normAssert(blobFromNPY(getTestFile("alexnet_norm1.npy")), net.getBlob("norm1"), "norm1");
net.forward();
Blob out = net.getBlob("prob");
Blob ref = blobFromNPY(getTestFile("alexnet.npy"));
std::cout << out.shape() << " vs " << ref.shape() << std::endl;
Mat mOut(1, 1000, CV_32F, out.rawPtr());
Mat mRef(1, 1000, CV_32F, ref.rawPtr());
normAssert(mOut, mRef);
}
}

@@ -4,7 +4,6 @@ namespace cvtest
 {
 using namespace std;
-using namespace std::tr1;
 using namespace testing;
 using namespace cv;
 using namespace cv::dnn;
@@ -25,39 +24,17 @@ TEST(ReadCaffe_GTSRB, Accuracy)
 Net net;
 {
 Ptr<Importer> importer = createCaffeImporter(getTestFile("gtsrb.prototxt"), "");
 ASSERT_TRUE(importer != NULL);
 importer->populateNet(net);
 }
-// Mat img = imread(getTestFile("sign_50.ppm"));
-// CV_Assert(!img.empty());
-// img.convertTo(img, CV_32F, 1.0 / 255);
-// resize(img, img, cv::Size(48, 48));
-// Blob imgBlob(img);
-// net.setBlob("input", imgBlob);
-// net.forward();
-// Blob res = net.getBlob("loss");
-// for (int n = 0; n < 1; n++)
-// {
-// Mat slice = Mat(res.channels() * res.rows(), res.cols(), CV_32F, res.ptr<float>(n));
-// double maxv;
-// std::vector<int> maxIdx;
-// minMaxLoc(slice, NULL, &maxv, NULL, &maxIdx);
-// int bestClass = maxIdx[0];
-// std::cout << "Best class: #" << bestClass << std::endl;
-// //imwrite(getTestFile("vis.png"), slice*(255.0 / maxv));
-// }
 }
-TEST(ReadCaffe_GoogleNet, Accuracy)
+TEST(ReadCaffe_GoogLeNet, Accuracy)
 {
 Net net;
 {
-Ptr<Importer> importer = createCaffeImporter(getTestFile("googlenet_deploy.prototxt"), "");
+Ptr<Importer> importer = createCaffeImporter(getTestFile("bvlc_googlenet.prototxt"), "");
 ASSERT_TRUE(importer != NULL);
 importer->populateNet(net);
 }
 }

@@ -1,6 +1,6 @@
 #include "test_precomp.hpp"
 #include <iostream>
-#include "cnpy.h"
+#include "npy_blob.hpp"
 namespace cvtest
 {
@@ -32,21 +32,10 @@ bool isEqual(const cv::Vec<T, n> &l, const cv::Vec<T, n> &r)
 return true;
 }
-Blob loadNpyBlob(String name)
-{
-cnpy::NpyArray npyBlob = cnpy::npy_load(getTestFile(name));
-Blob blob;
-blob.fill((int)npyBlob.shape.size(), (int*)&npyBlob.shape[0], CV_32F, npyBlob.data);
-npyBlob.destruct();
-return blob;
-}
 static void testLayer(String proto, String caffemodel = String())
 {
-Blob inp = loadNpyBlob("blob.npy");
-Blob ref = loadNpyBlob(proto + ".caffe.npy");
+Blob inp = blobFromNPY(getTestFile("blob.npy"));
+Blob ref = blobFromNPY(getTestFile(proto + ".caffe.npy"));
 Net net;
 {

@@ -0,0 +1,276 @@
name: "AlexNet"
input: "data"
input_dim: 10
input_dim: 3
input_dim: 227
input_dim: 227
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 96
kernel_size: 11
stride: 4
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "relu1"
}
layer {
name: "norm1"
type: "LRN"
bottom: "relu1"
top: "norm1"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "norm1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 2
kernel_size: 5
group: 2
}
}
layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "conv2"
}
layer {
name: "norm2"
type: "LRN"
bottom: "conv2"
top: "norm2"
lrn_param {
local_size: 5
alpha: 0.0001
beta: 0.75
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "norm2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "conv3"
type: "Convolution"
bottom: "pool2"
top: "conv3"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
}
}
layer {
name: "relu3"
type: "ReLU"
bottom: "conv3"
top: "conv3"
}
layer {
name: "conv4"
type: "Convolution"
bottom: "conv3"
top: "conv4"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 384
pad: 1
kernel_size: 3
group: 2
}
}
layer {
name: "relu4"
type: "ReLU"
bottom: "conv4"
top: "conv4"
}
layer {
name: "conv5"
type: "Convolution"
bottom: "conv4"
top: "conv5"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
convolution_param {
num_output: 256
pad: 1
kernel_size: 3
group: 2
}
}
layer {
name: "relu5"
type: "ReLU"
bottom: "conv5"
top: "conv5"
}
layer {
name: "pool5"
type: "Pooling"
bottom: "conv5"
top: "pool5"
pooling_param {
pool: MAX
kernel_size: 3
stride: 2
}
}
layer {
name: "fc6"
type: "InnerProduct"
bottom: "pool5"
top: "fc6_0"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
inner_product_param {
num_output: 4096
}
}
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6_0"
top: "fc6_1"
}
layer {
name: "drop6"
type: "Dropout"
bottom: "fc6_1"
top: "fc6_2"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc7"
type: "InnerProduct"
bottom: "fc6_2"
top: "fc7_0"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
inner_product_param {
num_output: 4096
}
}
layer {
name: "relu7"
type: "ReLU"
bottom: "fc7_0"
top: "fc7_1"
}
layer {
name: "drop7"
type: "Dropout"
bottom: "fc7_1"
top: "fc7_2"
dropout_param {
dropout_ratio: 0.5
}
}
layer {
name: "fc8"
type: "InnerProduct"
bottom: "fc7_2"
top: "fc8"
param {
lr_mult: 1
decay_mult: 1
}
param {
lr_mult: 2
decay_mult: 0
}
inner_product_param {
num_output: 1000
}
}
layer {
name: "prob"
type: "Softmax"
bottom: "fc8"
top: "prob"
}