Implement unambiguous blob naming scheme of the form "layerName[.outputName]".

The old Caffe-like blob naming scheme was removed.
pull/265/head
Vitaliy Lyudvichenko 10 years ago
parent 85ad43d325
commit 75e09fdc63
  1. modules/dnn/include/opencv2/dnn/dnn.hpp (20 lines changed)
  2. modules/dnn/src/caffe_importer.cpp (89 lines changed)
  3. modules/dnn/src/dnn.cpp (413 lines changed)
  4. modules/dnn/test/test_alexnet.cpp (2 lines changed)
  5. modules/dnn/test/test_googlenet.cpp (2 lines changed)
  6. modules/dnn/test/test_layers.cpp (7 lines changed)
  7. modules/dnn/testdata/dnn/bvlc_alexnet.prototxt (32 lines changed)
  8. modules/dnn/testdata/dnn/layers/lrn_channels.prototxt (2 lines changed)
  9. modules/dnn/testdata/dnn/layers/lrn_spatial.prototxt (3 lines changed)
  10. modules/dnn/testdata/dnn/layers/softmax.prototxt (2 lines changed)

@@ -24,7 +24,7 @@ namespace dnn
class CV_EXPORTS Layer
{
public:
//TODO: this field must be declared as public if we want support possibility to change these params in runtime
//learned params of layer must be stored here to allow externally read them
std::vector<Blob> learnedParams;
virtual ~Layer();
@@ -34,11 +34,9 @@ namespace dnn
virtual void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs) = 0;
virtual int getNumInputs();
virtual int getNumOutputs();
//each input/output can be labeled to easily identify their using "layer_name.output_name"
virtual String getInputName(int inputNum);
virtual String getOutputName(int outputNum);
virtual int inputNameToIndex(String inputName);
virtual int outputNameToIndex(String outputName);
};
//containers for String and int
@@ -56,14 +54,10 @@ namespace dnn
int getLayerId(LayerId layer);
void deleteLayer(LayerId layer);
//each output of each layer can be labeled by unique string label (as in Caffe)
//if label not specified then %layer_name%.%layer_output_id% can be used
void setOutputNames(LayerId layer, const std::vector<String> &outputNames);
void setLayerInputs(const std::vector<String> &outputs, LayerId layer);
void setNetInputs(const std::vector<String> &inputBlobNames);
void connect(BlobId input, BlobId output);
void connect(const std::vector<BlobId> &outputs, const std::vector<BlobId> &inputs);
void connect(String outPin, String inpPin);
void connect(int outLayerId, int outNum, int inLayerId, int inNum);
void forward();
void forward(LayerId toLayer);
@@ -74,8 +68,8 @@ namespace dnn
void forwardOpt(LayerId toLayer);
void forwardOpt(const std::vector<LayerId> &toLayers);
void setBlob(BlobId outputName, const Blob &blob);
Blob getBlob(BlobId outputName);
void setBlob(String outputName, const Blob &blob);
Blob getBlob(String outputName);
void setParam(LayerId layer, int numParam, const Blob &blob);
Blob getParam(LayerId layer, int numParam = 0);

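Layer no longer advertises generated names via getInputName()/getOutputName(); instead it answers the reverse question with inputNameToIndex()/outputNameToIndex(), both returning -1 by default. A sketch of a toy layer with two named outputs, so that pins such as "split1.second" can resolve; the SplitLayer name and its output names are invented for illustration and are not part of this commit:

    #include <opencv2/dnn/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    // Toy layer duplicating its single input onto two outputs.
    struct SplitLayer : public Layer
    {
        void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs)
        {
            CV_Assert(inputs.size() == 1);
            outputs.resize(2);
        }

        void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs)
        {
            outputs[0] = *inputs[0];
            outputs[1] = *inputs[0];
        }

        // Map human-readable output names to indices; -1 (the base-class default) means "unknown".
        int outputNameToIndex(String outputName)
        {
            if (outputName == "first")  return 0;
            if (outputName == "second") return 1;
            return -1;
        }
    };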
@@ -91,11 +91,9 @@ namespace
params.set(name, GET_FIRST(Bool));
break;
default:
CV_Error(cv::Error::StsError, "Unknown type \"" + String(field->type_name()) + "\" in prototxt");
CV_Error(Error::StsError, "Unknown type \"" + String(field->type_name()) + "\" in prototxt");
break;
}
std::cout << std::endl;
}
void extractLayerParams(const Message &msg, cv::dnn::LayerParams &params)
@@ -146,7 +144,7 @@ namespace
}
else
{
CV_Error(cv::Error::StsAssert, "Unknown shape of input blob");
CV_Error(Error::StsError, "Unknown shape of input blob");
return BlobShape(-1);
}
}
@@ -187,53 +185,100 @@ namespace
}
}
struct BlobNote
{
BlobNote(const std::string &_name, int _layerId, int _outNum) :
name(_name), layerId(_layerId), outNum(_outNum) {}
const std::string &name;
int layerId, outNum;
};
void populateNet(Net dstNet)
{
int layersSize = net.layer_size();
std::vector<BlobNote> addedBlobs;
addedBlobs.reserve(layersSize + 1);
//setup input layer names
{
std::vector<String> netInputs(net.input_size());
for (int ii = 0; ii < net.input_size(); ii++)
netInputs[ii] = net.input(ii);
for (int inNum = 0; inNum < net.input_size(); inNum++)
{
addedBlobs.push_back(BlobNote(net.input(inNum), 0, inNum));
netInputs[inNum] = net.input(inNum);
}
dstNet.setNetInputs(netInputs);
}
int layersSize = net.layer_size();
std::vector<String> layersName(layersSize);
std::vector<int> layersId(layersSize);
std::vector<std::vector<String> > bottomsVec(layersSize);
for (int li = 0; li < layersSize; li++)
{
const caffe::LayerParameter layer = net.layer(li);
const caffe::LayerParameter &layer = net.layer(li);
String name = layer.name();
String type = layer.type();
LayerParams layerParams;
std::vector<String> tops;
tops.assign(layer.top().begin(), layer.top().end());
bottomsVec[li].assign(layer.bottom().begin(), layer.bottom().end());
extractLayerParams(layer, layerParams);
extractBinaryLayerParms(layer, layerParams);
int id = dstNet.addLayer(name, type, layerParams);
dstNet.setOutputNames(id, tops);
for (int inNum = 0; inNum < layer.bottom_size(); inNum++)
addInput(layer.bottom(inNum), id, inNum, dstNet, addedBlobs);
layersName[li] = name;
layersId[li] = id;
for (int outNum = 0; outNum < layer.top_size(); outNum++)
addOutput(layer, id, outNum, dstNet, addedBlobs);
}
}
for (int li = 0; li < layersSize; li++)
void addOutput(const caffe::LayerParameter &layer, int layerId, int outNum, Net &dstNet, std::vector<BlobNote> &addedBlobs)
{
const std::string &name = layer.top(outNum);
bool haveDups = false;
for (int idx = (int)addedBlobs.size() - 1; idx >= 0; idx--)
{
dstNet.setLayerInputs(bottomsVec[li], layersId[li]);
if (addedBlobs[idx].name == name)
{
haveDups = true;
break;
}
}
if (haveDups)
{
bool isInplace = layer.bottom_size() > outNum && layer.bottom(outNum) == name;
if (!isInplace)
CV_Error(Error::StsBadArg, "Duplicate blobs produced by multiple sources");
}
addedBlobs.push_back(BlobNote(name, layerId, outNum));
}
void addInput(const std::string &name, int layerId, int inNum, Net &dstNet, std::vector<BlobNote> &addedBlobs)
{
int idx;
for (idx = (int)addedBlobs.size() - 1; idx >= 0; idx--)
{
if (addedBlobs[idx].name == name)
break;
}
if (idx < 0)
{
CV_Error(Error::StsError, "Can't found output blob \"" + name + "\"");
return;
}
dstNet.connect(addedBlobs[idx].layerId, addedBlobs[idx].outNum, layerId, inNum);
}
~CaffeImporter()
{
}
};
}

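The importer's bottom resolution boils down to "the latest producer of a blob name wins", which is exactly what lets Caffe's in-place layers (top == bottom, e.g. ReLU and Dropout) chain up. A simplified, self-contained rendering of the addInput() lookup; BlobNote's name is held by value here for brevity, whereas the importer stores a reference to the protobuf string:

    #include <string>
    #include <vector>

    struct BlobNote
    {
        std::string name;      // blob ("top") name
        int layerId, outNum;   // the layer/output that produced it
    };

    // Mirrors CaffeImporter::addInput(): scan addedBlobs backwards so that the
    // most recently produced blob with the requested name is the one connected.
    static int findProducer(const std::vector<BlobNote> &addedBlobs, const std::string &name)
    {
        for (int idx = (int)addedBlobs.size() - 1; idx >= 0; idx--)
            if (addedBlobs[idx].name == name)
                return idx;
        return -1; // no layer has produced this blob yet
    }

In the rewritten AlexNet prototxt below, conv1 produces "conv1", the in-place relu1 consumes and re-produces it (addOutput() tolerates the duplicate name because it matches the layer's own bottom), so norm1's bottom "conv1" resolves to relu1's output and the chain becomes conv1 -> relu1 -> norm1.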
@@ -2,6 +2,7 @@
#include <set>
#include <algorithm>
#include <iostream>
#include <sstream>
using namespace cv;
using namespace cv::dnn;
@@ -16,38 +17,45 @@ namespace cv
namespace dnn
{
struct LayerOutId
template<typename T>
String toString(const T &v)
{
std::ostringstream ss;
ss << v;
return ss.str();
}
struct LayerPin
{
int lid;
int oid;
LayerOutId() {}
LayerOutId(int layerId, int outputId)
LayerPin(int layerId = -1, int outputId = -1)
: lid(layerId), oid(outputId) {}
bool valid() const
{
return (lid >= 0 && oid >= 0);
}
bool equal(const LayerPin &r) const
{
return (lid == r.lid && oid == r.oid);
}
};
struct LayerData
{
LayerData() {}
LayerData(const String &_name, const String &_type, LayerParams &_params)
: name(_name), type(_type), params(_params) {}
LayerData(int _id, const String &_name, const String &_type, LayerParams &_params)
: id(_id), name(_name), type(_type), params(_params) {}
int id;
String name;
String type;
LayerParams params;
std::vector<String> outputNames;
std::vector<String> inputNames;
bool hasNamedOutput(const String &name)
{
return std::find(outputNames.begin(), outputNames.end(), name) != outputNames.end();
}
bool hasNemedInput(const String &name)
{
return std::find(inputNames.begin(), inputNames.end(), name) != inputNames.end();
}
std::vector<LayerOutId> inputBlobsId;
std::vector<LayerPin> inputBlobsId;
std::set<int> inputLayersId;
std::set<int> requiredOutputs;
@@ -56,23 +64,64 @@ struct LayerData
std::vector<Blob*> inputBlobs;
int flag;
Ptr<Layer> getLayerInstance()
{
if (layerInstance)
return layerInstance;
layerInstance = LayerRegister::createLayerInstance(type, params);
if (!layerInstance)
{
CV_Error(Error::StsError, "Can't create layer \"" + name + "\" of type \"" + type + "\"");
}
return layerInstance;
}
};
//fake layer containing network input blobs
struct NetInputLayer : public Layer
{
void allocate(const std::vector<Blob*>&, std::vector<Blob>&) {}
void forward(std::vector<Blob*>&, std::vector<Blob>&) {}
int outputNameToIndex(String tgtName)
{
int idx = (int)(std::find(outNames.begin(), outNames.end(), tgtName) - outNames.begin());
return (idx < (int)outNames.size()) ? idx : -1;
}
void setNames(const std::vector<String> &names)
{
outNames.assign(names.begin(), names.end());
}
private:
std::vector<String> outNames;
};
struct Net::Impl
{
Impl()
{
LayerParams paramsEmpty;
layers.insert(make_pair(0, LayerData("_input", "_noType", paramsEmpty)));
{
//allocate fake net input layer
netInputLayer = Ptr<NetInputLayer>(new NetInputLayer());
LayerData &inpl = layers.insert( make_pair(0, LayerData()) ).first->second;
inpl.id = 0;
inpl.name = "_input";
inpl.type = "__NetInputLayer__";
inpl.layerInstance = netInputLayer;
lastLayerId = 1;
netWasAllocated = false;
}
Ptr<NetInputLayer> netInputLayer;
std::vector<int> netOutputs;
typedef std::map<int, LayerData> MapIdToLayerData;
std::map<int, LayerData> layers;
std::map<String, int> layerNameToId;
int lastLayerId;
@@ -83,9 +132,8 @@ struct Net::Impl
{
if (!netWasAllocated)
{
connectInputs();
allocateLayers();
computeNetOutputs();
computeNetOutputLayers();
netWasAllocated = true;
}
@@ -97,124 +145,130 @@ struct Net::Impl
return (it != layerNameToId.end()) ? it->second : -1;
}
int getLayerId(const DictValue &v)
int getLayerId(int id)
{
MapIdToLayerData::iterator it = layers.find(id);
return (it != layers.end()) ? id : -1;
}
int getLayerId(DictValue &layerDesc)
{
if (layerDesc.isInt())
return getLayerId(layerDesc.get<int>());
else if (layerDesc.isString())
return getLayerId(layerDesc.get<String>());
CV_Assert(layerDesc.isInt() || layerDesc.isString());
return -1;
}
String getLayerName(int id)
{
MapIdToLayerData::iterator it = layers.find(id);
return (it != layers.end()) ? it->second.name : "(unknown layer)";
}
LayerData& getLayerData(int id)
{
MapIdToLayerData::iterator it = layers.find(id);
if (it == layers.end())
CV_Error(Error::StsError, "Layer with requested id=" + toString(id) + " not found");
return it->second;
}
LayerData& getLayerData(const String &layerName)
{
int id = getLayerId(layerName);
if (id < 0)
CV_Error(Error::StsError, "Requsted layer \"" + layerName + "\" not found");
return getLayerData(id);
}
LayerData& getLayerData(const DictValue &layerDesc)
{
if (layerDesc.isInt())
return getLayerData(layerDesc.get<int>());
else if (layerDesc.isString())
return getLayerData(layerDesc.get<String>());
CV_Assert(layerDesc.isInt() || layerDesc.isString());
return *((LayerData*)NULL);
}
static void addLayerInput(LayerData &ld, int inNum, LayerPin from)
{
if (v.isString())
return getLayerId(v.get<String>());
else if (v.isInt())
return v.get<int>();
if ((int)ld.inputBlobsId.size() <= inNum)
{
ld.inputBlobsId.resize(inNum + 1);
}
else
{
CV_Assert(v.isString() || v.isInt());
return -1;
LayerPin storedFrom = ld.inputBlobsId[inNum];
if (storedFrom.valid() && !storedFrom.equal(from))
CV_Error(Error::StsError, "Input #" + toString(inNum) + "of layer \"" + ld.name + "\" already was connected");
}
ld.inputBlobsId[inNum] = from;
}
LayerData& getLayerData(const DictValue &v)
static void splitPin(const String &pinAlias, String &layerName, String &outName)
{
int id = getLayerId(v);
std::map<int, LayerData>::iterator it = layers.find(id);
CV_Assert(id >= 0 && it != layers.end());
return it->second;
size_t delimPos = pinAlias.find('.');
layerName = pinAlias.substr(0, delimPos);
outName = (delimPos == String::npos) ? String() : pinAlias.substr(delimPos + 1);
}
int findOutputsByName(const String &name, LayerOutId *found, int maxCount = 1)
int resolvePinOutputName(LayerData &ld, const String &outName, bool isOutPin)
{
int count = 0;
if (outName.empty())
return 0;
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end() && count < maxCount; it++)
if (std::isdigit(outName[0]))
{
int lid = it->first;
LayerData &ld = it->second;
char *lastChar;
long inum = std::strtol(outName.c_str(), &lastChar, 10);
for (size_t oi = 0; oi < ld.outputNames.size() && count < maxCount; oi++)
if (*lastChar == 0)
{
if (ld.outputNames[oi] == name)
found[count++] = LayerOutId(lid, (int)oi);
CV_Assert(inum == (int)inum);
return (int)inum;
}
}
return count;
if (isOutPin)
return ld.getLayerInstance()->outputNameToIndex(outName);
else
return ld.getLayerInstance()->inputNameToIndex(outName);
}
void connectInputs()
LayerPin getPinByAlias(const String &pinAlias, bool isOutPin = true)
{
LayerOutId foundOutputs[3], out;
LayerPin pin;
String layerName, outName;
splitPin(pinAlias, layerName, outName);
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end(); it++)
{
LayerData &ld = it->second;
pin.lid = (layerName.empty()) ? 0 : getLayerId(layerName);
ld.inputBlobs.resize(ld.inputNames.size());
ld.inputBlobsId.resize(ld.inputNames.size());
ld.inputLayersId.clear();
if (pin.lid >= 0)
pin.oid = resolvePinOutputName(getLayerData(pin.lid), outName, isOutPin);
for (size_t ii = 0; ii < ld.inputNames.size(); ii++)
{
const String &tgtName = ld.inputNames[ii];
int foundCount = findOutputsByName(tgtName, foundOutputs, 3);
if (foundCount > 2)
{
CV_Error(cv::Error::StsNotImplemented, "Two or more non-inplace blobs have the same name \"" + tgtName + "\"");
}
else if (foundCount == 2)
{
bool inPlace[2];
inPlace[0] = layers[ foundOutputs[0].lid ].hasNemedInput(tgtName);
inPlace[1] = layers[ foundOutputs[1].lid ].hasNemedInput(tgtName);
if (!inPlace[0] && !inPlace[1])
{
CV_Error(cv::Error::StsNotImplemented, "Two or more non-inplace blobs have the same name \"" + tgtName + "\"");
}
else if (inPlace[0] && inPlace[1])
{
CV_Error(cv::Error::StsNotImplemented, "Two or more blobs has same in-place blob \"" + tgtName + "\"");
}
else
{
if (ld.hasNamedOutput(tgtName))
out = (inPlace[0]) ? foundOutputs[1] : foundOutputs[0];
else
out = (inPlace[0]) ? foundOutputs[0] : foundOutputs[1];
}
}
else if (foundCount == 0)
{
CV_Error(cv::Error::StsBadArg, "Can't find specified input blob \"" + tgtName + "\" for layer \"" + ld.name + "\"");
continue;
}
else
{
out = foundOutputs[0];
}
ld.inputBlobsId[ii] = out;
ld.inputLayersId.insert(out.lid);
layers[out.lid].requiredOutputs.insert(out.oid);
}
}
return pin;
}
for (it = layers.begin(); it != layers.end(); it++)
{
LayerData& ld = it->second;
void connect(int outLayerId, int outNum, int inLayerId, int inNum)
{
LayerData &ldOut = getLayerData(outLayerId);
LayerData &ldInp = getLayerData(inLayerId);
std::cout << "Layer \"" << ld.name << "\"" << std::endl;
if (ld.inputBlobsId.size() > 0)
{
std::cout << "Connected to:" << std::endl;
for (std::set<int>::iterator j = ld.inputLayersId.begin(); j != ld.inputLayersId.end(); j++)
std::cout << layers[*j].name << std::endl;
}
std::cout << std::endl;
}
addLayerInput(ldInp, inNum, LayerPin(outLayerId, outNum));
ldOut.requiredOutputs.insert(outNum);
}
void computeNetOutputs()
void computeNetOutputLayers()
{
netOutputs.clear();
@@ -245,29 +299,18 @@ struct Net::Impl
for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
allocateLayer(*i);
//create instance
if (ld.layerInstance == NULL && lid != 0)
{
ld.layerInstance = LayerRegister::createLayerInstance(ld.type, ld.params);
if (ld.layerInstance == NULL)
{
std::cerr << "Can't create layer \"" << ld.name << "\" of type \"" << ld.type << "\"" << std::endl;
}
}
//bind inputs
ld.inputBlobs.resize(ld.inputBlobsId.size());
for (size_t i = 0; i < ld.inputBlobsId.size(); i++)
{
int srcLId = ld.inputBlobsId[i].lid;
int srcOId = ld.inputBlobsId[i].oid;
ld.inputBlobs[i] = &layers[srcLId].outputBlobs[srcOId];
LayerPin from = ld.inputBlobsId[i];
CV_Assert(from.valid());
ld.inputBlobs[i] = &layers[from.lid].outputBlobs[from.oid];
}
//allocate layer
ld.outputBlobs.resize(ld.outputNames.size());
if (ld.layerInstance)
ld.layerInstance->allocate(ld.inputBlobs, ld.outputBlobs);
ld.outputBlobs.resize(ld.requiredOutputs.size());
ld.getLayerInstance()->allocate(ld.inputBlobs, ld.outputBlobs);
ld.flag = 1;
}
@@ -285,7 +328,7 @@ struct Net::Impl
}
}
void forwardLayer(int layerId, bool clearFlags = true)
void forwardLayer(LayerData &ld, bool clearFlags = true)
{
if (clearFlags)
{
@@ -294,8 +337,6 @@ struct Net::Impl
it->second.flag = 0;
}
LayerData &ld = layers[layerId];
//already was forwarded
if (ld.flag)
return;
@@ -303,15 +344,11 @@ struct Net::Impl
//forward parents
for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
{
forwardLayer(*i, false);
forwardLayer(layers[*i], false);
}
//forward itself
if (ld.layerInstance && layerId != 0)
{
//std::cout << ld.name << " shape:" << ld.outputBlobs[0].shape4() << std::endl;
ld.layerInstance->forward(ld.inputBlobs, ld.outputBlobs);
}
ld.layerInstance->forward(ld.inputBlobs, ld.outputBlobs);
ld.flag = 1;
}
@@ -323,7 +360,7 @@ struct Net::Impl
it->second.flag = 0;
for (it = layers.begin(); it != layers.end(); it++)
forwardLayer(it->first, false);
forwardLayer(it->second, false);
}
};
@@ -339,35 +376,38 @@ Net::~Net()
int Net::addLayer(const String &name, const String &type, LayerParams &params)
{
if (name.find('.') != String::npos)
{
CV_Error(Error::StsBadArg, "Added layer name \"" + name + "\" should not contain dot symbol");
return -1;
}
if (impl->getLayerId(name) >= 0)
{
CV_Error(cv::Error::StsBadArg, "Layer \"" + name + "\" already into net");
CV_Error(Error::StsBadArg, "Layer \"" + name + "\" already into net");
return -1;
}
int id = ++impl->lastLayerId;
impl->layerNameToId.insert(std::make_pair(name, id));
impl->layers.insert(std::make_pair(id, LayerData(name, type, params)));
impl->layers.insert(std::make_pair(id, LayerData(id, name, type, params)));
return id;
}
//void Net::connect(BlobId input, BlobId output)
//{
//}
void Net::setOutputNames(LayerId layer, const std::vector<String> &outputNames)
void Net::connect(int outLayerId, int outNum, int inLayerId, int inNum)
{
LayerData &ld = impl->getLayerData(layer);
CV_Assert(ld.outputNames.size() == 0);
ld.outputNames.assign(outputNames.begin(), outputNames.end());
impl->connect(outLayerId, outNum, inLayerId, inNum);
}
void Net::setLayerInputs(const std::vector<String> &outputs, LayerId layer)
void Net::connect(String _outPin, String _inPin)
{
LayerData &ld = impl->getLayerData(layer);
ld.inputNames.assign(outputs.begin(), outputs.end());
LayerPin outPin = impl->getPinByAlias(_outPin);
LayerPin inpPin = impl->getPinByAlias(_inPin);
CV_Assert(outPin.valid() && inpPin.valid());
impl->connect(outPin.lid, outPin.oid, inpPin.lid, inpPin.oid);
}
void Net::forward()
@@ -379,37 +419,34 @@ void Net::forward()
void Net::forward(LayerId toLayer)
{
impl->setUpNet();
impl->forwardLayer(impl->getLayerId(toLayer));
impl->forwardLayer(impl->getLayerData(toLayer));
}
void Net::setNetInputs(const std::vector<String> &inputBlobNames)
{
setOutputNames(0, inputBlobNames);
impl->netInputLayer->setNames(inputBlobNames);
}
void Net::setBlob(BlobId outputName, const Blob &blob)
void Net::setBlob(String outputName, const Blob &blob)
{
String name = outputName.get<String>();
LayerOutId found;
if (!impl->findOutputsByName(name, &found, 1))
CV_Error(cv::Error::StsObjectNotFound, "Request blob \"" + name + "\" not found");
LayerPin pin = impl->getPinByAlias(outputName);
if (!pin.valid())
CV_Error(Error::StsObjectNotFound, "Request blob \"" + outputName + "\" not found");
LayerData &ld = impl->layers[found.lid];
ld.outputBlobs.resize(ld.outputNames.size());
ld.outputBlobs[found.oid] = blob;
LayerData &ld = impl->layers[pin.lid];
ld.outputBlobs.resize( std::max(pin.oid+1, (int)ld.requiredOutputs.size()) );
ld.outputBlobs[pin.oid] = blob;
}
Blob Net::getBlob(BlobId outputName)
Blob Net::getBlob(String outputName)
{
String name = outputName.get<String>();
LayerOutId found;
LayerPin pin = impl->getPinByAlias(outputName);
if (!pin.valid())
CV_Error(Error::StsObjectNotFound, "Request blob \"" + outputName + "\" not found");
if (!impl->findOutputsByName(name, &found, 1))
CV_Error(cv::Error::StsObjectNotFound, "Request blob \"" + name + "\" not found");
LayerData &ld = impl->layers[found.lid];
return ld.outputBlobs[found.oid];
LayerData &ld = impl->layers[pin.lid];
CV_Assert(pin.oid < (int)ld.outputBlobs.size());
return ld.outputBlobs[pin.oid];
}
Blob Net::getParam(LayerId layer, int numParam)
@@ -426,36 +463,14 @@ Importer::~Importer()
}
//////////////////////////////////////////////////////////////////////////
#include <sstream>
template<typename T>
String toString(const T &v)
{
std::stringstream ss;
ss << v;
return ss.str();
}
int Layer::getNumInputs()
{
return 1;
}
int Layer::getNumOutputs()
int Layer::inputNameToIndex(String inputName)
{
return 1;
return -1;
}
cv::String Layer::getInputName(int inputNum)
{
return "input" + toString(inputNum);
}
cv::String Layer::getOutputName(int outputNum)
int Layer::outputNameToIndex(String outputName)
{
return "output" + toString(outputNum);
return -1;
}
Layer::~Layer()

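A sketch of driving the new plumbing directly, without the importer: register net inputs, add a layer, wire it by alias, then run. The "LRN" type, the "norm1"/"data" names and the prepared LayerParams/input Blob are illustrative (they follow the layer tests); only methods implemented above are called.

    #include <opencv2/dnn/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    static Blob runSingleLayer(LayerParams &lrnParams, const Blob &input)
    {
        Net net;

        std::vector<String> netInputs;
        netInputs.push_back("data");
        net.setNetInputs(netInputs);     // registers output "data" on the fake input layer (id 0)

        net.addLayer("norm1", "LRN", lrnParams);
        net.connect(".data", "norm1");   // (input layer, "data") -> input #0 of "norm1"

        net.setBlob(".data", input);
        net.forward();
        return net.getBlob("norm1");     // same pin as "norm1.0"
    }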
@@ -29,7 +29,7 @@ TEST(Reproducibility_AlexNet, Accuracy)
inpMats.push_back( imread(getTestFile("alexnet_1.png")) );
ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());
net.setBlob("data", Blob(inpMats));
net.setBlob(".data", Blob(inpMats));
net.forward();
Blob out = net.getBlob("prob");

@@ -30,7 +30,7 @@ TEST(Reproducibility_GoogLeNet, Accuracy)
ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());
Blob inp(inpMats);
net.setBlob("data", inp);
net.setBlob(".data", inp);
net.forward();
Blob out = net.getBlob("prob");

@@ -44,17 +44,16 @@ static void testLayer(String proto, String caffemodel = String())
importer->populateNet(net);
}
net.setBlob("input", inp);
net.setBlob(".input", inp);
net.forward();
Blob out = net.getBlob("output");
EXPECT_TRUE(isEqual(ref.shape4(), out.shape4()));
EXPECT_EQ(ref.shape(), out.shape());
Mat &mRef = ref.getMatRef();
Mat &mOut = out.getMatRef();
size_t N = ref.total();
double normL1 = cvtest::norm(mRef, mOut, NORM_L1)/N;
double normL1 = cvtest::norm(mRef, mOut, NORM_L1) / ref.total();
EXPECT_LE(normL1, 0.0001);
double normInf = cvtest::norm(mRef, mOut, NORM_INF);

@@ -27,12 +27,12 @@ layer {
name: "relu1"
type: "ReLU"
bottom: "conv1"
top: "relu1"
top: "conv1"
}
layer {
name: "norm1"
type: "LRN"
bottom: "relu1"
bottom: "conv1"
top: "norm1"
lrn_param {
local_size: 5
@@ -75,12 +75,12 @@ layer {
name: "relu2"
type: "ReLU"
bottom: "conv2"
top: "relu2"
top: "conv2"
}
layer {
name: "norm2"
type: "LRN"
bottom: "relu2"
bottom: "conv2"
top: "norm2"
lrn_param {
local_size: 5
@@ -191,7 +191,7 @@ layer {
name: "fc6"
type: "InnerProduct"
bottom: "pool5"
top: "fc6_0"
top: "fc6"
param {
lr_mult: 1
decay_mult: 1
@@ -207,14 +207,14 @@ layer {
layer {
name: "relu6"
type: "ReLU"
bottom: "fc6_0"
top: "fc6_1"
bottom: "fc6"
top: "fc6"
}
layer {
name: "drop6"
type: "Dropout"
bottom: "fc6_1"
top: "fc6_2"
bottom: "fc6"
top: "fc6"
dropout_param {
dropout_ratio: 0.5
}
@@ -222,8 +222,8 @@ layer {
layer {
name: "fc7"
type: "InnerProduct"
bottom: "fc6_2"
top: "fc7_0"
bottom: "fc6"
top: "fc7"
param {
lr_mult: 1
decay_mult: 1
@@ -239,14 +239,14 @@ layer {
layer {
name: "relu7"
type: "ReLU"
bottom: "fc7_0"
top: "fc7_1"
bottom: "fc7"
top: "fc7"
}
layer {
name: "drop7"
type: "Dropout"
bottom: "fc7_1"
top: "fc7_2"
bottom: "fc7"
top: "fc7"
dropout_param {
dropout_ratio: 0.5
}
@@ -254,7 +254,7 @@ layer {
layer {
name: "fc8"
type: "InnerProduct"
bottom: "fc7_2"
bottom: "fc7"
top: "fc8"
param {
lr_mult: 1

@@ -7,7 +7,6 @@ input_dim: 75
input_dim: 113
layer {
name: "norm1"
type: "LRN"
lrn_param {
norm_region: ACROSS_CHANNELS;
@@ -16,6 +15,7 @@ layer {
beta: 0.75
}
name: "output"
bottom: "input"
top: "output"
}

@@ -7,8 +7,8 @@ input_dim: 75
input_dim: 113
layer {
name: "norm1"
type: "LRN"
lrn_param {
norm_region: WITHIN_CHANNEL;
local_size: 5
@@ -16,6 +16,7 @@ layer {
beta: 0.75
}
name: "output"
bottom: "input"
top: "output"
}

@@ -7,9 +7,9 @@ input_dim: 75
input_dim: 113
layer {
name: "Softmax"
type: "Softmax"
name: "output"
bottom: "input"
top: "output"
}