Updated classification example

Branch: pull/265/head
Author: Vitaliy Lyudvichenko, 10 years ago
Parent: 6548839454
Commit: db4ff2172a
Changed files:

  1. modules/dnn/samples/bvlc_alexnet.prototxt (281 changed lines)
  2. modules/dnn/samples/classify_with_googlenet.cpp (25 changed lines)
  3. modules/dnn/samples/space_shuttle.jpg (binary)
  4. modules/dnn/samples/synset_words.txt (1000 changed lines)
  5. modules/dnn/test/cnpy.cpp (3 changed lines)

@@ -1,281 +0,0 @@
name: "AlexNet"
input: "data"
input_dim: 10
input_dim: 3
input_dim: 227
input_dim: 227
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "conv1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "norm1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "conv2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "norm2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "pool2"
  top: "conv3"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv4"
  type: "Convolution"
  bottom: "conv3"
  top: "conv4"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layer {
  name: "relu4"
  type: "ReLU"
  bottom: "conv4"
  top: "conv4"
}
layer {
  name: "conv5"
  type: "Convolution"
  bottom: "conv4"
  top: "conv5"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layer {
  name: "relu5"
  type: "ReLU"
  bottom: "conv5"
  top: "conv5"
}
layer {
  name: "pool5"
  type: "Pooling"
  bottom: "conv5"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5"
  top: "fc6"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc61"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc61"
  top: "fc7"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc71"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8"
  type: "InnerProduct"
  bottom: "fc71"
  top: "fc8"
  param {
    lr_mult: 1
    decay_mult: 1
  }
  param {
    lr_mult: 2
    decay_mult: 0
  }
  inner_product_param {
    num_output: 1000
  }
}
layer {
  name: "prob"
  type: "Softmax"
  bottom: "fc8"
  top: "prob"
}
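
Since the prototxt above is removed by this commit, the only thing worth double-checking is that its fixed 227x227 input actually fits the layer geometry before fc6. The snippet below is a sanity-check sketch only: the layer parameters are hand-copied from the deleted file, and the output-size formula is the usual floor((in + 2*pad - kernel)/stride) + 1 convention, assumed here (Caffe rounds pooling up, but all the sizes below divide evenly anyway).

```cpp
#include <cstdio>

// Spatial output size of a conv/pool layer under the assumed convention.
static int outSize(int in, int kernel, int stride, int pad)
{
    return (in + 2 * pad - kernel) / stride + 1;
}

int main()
{
    int s = 227;                 // input_dim from the deleted prototxt
    s = outSize(s, 11, 4, 0);    // conv1 -> 55
    s = outSize(s,  3, 2, 0);    // pool1 -> 27
    s = outSize(s,  5, 1, 2);    // conv2 -> 27
    s = outSize(s,  3, 2, 0);    // pool2 -> 13
    s = outSize(s,  3, 1, 1);    // conv3, conv4, conv5 all keep 13
    s = outSize(s,  3, 2, 0);    // pool5 -> 6
    std::printf("fc6 input: 256 x %d x %d = %d\n", s, s, 256 * s * s);  // 9216
    return 0;
}
```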

@@ -25,50 +25,49 @@ std::vector<String> CLASES_NAMES;
 void initClassesNames()
 {
-    std::ifstream fp("ILSVRC2012_synsets.txt");
+    std::ifstream fp("synset_words.txt");
     CV_Assert(fp.is_open());
     std::string name;
     while (!fp.eof())
     {
         std::getline(fp, name);
-        CLASES_NAMES.push_back(name);
+        if (name.length())
+            CLASES_NAMES.push_back( name.substr(name.find(' ')+1) );
     }
+    CV_Assert(CLASES_NAMES.size() == 1000);
     fp.close();
 }
-int main(void)
+int main(int argc, char **argv)
 {
     Net net;
     {
-        Ptr<Importer> importer = createCaffeImporter("bvlc_alexnet.prototxt", "bvlc_alexnet.caffemodel");
+        Ptr<Importer> importer = createCaffeImporter("bvlc_googlenet.prototxt", "bvlc_googlenet.caffemodel");
         importer->populateNet(net);
     }
-    Mat img = imread("zebra.jpg");
+    String filename = (argc > 1) ? argv[1] : "space_shuttle.jpg";
+    Mat img = imread(filename);
     CV_Assert(!img.empty());
     cvtColor(img, img, COLOR_BGR2RGB);
-    img.convertTo(img, CV_32F);
     resize(img, img, Size(227, 227));
-    subtract(img, cv::mean(img), img);
     Blob imgBlob(img);
-    net.setBlob("data", imgBlob);
+    net.setBlob(".data", imgBlob);
     net.forward();
-    Blob probBlob = net.getBlob("prob");
-    ClassProb bc = getMaxClass(probBlob);
+    Blob prob = net.getBlob("prob");
+    ClassProb bc = getMaxClass(prob);
     initClassesNames();
     std::string className = (bc.first < (int)CLASES_NAMES.size()) ? CLASES_NAMES[bc.first] : "unnamed";
     std::cout << "Best class:";
     std::cout << " #" << bc.first;
-    std::cout << " (from " << probBlob.total(1) << ")";
+    std::cout << " (from " << prob.total(1) << ")";
     std::cout << " \"" + className << "\"";
     std::cout << std::endl;
     std::cout << "Prob: " << bc.second * 100 << "%" << std::endl;

modules/dnn/samples/space_shuttle.jpg: binary file, not shown in the diff (27 KiB).

modules/dnn/samples/synset_words.txt: diff suppressed because the file is too large (1000 lines).
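
The new parsing line in initClassesNames() only makes sense against the synset_words.txt format that the suppressed diff hides: one WordNet id followed by a human-readable label per line. A minimal illustration follows; the sample line is a typical ILSVRC entry, assumed rather than copied from the file.

```cpp
#include <iostream>
#include <string>

int main()
{
    // Typical synset_words.txt entry (assumed, since the file's diff is suppressed).
    std::string line = "n01440764 tench, Tinca tinca";
    // Same extraction the updated sample performs: keep everything after the first space.
    std::string className = line.substr(line.find(' ') + 1);
    std::cout << className << std::endl;   // prints: tench, Tinca tinca
    return 0;
}
```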

@@ -245,6 +245,3 @@ cnpy::NpyArray cnpy::npy_load(std::string fname) {
     fclose(fp);
     return arr;
 }
