Deleted trailing whitespaces

pull/265/head
Vitaliy Lyudvichenko 10 years ago
parent 2717765841
commit d1179832f7
  1. modules/dnn/include/opencv2/dnn.hpp (4 changed lines)
  2. modules/dnn/include/opencv2/dnn/dnn.hpp (6 changed lines)
  3. modules/dnn/include/opencv2/dnn/dnn.inl.hpp (4 changed lines)
  4. modules/dnn/samples/alexnet.cpp (4 changed lines)
  5. modules/dnn/src/caffe/glog_emulator.hpp (8 changed lines)
  6. modules/dnn/src/caffe_importer.cpp (12 changed lines)
  7. modules/dnn/src/dnn.cpp (8 changed lines)
  8. modules/dnn/src/layers/convolution_layer.cpp (2 changed lines)
  9. modules/dnn/src/layers/lrn_layer.cpp (4 changed lines)
  10. modules/dnn/src/layers/pooling_layer.cpp (2 changed lines)
  11. modules/dnn/src/layers/softmax_layer.cpp (4 changed lines)

@@ -8,6 +8,4 @@
#include <opencv2/dnn/dnn.hpp>
-#endif /* __OPENCV_DNN_HPP__ */
+#endif /* __OPENCV_DNN_HPP__ */

@@ -109,7 +109,7 @@ namespace dnn
virtual String getInputName(int inputNum);
virtual String getOutputName(int outputNum);
};
//containers for String and int
typedef DictValue LayerId;
typedef DictValue BlobId;
@@ -124,7 +124,7 @@ namespace dnn
int addLayer(const String &name, const String &type, LayerParams &params);
int getLayerId(LayerId layer);
void deleteLayer(LayerId layer);
//each output of each layer can be labeled by unique string label (as in Caffe)
//if label not specified then %layer_name%.%layer_output_id% can be used
void setOutputNames(LayerId layer, const std::vector<String> &outputNames);
@@ -145,7 +145,7 @@ namespace dnn
void setBlob(BlobId outputName, const Blob &blob);
Blob getBlob(BlobId outputName);
void setParam(LayerId layer, int numParam, const Blob &blob);
void getParam(LayerId layer, int numParam);

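Taken together, these dnn.hpp hunks outline the graph-building API. Below is a minimal usage sketch, not part of the commit: it assumes LayerParams and Blob are default-constructible and uses hypothetical names ("fc1", "InnerProduct", "fc1_out").

#include <vector>
#include <opencv2/dnn.hpp>
using namespace cv;
using namespace cv::dnn;

void buildTinyNet()
{
    Net net;
    LayerParams params;                 // assumed empty parameter set
    int id = net.addLayer("fc1", "InnerProduct", params);

    // Label the layer's output explicitly; otherwise the implicit
    // %layer_name%.%layer_output_id% label ("fc1.0") would apply.
    std::vector<String> tops;
    tops.push_back("fc1_out");
    net.setOutputNames(id, tops);

    // BlobId and LayerId are both DictValue, so string labels and
    // integer ids are interchangeable when addressing blobs/layers.
    net.setBlob("fc1_out", Blob());
    Blob out = net.getBlob("fc1_out");
}
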
@@ -44,13 +44,13 @@ namespace dnn
{
return Size(cols(), rows());
}
inline int Blob::channels() const
{
CV_DbgAssert(m.dims >= 3);
return m.size[m.dims-3];
}
inline int Blob::num() const
{
CV_DbgAssert(m.dims == 4);

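The asserts above pin down the Caffe-style NCHW layout: for a 4-D blob, m.size[0] is num, m.size[1] channels, and the trailing two dimensions are rows and cols. A quick illustration, assuming Blob::create(Vec4i) as used in the softmax hunk further below:

Blob b;
b.create(Vec4i(10, 3, 224, 224));          // num=10, channels=3, 224x224
CV_DbgAssert(b.num() == 10);               // m.size[0] when dims == 4
CV_DbgAssert(b.channels() == 3);           // m.size[m.dims-3]
CV_DbgAssert(b.size() == Size(224, 224));  // Size(cols(), rows())
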
@@ -61,7 +61,7 @@ int main(void)
Blob probBlob = net.getBlob("prob");
ClassProb bc = getMaxClass(probBlob);
initClassesNames();
std::string className = (bc.first < (int)CLASES_NAMES.size()) ? CLASES_NAMES[bc.first] : "unnamed";
@@ -71,6 +71,6 @@ int main(void)
std::cout << " \"" + className << "\"";
std::cout << std::endl;
std::cout << "Prob: " << bc.second * 100 << "%" << std::endl;
return 0;
}
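getMaxClass and ClassProb are used above but defined outside this hunk. A sketch consistent with that usage, assuming ClassProb pairs a class index with its probability and that the blob exposes a flat Mat of per-class scores via an accessor like getMat():

typedef std::pair<int, double> ClassProb;  // assumed: (class index, probability)

ClassProb getMaxClass(Blob &probBlob)
{
    Mat probMat = probBlob.getMat().reshape(1, 1); // assumed accessor; 1xN row
    Point classNumber;
    double classProb;
    minMaxLoc(probMat, NULL, &classProb, NULL, &classNumber);
    return std::make_pair(classNumber.x, classProb);
}
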

@@ -27,12 +27,12 @@ class GLogWrapper
public:
-GLogWrapper(const char *_file, const char *_func, int _line,
-const char *_type,
+GLogWrapper(const char *_file, const char *_func, int _line,
+const char *_type,
const char *_cond_str = NULL, bool _cond_status = true
) :
-stream(selectStream(_type)),
-file(_file), func(_func), line(_line),
+stream(selectStream(_type)),
+file(_file), func(_func), line(_line),
type(_type), cond_str(_cond_str), cond_staus(_cond_status) {}
template<typename T>

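This constructor shape exists so glog-style macros can capture the call site and, for checks, the stringified condition plus its outcome. A hedged sketch of such macros (the macro names are assumptions; they are not shown in this hunk):

// Each macro builds a temporary GLogWrapper; anything streamed into it
// is buffered and reported through the selected stream on destruction.
#define LOG(TYPE) GLogWrapper(__FILE__, CV_Func, __LINE__, #TYPE)
#define CHECK(cond) GLogWrapper(__FILE__, CV_Func, __LINE__, "CHECK", #cond, (cond))

// Usage: CHECK(fd != NULL) << "field descriptor is missing";
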
@@ -26,7 +26,7 @@ namespace
{
caffe::NetParameter net;
caffe::NetParameter netBinary;
public:
CaffeImporter(const char *pototxt, const char *caffeModel)
@@ -69,7 +69,7 @@ namespace
const std::string &name = field->name();
std::cout << field->type_name() << " " << name << ":";
#define GET_FIRST(Type) (isRepeated ? msgRefl->GetRepeated##Type(msg, field, 0) : msgRefl->Get##Type(msg, field))
switch (type)
@@ -97,7 +97,7 @@ namespace
break;
}
-std::cout << std::endl;
+std::cout << std::endl;
}
void extractLayerParams(const Message &msg, cv::dnn::LayerParams &params)
@@ -109,7 +109,7 @@ namespace
{
const FieldDescriptor *fd = msgDesc->field(fieldId);
-bool hasData = fd->is_required() ||
+bool hasData = fd->is_required() ||
(fd->is_optional() && (msgRefl->HasField(msg, fd) /*|| fd->has_default_value()*/)) ||
(fd->is_repeated() && msgRefl->FieldSize(msg, fd) > 0);
@@ -119,7 +119,7 @@ namespace
if (fd->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE)
{
if (fd->is_repeated()) //Extract only first item!
-extractLayerParams(msgRefl->GetRepeatedMessage(msg, fd, 0), params);
+extractLayerParams(msgRefl->GetRepeatedMessage(msg, fd, 0), params);
else
extractLayerParams(msgRefl->GetMessage(msg, fd), params);
}
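extractLayerParams walks the protobuf message generically through the Reflection API instead of generated accessors, which is what lets one importer handle every Caffe layer type. A minimal standalone sketch of that pattern, reduced to printing (function name hypothetical):

#include <iostream>
#include <google/protobuf/message.h>
#include <google/protobuf/descriptor.h>
using namespace google::protobuf;

void dumpFields(const Message &msg)
{
    const Descriptor *desc = msg.GetDescriptor();
    const Reflection *refl = msg.GetReflection();
    for (int i = 0; i < desc->field_count(); i++)
    {
        const FieldDescriptor *fd = desc->field(i);
        // Mirror the hasData test above: required, set optional,
        // or non-empty repeated field.
        bool hasData = fd->is_required() ||
                       (fd->is_optional() && refl->HasField(msg, fd)) ||
                       (fd->is_repeated() && refl->FieldSize(msg, fd) > 0);
        if (hasData)
            std::cout << fd->type_name() << " " << fd->name() << std::endl;
    }
}
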
@@ -218,7 +218,7 @@ namespace
extractLayerParams(layer, layerParams);
extractBinaryLayerParms(layer, layerParams);
int id = dstNet.addLayer(name, type, layerParams);
dstNet.setOutputNames(id, tops);

@@ -131,7 +131,7 @@ struct LayerOutId
String name;
LayerOutId() {}
-LayerOutId(int layerId, int outputId, const String &outputName = String())
+LayerOutId(int layerId, int outputId, const String &outputName = String())
: lid(layerId), oid(outputId), name(outputName) {}
};
@@ -323,7 +323,7 @@ struct Net::Impl
void computeNetOutputs()
{
netOutputs.clear();
MapIdToLayerData::iterator it;
for (it = layers.begin(); it != layers.end(); it++)
{
@@ -346,7 +346,7 @@ struct Net::Impl
//already allocated
if (ld.flag)
return;
//allocate parents
for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
allocateLayer(*i);
@@ -461,7 +461,7 @@ int Net::addLayer(const String &name, const String &type, LayerParams &params)
void Net::connect(BlobId input, BlobId output)
{
}
void Net::setOutputNames(LayerId layer, const std::vector<String> &outputNames)

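The allocateLayer fragment above is the usual depth-first walk with a visited flag: each layer first allocates its parents, so every input blob exists before the layer itself is set up. A minimal sketch of the pattern, with LayerData reduced to the two fields the recursion needs (names hypothetical):

#include <map>
#include <set>

struct LayerData
{
    bool flag;                     // already allocated?
    std::set<int> inputLayersId;   // ids of parent layers
};

std::map<int, LayerData> layers;

void allocateLayer(int lid)
{
    LayerData &ld = layers[lid];
    if (ld.flag)
        return;                    // already allocated

    // Allocate all parents first; recursion terminates because
    // the flag is checked before descending.
    for (std::set<int>::iterator i = ld.inputLayersId.begin();
         i != ld.inputLayersId.end(); i++)
        allocateLayer(*i);

    ld.flag = true;                // mark as allocated
}
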
@@ -86,7 +86,7 @@ namespace dnn
biasOnesMat = Mat::ones(1, outH * outW, CV_32F);
}
}
template <typename Dtype>
void im2col_cpu(const Dtype* data_im, const int channels,
const int height, const int width, const int kernel_h, const int kernel_w,

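im2col_cpu is the standard trick that turns convolution into a matrix product: every kernel-sized patch of the image becomes one column. A minimal no-padding, stride-1 sketch of the idea (the real signature above also carries pad and stride arguments; the function name here is hypothetical):

template <typename Dtype>
void im2col_simple(const Dtype *im, int channels, int height, int width,
                   int kh, int kw, Dtype *col)
{
    int outH = height - kh + 1, outW = width - kw + 1;
    // 'col' becomes a (channels*kh*kw) x (outH*outW) matrix:
    // each row is one (channel, ky, kx) kernel position,
    // each column one output location.
    for (int c = 0; c < channels; c++)
        for (int ky = 0; ky < kh; ky++)
            for (int kx = 0; kx < kw; kx++)
                for (int y = 0; y < outH; y++)
                    for (int x = 0; x < outW; x++)
                        *col++ = im[(c * height + y + ky) * width + x + kx];
}

With the image unrolled this way, the convolution is a single GEMM of the (outCh x channels*kh*kw) weight matrix against it, and the biasOnesMat row of ones above lets the bias be added with one more GEMM.
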
@@ -24,7 +24,7 @@ namespace dnn
void spatialNormalization(Blob &src, Blob &dst);
public:
LRNLayer(LayerParams &params);
void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
@@ -101,7 +101,7 @@ namespace dnn
accum.convertTo(accum, accum.type(), alpha/channels, 1);
cv::pow(accum, beta, accum);
for (int cn = channels - 1; cn >= 0; cn--)
{
cv::divide(srcBlob.getMat(n, cn), accum, dstBlob.getMat(n, cn));

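In formula form, the three steps above (scale by alpha/channels and shift by 1, raise to beta, divide) compute the standard LRN response, assuming accum already holds the windowed sum of squares over nearby channels:

\[ dst_c(x,y) = \frac{src_c(x,y)}{\bigl(1 + \tfrac{\alpha}{N} \sum_{c' \in \mathcal{N}(c)} src_{c'}(x,y)^2 \bigr)^{\beta}} \]

where N is the channel count used as the convertTo scale.
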
@@ -10,7 +10,7 @@ namespace dnn
{
class PoolingLayer : public Layer
{
-enum
+enum
{
MAX,
AVE,

@@ -32,7 +32,7 @@ namespace dnn
void SoftMaxLayer::allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs)
{
CV_Assert(inputs.size() == 1);
Vec4i shape = inputs[0]->shape();
outputs.resize(1);
outputs[0].create(shape);
@@ -91,7 +91,7 @@ namespace dnn
{
size_t srcOffset = outerDim * outerStep;
size_t bufOffset = outerDim * cnStep;
//sum exp along axis
for (size_t i = 0; i < innerSize; i++)
bufPtr[bufOffset + i] = 0.f;

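The loop above starts the sum-exp reduction along the channel axis; the layer as a whole performs the numerically stabilized softmax. A compact sketch over a single slice, with the strided buffer indexing of the real code simplified to a flat array:

#include <cmath>
#include <algorithm>

// Stable softmax over n values: subtract the max before exponentiating
// so exp() cannot overflow, then normalize by the sum.
void softmax(const float *src, float *dst, size_t n)
{
    float maxVal = src[0];
    for (size_t i = 1; i < n; i++)
        maxVal = std::max(maxVal, src[i]);

    float sum = 0.f;
    for (size_t i = 0; i < n; i++)
    {
        dst[i] = std::exp(src[i] - maxVal);
        sum += dst[i];
    }
    for (size_t i = 0; i < n; i++)
        dst[i] /= sum;
}
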