Added tracking module

Modified Copyrights

Moved plantuml source files under doc

disabled tests

Added include of precomp.hpp
pull/1/head
Antonella Cascitelli 12 years ago
parent e3aa8bf98e
commit 9e30b50d26
  1. 2
      modules/tracking/CMakeLists.txt
  2. 264
      modules/tracking/doc/common_interfaces_tracker.rst
  3. 343
      modules/tracking/doc/common_interfaces_tracker_feature_set.rst
  4. 506
      modules/tracking/doc/common_interfaces_tracker_model.rst
  5. 293
      modules/tracking/doc/common_interfaces_tracker_sampler.rst
  6. 43
      modules/tracking/doc/misc/Tracker.txt
  7. 55
      modules/tracking/doc/misc/TrackerFeature.txt
  8. 62
      modules/tracking/doc/misc/TrackerModel.txt
  9. 44
      modules/tracking/doc/misc/TrackerSampler.txt
  10. 19
      modules/tracking/doc/misc/packages.txt
  11. BIN
      modules/tracking/doc/pics/Tracker.png
  12. BIN
      modules/tracking/doc/pics/TrackerFeature.png
  13. BIN
      modules/tracking/doc/pics/TrackerModel.png
  14. BIN
      modules/tracking/doc/pics/TrackerSampler.png
  15. BIN
      modules/tracking/doc/pics/package.png
  16. 121
      modules/tracking/doc/tracker_algorithms.rst
  17. 86
      modules/tracking/doc/tracking.rst
  18. 52
      modules/tracking/include/opencv2/tracking.hpp
  19. 410
      modules/tracking/include/opencv2/tracking/feature.hpp
  20. 282
      modules/tracking/include/opencv2/tracking/onlineBoosting.hpp
  21. 114
      modules/tracking/include/opencv2/tracking/onlineMIL.hpp
  22. 1021
      modules/tracking/include/opencv2/tracking/tracker.hpp
  23. 46
      modules/tracking/include/opencv2/tracking/tracking.hpp
  24. 3
      modules/tracking/perf/perf_main.cpp
  25. 21
      modules/tracking/perf/perf_precomp.hpp
  26. 46
      modules/tracking/perf/perf_tracking.cpp
  27. 148
      modules/tracking/samples/tracker.cpp
  28. 1072
      modules/tracking/src/feature.cpp
  29. 735
      modules/tracking/src/onlineBoosting.cpp
  30. 379
      modules/tracking/src/onlineMIL.cpp
  31. 49
      modules/tracking/src/precomp.hpp
  32. 107
      modules/tracking/src/tracker.cpp
  33. 308
      modules/tracking/src/trackerBoosting.cpp
  34. 123
      modules/tracking/src/trackerBoostingModel.cpp
  35. 109
      modules/tracking/src/trackerBoostingModel.hpp
  36. 325
      modules/tracking/src/trackerFeature.cpp
  37. 142
      modules/tracking/src/trackerFeatureSet.cpp
  38. 273
      modules/tracking/src/trackerMIL.cpp
  39. 126
      modules/tracking/src/trackerMILModel.cpp
  40. 103
      modules/tracking/src/trackerMILModel.hpp
  41. 178
      modules/tracking/src/trackerModel.cpp
  42. 142
      modules/tracking/src/trackerSampler.cpp
  43. 382
      modules/tracking/src/trackerSamplerAlgorithm.cpp
  44. 445
      modules/tracking/src/trackerStateEstimator.cpp
  45. 61
      modules/tracking/src/tracking_init.cpp
  46. 3
      modules/tracking/test/test_main.cpp
  47. 18
      modules/tracking/test/test_precomp.hpp
  48. 130
      modules/tracking/test/test_tracker.cpp

@ -0,0 +1,2 @@
set(the_description "Tracking API")
ocv_define_module(tracking opencv_imgproc)

@ -0,0 +1,264 @@
Common Interfaces of Tracker
============================
.. highlight:: cpp
Tracker : Algorithm
-------------------
.. ocv:class:: Tracker
Base abstract class for the long-term tracker::
class CV_EXPORTS_W Tracker : public virtual Algorithm
{
virtual ~Tracker();
bool init( const Mat& image, const Rect& boundingBox );
bool update( const Mat& image, Rect& boundingBox );
static Ptr<Tracker> create( const String& trackerType );
};
Tracker::init
-------------
Initialize the tracker with a known bounding box surrounding the target
.. ocv:function:: bool Tracker::init( const Mat& image, const Rect& boundingBox )
:param image: The initial frame
:param boundingBox: The initial bounding box
Tracker::update
---------------
Update the tracker, find the new most likely bounding box for the target
.. ocv:function:: bool Tracker::update( const Mat& image, Rect& boundingBox )
:param image: The current frame
:param boundingBox: The bounding box that represents the new target location
Tracker::create
---------------
Creates a tracker by its name.
.. ocv:function:: static Ptr<Tracker> Tracker::create( const String& trackerType )
:param trackerType: Tracker type
The following detector types are supported:
* ``"MIL"`` -- :ocv:class:`TrackerMIL`
* ``"BOOSTING"`` -- :ocv:class:`TrackerBoosting`
Creating Own Tracker
--------------------
If you want to create a new tracker, you should follow some simple rules.
First, your tracker should inherit from :ocv:class:`Tracker`, so you must implement two methods:
* Tracker::initImpl, it should be called once in the first frame, here you should initialize all structures. The second argument is the initial bounding box of the target.
* Tracker::updateImpl, it should be called at the beginning of each loop through the video frames. Here you should overwrite the bounding box with the new location.
Example of creating specialized Tracker ``TrackerMIL`` : ::
class CV_EXPORTS_W TrackerMIL : public Tracker
{
public:
TrackerMIL( const TrackerMIL::Params &parameters = TrackerMIL::Params() );
virtual ~TrackerMIL();
...
protected:
bool initImpl( const Mat& image, const Rect& boundingBox );
bool updateImpl( const Mat& image, Rect& boundingBox );
...
};
Every tracker has three components: :ocv:class:`TrackerSampler`, :ocv:class:`TrackerFeatureSet` and :ocv:class:`TrackerModel`.
The first two are instantiated from Tracker base class, instead the last component is abstract, so you must implement your TrackerModel.
Finally add your tracker in the file tracking_init.cpp
TrackerSampler
..............
TrackerSampler is already instantiated, but you should define the sampling algorithm and add the classes (or single class) to TrackerSampler.
You can choose one of the ready implementations, such as TrackerSamplerCSC, or you can implement your own sampling method, in this case
the class must inherit :ocv:class:`TrackerSamplerAlgorithm`. Fill the samplingImpl method that writes the result in "sample" output argument.
Example of creating specialized TrackerSamplerAlgorithm ``TrackerSamplerCSC`` : ::
class CV_EXPORTS_W TrackerSamplerCSC : public TrackerSamplerAlgorithm
{
public:
TrackerSamplerCSC( const TrackerSamplerCSC::Params &parameters = TrackerSamplerCSC::Params() );
~TrackerSamplerCSC();
...
protected:
bool samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample );
...
};
Example of adding TrackerSamplerAlgorithm to TrackerSampler : ::
//sampler is the TrackerSampler
Ptr<TrackerSamplerAlgorithm> CSCSampler = new TrackerSamplerCSC( CSCparameters );
if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) )
return false;
//or add CSC sampler with default parameters
//sampler->addTrackerSamplerAlgorithm( "CSC" );
.. seealso::
:ocv:class:`TrackerSamplerCSC`, :ocv:class:`TrackerSamplerAlgorithm`
TrackerFeatureSet
.................
TrackerFeatureSet is already instantiated (as the first component), but you should define what kinds of features you'll use in your tracker.
You can use multiple feature types, so you can add a ready implementation as :ocv:class:`TrackerFeatureHAAR` in your TrackerFeatureSet or develop your own implementation.
In this case, in the computeImpl method put the code that extract the features and
in the selection method optionally put the code for the refinement and selection of the features.
Example of creating specialized TrackerFeature ``TrackerFeatureHAAR`` : ::
class CV_EXPORTS_W TrackerFeatureHAAR : public TrackerFeature
{
public:
TrackerFeatureHAAR( const TrackerFeatureHAAR::Params &parameters = TrackerFeatureHAAR::Params() );
~TrackerFeatureHAAR();
void selection( Mat& response, int npoints );
...
protected:
bool computeImpl( const std::vector<Mat>& images, Mat& response );
...
};
Example of adding TrackerFeature to TrackerFeatureSet : ::
//featureSet is the TrackerFeatureSet
Ptr<TrackerFeature> trackerFeature = new TrackerFeatureHAAR( HAARparameters );
featureSet->addTrackerFeature( trackerFeature );
.. seealso::
:ocv:class:`TrackerFeatureHAAR`, :ocv:class:`TrackerFeatureSet`
TrackerModel
............
TrackerModel is abstract, so in your implementation you must develop your TrackerModel that inherit from :ocv:class:`TrackerModel`.
Fill the method for the estimation of the state "modelEstimationImpl", that estimates the most likely target location,
see [AAM]_ table I (ME) for further information. Fill "modelUpdateImpl" in order to update the model, see [AAM]_ table I (MU).
In this class you can use the :c:type:`ConfidenceMap` and :c:type:`Trajectory` to store the model. The first represents the model on all the
possible candidate states and the second represents the list of all estimated states.
Example of creating specialized TrackerModel ``TrackerMILModel`` : ::
class TrackerMILModel : public TrackerModel
{
public:
TrackerMILModel( const Rect& boundingBox );
~TrackerMILModel();
...
protected:
void modelEstimationImpl( const std::vector<Mat>& responses );
void modelUpdateImpl();
...
};
And add it in your Tracker : ::
bool TrackerMIL::initImpl( const Mat& image, const Rect& boundingBox )
{
...
//model is the general TrackerModel field of the general Tracker
model = new TrackerMILModel( boundingBox );
...
}
In the last step you should define the TrackerStateEstimator based on your implementation, or you can use one of the ready classes such as :ocv:class:`TrackerStateEstimatorMILBoosting`.
It represents the statistical part of the model that estimates the most likely target state.
Example of creating specialized TrackerStateEstimator ``TrackerStateEstimatorMILBoosting`` : ::
class CV_EXPORTS_W TrackerStateEstimatorMILBoosting : public TrackerStateEstimator
{
class TrackerMILTargetState : public TrackerTargetState
{
...
};
public:
TrackerStateEstimatorMILBoosting( int nFeatures = 250 );
~TrackerStateEstimatorMILBoosting();
...
protected:
Ptr<TrackerTargetState> estimateImpl( const std::vector<ConfidenceMap>& confidenceMaps );
void updateImpl( std::vector<ConfidenceMap>& confidenceMaps );
...
};
And add it in your TrackerModel : ::
//model is the TrackerModel of your Tracker
Ptr<TrackerStateEstimatorMILBoosting> stateEstimator = new TrackerStateEstimatorMILBoosting( params.featureSetNumFeatures );
model->setTrackerStateEstimator( stateEstimator );
.. seealso::
:ocv:class:`TrackerModel`, :ocv:class:`TrackerStateEstimatorMILBoosting`, :ocv:class:`TrackerTargetState`
During this step, you should define your TrackerTargetState based on your implementation. :ocv:class:`TrackerTargetState` base class has only the bounding box (upper-left position, width and height), you can
enrich it adding scale factor, target rotation, etc.
Example of creating specialized TrackerTargetState ``TrackerMILTargetState`` : ::
class TrackerMILTargetState : public TrackerTargetState
{
public:
TrackerMILTargetState( const Point2f& position, int targetWidth, int targetHeight, bool foreground, const Mat& features );
~TrackerMILTargetState();
...
private:
bool isTarget;
Mat targetFeatures;
...
};
Try it
......
To try your tracker you can use the demo at https://github.com/lenlen/opencv/blob/tracking_api/samples/cpp/tracker.cpp.
The first argument is the name of the tracker and the second is a video source.

@ -0,0 +1,343 @@
Common Interfaces of TrackerFeatureSet
======================================
.. highlight:: cpp
TrackerFeatureSet
-----------------
Class that manages the extraction and selection of features
[AAM]_ Feature Extraction and Feature Set Refinement (Feature Processing and Feature Selection). See table I and section III C
[AMVOT]_ Appearance modelling -> Visual representation (Table II, section 3.1 - 3.2)
.. ocv:class:: TrackerFeatureSet
TrackerFeatureSet class::
class CV_EXPORTS_W TrackerFeatureSet
{
public:
TrackerFeatureSet();
~TrackerFeatureSet();
void extraction( const std::vector<Mat>& images );
void selection();
void removeOutliers();
bool addTrackerFeature( String trackerFeatureType );
bool addTrackerFeature( Ptr<TrackerFeature>& feature );
const std::vector<std::pair<String, Ptr<TrackerFeature> > >& getTrackerFeature() const;
const std::vector<Mat>& getResponses() const;
};
TrackerFeatureSet is an aggregation of :ocv:class:`TrackerFeature`
.. seealso::
:ocv:class:`TrackerFeature`
TrackerFeatureSet::extraction
-----------------------------
Extract features from the images collection
.. ocv:function:: void TrackerFeatureSet::extraction( const std::vector<Mat>& images )
:param images: The input images
TrackerFeatureSet::selection
----------------------------
Identify most effective features for all feature types (optional)
.. ocv:function:: void TrackerFeatureSet::selection()
TrackerFeatureSet::removeOutliers
---------------------------------
Remove outliers for all feature types (optional)
.. ocv:function:: void TrackerFeatureSet::removeOutliers()
TrackerFeatureSet::addTrackerFeature
------------------------------------
Add TrackerFeature in the collection. Return true if TrackerFeature is added, false otherwise
.. ocv:function:: bool TrackerFeatureSet::addTrackerFeature( String trackerFeatureType )
:param trackerFeatureType: The TrackerFeature name
.. ocv:function:: bool TrackerFeatureSet::addTrackerFeature( Ptr<TrackerFeature>& feature )
:param feature: The TrackerFeature class
The modes available now:
* ``"HAAR"`` -- Haar Feature-based
The modes available soon:
* ``"HOG"`` -- Histogram of Oriented Gradients features
* ``"LBP"`` -- Local Binary Pattern features
* ``"FEATURE2D"`` -- All types of Feature2D
Example ``TrackerFeatureSet::addTrackerFeature`` : ::
//sample usage:
Ptr<TrackerFeature> trackerFeature = new TrackerFeatureHAAR( HAARparameters );
featureSet->addTrackerFeature( trackerFeature );
//or add CSC sampler with default parameters
//featureSet->addTrackerFeature( "HAAR" );
.. note:: If you use the second method, you must initialize the TrackerFeature
TrackerFeatureSet::getTrackerFeature
------------------------------------
Get the TrackerFeature collection (TrackerFeature name, TrackerFeature pointer)
.. ocv:function:: const std::vector<std::pair<String, Ptr<TrackerFeature> > >& TrackerFeatureSet::getTrackerFeature() const
TrackerFeatureSet::getResponses
-------------------------------
Get the responses
.. ocv:function:: const std::vector<Mat>& TrackerFeatureSet::getResponses() const
.. note:: Be sure to call extraction before getResponses
Example ``TrackerFeatureSet::getResponses`` : ::
//get the patches from sampler
std::vector<Mat> detectSamples = sampler->getSamples();
if( detectSamples.empty() )
return false;
//features extraction
featureSet->extraction( detectSamples );
//get responses
std::vector<Mat> response = featureSet->getResponses();
TrackerFeature
--------------
Abstract base class for TrackerFeature that represents the feature.
.. ocv:class:: TrackerFeature
TrackerFeature class::
class CV_EXPORTS_W TrackerFeature
{
public:
virtual ~TrackerFeature();
static Ptr<TrackerFeature> create( const String& trackerFeatureType );
void compute( const std::vector<Mat>& images, Mat& response );
virtual void selection( Mat& response, int npoints ) = 0;
String getClassName() const;
};
TrackerFeature::create
----------------------
Create TrackerFeature by tracker feature type
.. ocv:function:: static Ptr<TrackerFeature> TrackerFeature::create( const String& trackerFeatureType )
:param trackerFeatureType: The TrackerFeature name
The modes available now:
* ``"HAAR"`` -- Haar Feature-based
The modes available soon:
* ``"HOG"`` -- Histogram of Oriented Gradients features
* ``"LBP"`` -- Local Binary Pattern features
* ``"FEATURE2D"`` -- All types of Feature2D
TrackerFeature::compute
-----------------------
Compute the features in the images collection
.. ocv:function:: void TrackerFeature::compute( const std::vector<Mat>& images, Mat& response )
:param images: The images
:param response: The output response
TrackerFeature::selection
-------------------------
Identify most effective features
.. ocv:function:: void TrackerFeature::selection( Mat& response, int npoints )
:param response: Collection of response for the specific TrackerFeature
:param npoints: Max number of features
.. note:: This method modifies the response parameter
TrackerFeature::getClassName
----------------------------
Get the name of the specific TrackerFeature
.. ocv:function:: String TrackerFeature::getClassName() const
Specialized TrackerFeature
==========================
In [AAM]_ table I and section III C are described the most known feature types. At the moment only :ocv:class:`TrackerFeatureHAAR` is implemented.
TrackerFeatureHAAR : TrackerFeature
-----------------------------------
TrackerFeature based on HAAR features, used by TrackerMIL and many others algorithms
.. ocv:class:: TrackerFeatureHAAR
TrackerFeatureHAAR class::
class CV_EXPORTS_W TrackerFeatureHAAR : TrackerFeature
{
public:
TrackerFeatureHAAR( const TrackerFeatureHAAR::Params &parameters = TrackerFeatureHAAR::Params() );
~TrackerFeatureHAAR();
void selection( Mat& response, int npoints );
bool extractSelected( const std::vector<int> selFeatures, const std::vector<Mat>& images, Mat& response );
std::vector<std::pair<float, float> >& getMeanSigmaPairs();
bool swapFeature( int source, int target );
bool swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature );
CvHaarEvaluator::FeatureHaar& getFeatureAt( int id );
};
.. note:: HAAR features implementation is copied from apps/traincascade and modified according to MIL implementation
TrackerFeatureHAAR::Params
--------------------------
.. ocv:struct:: TrackerFeatureHAAR::Params
List of TrackerFeatureHAAR parameters::
struct CV_EXPORTS Params
{
Params();
int numFeatures; // # of rects
Size rectSize; // rect size
bool isIntegral; // true if input images are integral, false otherwise
};
TrackerFeatureHAAR::TrackerFeatureHAAR
--------------------------------------
Constructor
.. ocv:function:: TrackerFeatureHAAR::TrackerFeatureHAAR( const TrackerFeatureHAAR::Params &parameters = TrackerFeatureHAAR::Params() )
:param parameters: TrackerFeatureHAAR parameters :ocv:struct:`TrackerFeatureHAAR::Params`
TrackerFeatureHAAR::selection
-----------------------------
Identify most effective features
.. ocv:function:: void TrackerFeatureHAAR::selection( Mat& response, int npoints )
:param response: Collection of response for the specific TrackerFeature
:param npoints: Max number of features
.. note:: This method modifies the response parameter
TrackerFeatureHAAR::extractSelected
-----------------------------------
Compute the features only for the selected indices in the images collection
.. ocv:function:: bool TrackerFeatureHAAR::extractSelected( const std::vector<int> selFeatures, const std::vector<Mat>& images, Mat& response )
:param selFeatures: indices of selected features
:param images: The images
:param response: Collection of response for the specific TrackerFeature
TrackerFeatureHAAR::getMeanSigmaPairs
-------------------------------------
Get the list of mean/sigma. Return the list of mean/sigma
.. ocv:function:: std::vector<std::pair<float, float> >& TrackerFeatureHAAR::getMeanSigmaPairs()
TrackerFeatureHAAR::swapFeature
-------------------------------
Swap the feature in position source with the feature in position target
.. ocv:function:: bool TrackerFeatureHAAR::swapFeature( int source, int target )
:param source: The source position
:param target: The target position
Swap the feature in position id with the feature input
.. ocv:function:: bool TrackerFeatureHAAR::swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature )
:param id: The position
:param feature: The feature
TrackerFeatureHAAR::getFeatureAt
--------------------------------
Get the feature in position id
.. ocv:function:: CvHaarEvaluator::FeatureHaar& TrackerFeatureHAAR::getFeatureAt( int id )
:param id: The position
TrackerFeatureHOG
-----------------
TODO To be implemented
TrackerFeatureLBP
-----------------
TODO To be implemented
TrackerFeatureFeature2d
-----------------------
TODO To be implemented

@ -0,0 +1,506 @@
Common Interfaces of TrackerModel
=================================
.. highlight:: cpp
ConfidenceMap
-------------
Represents the model of the target at frame :math:`k` (all states and scores)
[AAM]_ The set of the pair :math:`\langle \hat{x}^{i}_{k}, C^{i}_{k} \rangle`
.. c:type:: ConfidenceMap
ConfidenceMap::
typedef std::vector<std::pair<Ptr<TrackerTargetState>, float> > ConfidenceMap;
.. seealso::
:ocv:class:`TrackerTargetState`
Trajectory
----------
Represents the estimate states for all frames
[AAM]_ :math:`x_{k}` is the trajectory of the target up to time :math:`k`
.. c:type:: Trajectory
Trajectory::
typedef std::vector<Ptr<TrackerTargetState> > Trajectory;
.. seealso::
:ocv:class:`TrackerTargetState`
TrackerTargetState
------------------
Abstract base class for TrackerTargetState that represents a possible state of the target.
[AAM]_ :math:`\hat{x}^{i}_{k}` all the states candidates.
Inherit this class with your target state
.. ocv:class:: TrackerTargetState
TrackerTargetState class::
class CV_EXPORTS_W TrackerTargetState
{
public:
virtual ~TrackerTargetState(){};
Point2f getTargetPosition() const;
void setTargetPosition( const Point2f& position );
int getTargetWidth() const;
void setTargetWidth( int width );
int getTargetHeight() const;
void setTargetHeight( int height );
};
In your own implementation you can add scale variation, width, height, orientation, etc.
TrackerStateEstimator
---------------------
Abstract base class for TrackerStateEstimator that estimates the most likely target state.
[AAM]_ State estimator
[AMVOT]_ Statistical modeling (Fig. 3), Table III (generative) - IV (discriminative) - V (hybrid)
.. ocv:class:: TrackerStateEstimator
TrackerStateEstimator class::
class CV_EXPORTS_W TrackerStateEstimator
{
public:
virtual ~TrackerStateEstimator();
static Ptr<TrackerStateEstimator> create( const String& trackeStateEstimatorType );
Ptr<TrackerTargetState> estimate( const std::vector<ConfidenceMap>& confidenceMaps );
void update( std::vector<ConfidenceMap>& confidenceMaps );
String getClassName() const;
};
TrackerStateEstimator::create
-----------------------------
Create TrackerStateEstimator by tracker state estimator type
.. ocv:function:: static Ptr<TrackerStateEstimator> TrackerStateEstimator::create( const String& trackeStateEstimatorType )
:param trackeStateEstimatorType: The TrackerStateEstimator name
The modes available now:
* ``"BOOSTING"`` -- Boosting-based discriminative appearance models. See [AMVOT]_ section 4.4
The modes available soon:
* ``"SVM"`` -- SVM-based discriminative appearance models. See [AMVOT]_ section 4.5
TrackerStateEstimator::estimate
-------------------------------
Estimate the most likely target state, return the estimated state
.. ocv:function:: Ptr<TrackerTargetState> TrackerStateEstimator::estimate( const std::vector<ConfidenceMap>& confidenceMaps )
:param confidenceMaps: The overall appearance model as a list of :c:type:`ConfidenceMap`
TrackerStateEstimator::update
-----------------------------
Update the ConfidenceMap with the scores
.. ocv:function:: void TrackerStateEstimator::update( std::vector<ConfidenceMap>& confidenceMaps )
:param confidenceMaps: The overall appearance model as a list of :c:type:`ConfidenceMap`
TrackerStateEstimator::getClassName
-----------------------------------
Get the name of the specific TrackerStateEstimator
.. ocv:function:: String TrackerStateEstimator::getClassName() const
TrackerModel
------------
Abstract class that represents the model of the target. It must be instantiated by a specialized tracker
[AAM]_ Ak
Inherit this with your TrackerModel
.. ocv:class:: TrackerModel
TrackerModel class::
class CV_EXPORTS_W TrackerModel
{
public:
TrackerModel();
virtual ~TrackerModel();
void modelEstimation( const std::vector<Mat>& responses );
void modelUpdate();
bool runStateEstimator();
bool setTrackerStateEstimator( Ptr<TrackerStateEstimator> trackerStateEstimator );
void setLastTargetState( const Ptr<TrackerTargetState>& lastTargetState );
Ptr<TrackerTargetState> getLastTargetState() const;
const std::vector<ConfidenceMap>& getConfidenceMaps() const;
const ConfidenceMap& getLastConfidenceMap() const;
Ptr<TrackerStateEstimator> getTrackerStateEstimator() const;
};
TrackerModel::modelEstimation
-----------------------------
Estimate the most likely target location
[AAM]_ ME, Model Estimation table I
.. ocv:function:: void TrackerModel::modelEstimation( const std::vector<Mat>& responses )
:param responses: Features extracted from :ocv:class:`TrackerFeatureSet`
TrackerModel::modelUpdate
-------------------------
Update the model
[AAM]_ MU, Model Update table I
.. ocv:function:: void TrackerModel::modelUpdate()
TrackerModel::runStateEstimator
-------------------------------
Run the TrackerStateEstimator, return true if it is possible to estimate a new state, false otherwise
.. ocv:function:: bool TrackerModel::runStateEstimator()
TrackerModel::setTrackerStateEstimator
--------------------------------------
Set TrackerEstimator, return true if the tracker state estimator is added, false otherwise
.. ocv:function:: bool TrackerModel::setTrackerStateEstimator( Ptr<TrackerStateEstimator> trackerStateEstimator )
:param trackerStateEstimator: The :ocv:class:`TrackerStateEstimator`
.. note:: You can add only one :ocv:class:`TrackerStateEstimator`
TrackerModel::setLastTargetState
--------------------------------
Set the current :ocv:class:`TrackerTargetState` in the :c:type:`Trajectory`
.. ocv:function:: void TrackerModel::setLastTargetState( const Ptr<TrackerTargetState>& lastTargetState )
:param lastTargetState: The current :ocv:class:`TrackerTargetState`
TrackerModel::getLastTargetState
--------------------------------
Get the last :ocv:class:`TrackerTargetState` from :c:type:`Trajectory`
.. ocv:function:: Ptr<TrackerTargetState> TrackerModel::getLastTargetState() const
TrackerModel::getConfidenceMaps
-------------------------------
Get the list of the :c:type:`ConfidenceMap`
.. ocv:function:: const std::vector<ConfidenceMap>& TrackerModel::getConfidenceMaps() const
TrackerModel::getLastConfidenceMap
----------------------------------
Get the last :c:type:`ConfidenceMap` for the current frame
.. ocv:function:: const ConfidenceMap& TrackerModel::getLastConfidenceMap() const
TrackerModel::getTrackerStateEstimator
--------------------------------------
Get the :ocv:class:`TrackerStateEstimator`
.. ocv:function:: Ptr<TrackerStateEstimator> TrackerModel::getTrackerStateEstimator() const
Specialized TrackerStateEstimator
=================================
In [AMVOT]_ Statistical modeling (Fig. 3), Table III (generative) - IV (discriminative) - V (hybrid) are described the most known statistical models.
At the moment :ocv:class:`TrackerStateEstimatorMILBoosting` and :ocv:class:`TrackerStateEstimatorAdaBoosting` are implemented.
TrackerStateEstimatorMILBoosting : TrackerStateEstimator
--------------------------------------------------------
TrackerStateEstimator based on Boosting
.. ocv:class:: TrackerStateEstimatorMILBoosting
TrackerStateEstimatorMILBoosting class::
class CV_EXPORTS_W TrackerStateEstimatorMILBoosting : public TrackerStateEstimator
{
public:
class TrackerMILTargetState : public TrackerTargetState
{
...
};
TrackerStateEstimatorMILBoosting( int nFeatures = 250 );
~TrackerStateEstimatorMILBoosting();
void setCurrentConfidenceMap( ConfidenceMap& confidenceMap );
};
TrackerMILTargetState : TrackerTargetState
------------------------------------------
Implementation of the target state for TrackerMILTargetState
.. ocv:class:: TrackerMILTargetState
TrackerMILTargetState class::
class TrackerMILTargetState : public TrackerTargetState
{
public:
TrackerMILTargetState( const Point2f& position, int targetWidth, int targetHeight, bool foreground, const Mat& features );
~TrackerMILTargetState(){};
void setTargetFg( bool foreground );
void setFeatures( const Mat& features );
bool isTargetFg() const;
Mat getFeatures() const;
};
TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setTargetFg
--------------------------------------------------------------------
Set label: true for target foreground, false for background
.. ocv:function:: void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setTargetFg( bool foreground )
:param foreground: Label for background/foreground
TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setFeatures
--------------------------------------------------------------------
Set the features extracted from :ocv:class:`TrackerFeatureSet`
.. ocv:function:: void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setFeatures( const Mat& features )
:param features: The features extracted
TrackerStateEstimatorMILBoosting::TrackerMILTargetState::isTargetFg
-------------------------------------------------------------------
Get the label. Return true for target foreground, false for background
.. ocv:function:: bool TrackerStateEstimatorMILBoosting::TrackerMILTargetState::isTargetFg() const
TrackerStateEstimatorMILBoosting::TrackerMILTargetState::getFeatures
--------------------------------------------------------------------
Get the features extracted
.. ocv:function:: Mat TrackerStateEstimatorMILBoosting::TrackerMILTargetState::getFeatures() const
TrackerStateEstimatorMILBoosting::TrackerStateEstimatorMILBoosting
------------------------------------------------------------------
Constructor
.. ocv:function:: TrackerStateEstimatorMILBoosting::TrackerStateEstimatorMILBoosting( int nFeatures=250 )
:param nFeatures: Number of features for each sample
TrackerStateEstimatorMILBoosting::setCurrentConfidenceMap
---------------------------------------------------------
Set the current confidenceMap
.. ocv:function:: void TrackerStateEstimatorMILBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap )
:param confidenceMap: The current :c:type:`ConfidenceMap`
TrackerStateEstimatorAdaBoosting : TrackerStateEstimator
--------------------------------------------------------
TrackerStateEstimatorAdaBoosting based on ADA-Boosting
.. ocv:class:: TrackerStateEstimatorAdaBoosting
TrackerStateEstimatorAdaBoosting class::
class CV_EXPORTS_W TrackerStateEstimatorAdaBoosting : public TrackerStateEstimator
{
public:
class TrackerAdaBoostingTargetState : public TrackerTargetState
{
...
};
TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, Size patchSize, const Rect& ROI, const std::vector<std::pair<float, float> >& meanSigma );
~TrackerStateEstimatorAdaBoosting();
Rect getSampleROI() const;
void setSampleROI( const Rect& ROI );
void setCurrentConfidenceMap( ConfidenceMap& confidenceMap );
std::vector<int> computeSelectedWeakClassifier();
std::vector<int> computeReplacedClassifier();
std::vector<int> computeSwappedClassifier();
void setMeanSigmaPair( const std::vector<std::pair<float, float> >& meanSigmaPair );
};
TrackerAdaBoostingTargetState : TrackerTargetState
--------------------------------------------------
Implementation of the target state for TrackerAdaBoostingTargetState
.. ocv:class:: TrackerAdaBoostingTargetState
TrackerAdaBoostingTargetState class::
class TrackerAdaBoostingTargetState : public TrackerTargetState
{
public:
TrackerAdaBoostingTargetState( const Point2f& position, int width, int height, bool foreground, const Mat& responses );
~TrackerAdaBoostingTargetState(){};
void setTargetResponses( const Mat& responses );
void setTargetFg( bool foreground );
Mat getTargetResponses() const;
bool isTargetFg() const;
};
TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetFg
----------------------------------------------------------------------------
Set label: true for target foreground, false for background
.. ocv:function:: void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetFg( bool foreground )
:param foreground: Label for background/foreground
TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetResponses
-----------------------------------------------------------------------------------
Set the features extracted from :ocv:class:`TrackerFeatureSet`
.. ocv:function:: void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetResponses( const Mat& responses )
:param responses: The features extracted
TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::isTargetFg
---------------------------------------------------------------------------
Get the label. Return true for target foreground, false for background
.. ocv:function:: bool TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::isTargetFg() const
TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::getTargetResponses
-----------------------------------------------------------------------------------
Get the features extracted
.. ocv:function:: Mat TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::getTargetResponses()
TrackerStateEstimatorAdaBoosting::TrackerStateEstimatorAdaBoosting
------------------------------------------------------------------
Constructor
.. ocv:function:: TrackerStateEstimatorAdaBoosting::TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, Size patchSize, const Rect& ROI, const std::vector<std::pair<float, float> >& meanSigma )
:param numClassifer: Number of base classifiers
:param initIterations: Number of iterations in the initialization
:param nFeatures: Number of features/weak classifiers
:param patchSize: tracking rect
:param ROI: initial ROI
:param meanSigma: pairs of mean/sigma
TrackerStateEstimatorAdaBoosting::setCurrentConfidenceMap
---------------------------------------------------------
Set the current confidenceMap
.. ocv:function:: void TrackerStateEstimatorAdaBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap )
:param confidenceMap: The current :c:type:`ConfidenceMap`
TrackerStateEstimatorAdaBoosting::getSampleROI
----------------------------------------------
Get the sampling ROI
.. ocv:function:: Rect TrackerStateEstimatorAdaBoosting::getSampleROI() const
TrackerStateEstimatorAdaBoosting::setSampleROI
----------------------------------------------
Set the sampling ROI
.. ocv:function:: void TrackerStateEstimatorAdaBoosting::setSampleROI( const Rect& ROI )
:param ROI: the sampling ROI
TrackerStateEstimatorAdaBoosting::computeSelectedWeakClassifier
---------------------------------------------------------------
Get the list of the selected weak classifiers for the classification step
.. ocv:function:: std::vector<int> TrackerStateEstimatorAdaBoosting::computeSelectedWeakClassifier()
TrackerStateEstimatorAdaBoosting::computeReplacedClassifier
-----------------------------------------------------------
Get the list of the weak classifiers that should be replaced
.. ocv:function:: std::vector<int> TrackerStateEstimatorAdaBoosting::computeReplacedClassifier()
TrackerStateEstimatorAdaBoosting::computeSwappedClassifier
----------------------------------------------------------
Get the list of the weak classifiers that replace those to be replaced
.. ocv:function:: std::vector<int> TrackerStateEstimatorAdaBoosting::computeSwappedClassifier()
TrackerStateEstimatorAdaBoosting::setMeanSigmaPair
--------------------------------------------------
Set the mean/sigma to instantiate possibly new classifiers
.. ocv:function:: void TrackerStateEstimatorAdaBoosting::setMeanSigmaPair( const std::vector<std::pair<float, float> >& meanSigmaPair )
:param meanSigmaPair: the mean/sigma pairs

@ -0,0 +1,293 @@
Common Interfaces of TrackerSampler
===================================
.. highlight:: cpp
TrackerSampler
--------------
Class that manages the sampler in order to select regions for updating the model of the tracker
[AAM]_ Sampling and Labeling. See table I and section III B
.. ocv:class:: TrackerSampler
TrackerSampler class::
class CV_EXPORTS_W TrackerSampler
{
public:
TrackerSampler();
~TrackerSampler();
void sampling( const Mat& image, Rect boundingBox );
const std::vector<std::pair<String, Ptr<TrackerSamplerAlgorithm> > >& getSamplers() const;
const std::vector<Mat>& getSamples() const;
bool addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType );
bool addTrackerSamplerAlgorithm( Ptr<TrackerSamplerAlgorithm>& sampler );
};
TrackerSampler is an aggregation of :ocv:class:`TrackerSamplerAlgorithm`
.. seealso::
:ocv:class:`TrackerSamplerAlgorithm`
TrackerSampler::sampling
------------------------
Computes the regions starting from a position in an image
.. ocv:function:: void TrackerSampler::sampling( const Mat& image, Rect boundingBox )
:param image: The current frame
:param boundingBox: The bounding box from which regions can be calculated
TrackerSampler::getSamplers
---------------------------
Return the collection of the :ocv:class:`TrackerSamplerAlgorithm`
.. ocv:function:: const std::vector<std::pair<String, Ptr<TrackerSamplerAlgorithm> > >& TrackerSampler::getSamplers() const
TrackerSampler::getSamples
--------------------------
Return the samples from all :ocv:class:`TrackerSamplerAlgorithm`, [AAM]_ Fig. 1 variable Sk
.. ocv:function:: const std::vector<Mat>& TrackerSampler::getSamples() const
TrackerSampler::addTrackerSamplerAlgorithm
------------------------------------------
Add TrackerSamplerAlgorithm in the collection.
Return true if sampler is added, false otherwise
.. ocv:function:: bool TrackerSampler::addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType )
:param trackerSamplerAlgorithmType: The TrackerSamplerAlgorithm name
.. ocv:function:: bool TrackerSampler::addTrackerSamplerAlgorithm( Ptr<TrackerSamplerAlgorithm>& sampler )
:param sampler: The TrackerSamplerAlgorithm class
The modes available now:
* ``"CSC"`` -- Current State Center
* ``"CS"`` -- Current State
Example ``TrackerSamplerAlgorithm::addTrackerSamplerAlgorithm`` : ::
//sample usage:
TrackerSamplerCSC::Params CSCparameters;
Ptr<TrackerSamplerAlgorithm> CSCSampler = new TrackerSamplerCSC( CSCparameters );
if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) )
return false;
//or add CSC sampler with default parameters
//sampler->addTrackerSamplerAlgorithm( "CSC" );
.. note:: If you use the second method, you must initialize the TrackerSamplerAlgorithm
TrackerSamplerAlgorithm
-----------------------
Abstract base class for TrackerSamplerAlgorithm that represents the algorithm for the specific sampler.
.. ocv:class:: TrackerSamplerAlgorithm
TrackerSamplerAlgorithm class::
class CV_EXPORTS_W TrackerSamplerAlgorithm
{
public:
virtual ~TrackerSamplerAlgorithm();
static Ptr<TrackerSamplerAlgorithm> create( const String& trackerSamplerType );
bool sampling( const Mat& image, Rect boundingBox, std::vector<Mat>& sample );
String getClassName() const;
};
TrackerSamplerAlgorithm::create
-------------------------------
Create TrackerSamplerAlgorithm by tracker sampler type.
.. ocv:function:: static Ptr<TrackerSamplerAlgorithm> TrackerSamplerAlgorithm::create( const String& trackerSamplerType )
:param trackerSamplerType: The trackerSamplerType name
The modes available now:
* ``"CSC"`` -- Current State Center
* ``"CS"`` -- Current State
TrackerSamplerAlgorithm::sampling
---------------------------------
Computes the regions starting from a position in an image. Return true if samples are computed, false otherwise
.. ocv:function:: bool TrackerSamplerAlgorithm::sampling( const Mat& image, Rect boundingBox, std::vector<Mat>& sample )
:param image: The current frame
:param boundingBox: The bounding box from which regions can be calculated
:param sample: The computed samples [AAM]_ Fig. 1 variable Sk
TrackerSamplerAlgorithm::getClassName
-------------------------------------
Get the name of the specific TrackerSamplerAlgorithm
.. ocv:function:: String TrackerSamplerAlgorithm::getClassName() const
Specialized TrackerSamplerAlgorithm
===================================
In [AAM]_ table I the best-known sampling strategies are described. At the moment :ocv:class:`TrackerSamplerCSC` and :ocv:class:`TrackerSamplerCS` are implemented.
TrackerSamplerCSC : TrackerSamplerAlgorithm
-------------------------------------------
TrackerSampler based on CSC (current state centered), used by MIL algorithm TrackerMIL
.. ocv:class:: TrackerSamplerCSC
TrackerSamplerCSC class::
class CV_EXPORTS_W TrackerSamplerCSC
{
public:
TrackerSamplerCSC( const TrackerSamplerCSC::Params &parameters = TrackerSamplerCSC::Params() );
void setMode( int samplingMode );
~TrackerSamplerCSC();
};
TrackerSamplerCSC::Params
-------------------------
.. ocv:struct:: TrackerSamplerCSC::Params
List of TrackerSamplerCSC parameters::
struct CV_EXPORTS Params
{
Params();
float initInRad; // radius for gathering positive instances during init
float trackInPosRad; // radius for gathering positive instances during tracking
float searchWinSize; // size of search window
int initMaxNegNum; // # negative samples to use during init
int trackMaxPosNum; // # positive samples to use during training
int trackMaxNegNum; // # negative samples to use during training
};
TrackerSamplerCSC::TrackerSamplerCSC
------------------------------------
Constructor
.. ocv:function:: TrackerSamplerCSC::TrackerSamplerCSC( const TrackerSamplerCSC::Params &parameters = TrackerSamplerCSC::Params() )
:param parameters: TrackerSamplerCSC parameters :ocv:struct:`TrackerSamplerCSC::Params`
TrackerSamplerCSC::setMode
--------------------------
Set the sampling mode of TrackerSamplerCSC
.. ocv:function:: void TrackerSamplerCSC::setMode( int samplingMode )
:param samplingMode: The sampling mode
The modes are:
* ``"MODE_INIT_POS = 1"`` -- for the positive sampling in initialization step
* ``"MODE_INIT_NEG = 2"`` -- for the negative sampling in initialization step
* ``"MODE_TRACK_POS = 3"`` -- for the positive sampling in update step
* ``"MODE_TRACK_NEG = 4"`` -- for the negative sampling in update step
* ``"MODE_DETECT = 5"`` -- for the sampling in detection step
TrackerSamplerCS : TrackerSamplerAlgorithm
-------------------------------------------
TrackerSampler based on CS (current state), used by algorithm TrackerBoosting
.. ocv:class:: TrackerSamplerCS
TrackerSamplerCS class::
class CV_EXPORTS_W TrackerSamplerCS
{
public:
TrackerSamplerCS( const TrackerSamplerCS::Params &parameters = TrackerSamplerCS::Params() );
void setMode( int samplingMode );
~TrackerSamplerCS();
};
TrackerSamplerCS::Params
-------------------------
.. ocv:struct:: TrackerSamplerCS::Params
List of TrackerSamplerCS parameters::
struct CV_EXPORTS Params
{
Params();
float overlap; //overlapping for the search windows
float searchFactor; //search region parameter
};
TrackerSamplerCS::TrackerSamplerCS
------------------------------------
Constructor
.. ocv:function:: TrackerSamplerCS::TrackerSamplerCS( const TrackerSamplerCS::Params &parameters = TrackerSamplerCS::Params() )
:param parameters: TrackerSamplerCS parameters :ocv:struct:`TrackerSamplerCS::Params`
TrackerSamplerCS::setMode
--------------------------
Set the sampling mode of TrackerSamplerCS
.. ocv:function:: void TrackerSamplerCS::setMode( int samplingMode )
:param samplingMode: The sampling mode
The modes are:
* ``"MODE_POSITIVE = 1"`` -- for the positive sampling
* ``"MODE_NEGATIVE = 2"`` -- for the negative sampling
* ``"MODE_CLASSIFY = 3"`` -- for the sampling in classification step

@ -0,0 +1,43 @@
@startuml
package "Tracker package" #DDDDDD {
class Algorithm {
}
class Tracker{
Ptr<TrackerFeatureSet> featureSet;
Ptr<TrackerSampler> sampler;
Ptr<TrackerModel> model;
---
+static Ptr<Tracker> create(const string& trackerType);
+bool init(const Mat& image, const Rect& boundingBox);
+bool update(const Mat& image, Rect& boundingBox);
}
class Tracker
note right: Tracker is the general interface for each specialized trackers
class TrackerMIL{
+Params
---
TrackerMIL(TrackerMIL::Params parameters);
+bool init(const Mat& image, const Rect& boundingBox);
+bool update(const Mat& image, Rect& boundingBox);
}
class TrackerBoosting{
+Params
---
TrackerBoosting(TrackerBoosting::Params parameters);
+bool init(const Mat& image, const Rect& boundingBox);
+bool update(const Mat& image, Rect& boundingBox);
}
Algorithm <|-- Tracker : virtual inheritance
Tracker <|-- TrackerMIL
Tracker <|-- TrackerBoosting
note "Single instance of the Tracker" as N1
TrackerBoosting .. N1
TrackerMIL .. N1
}
@enduml

@ -0,0 +1,55 @@
@startuml
package "TrackerFeature package" #DDDDDD {
class TrackerFeatureSet{
-vector<pair<string, Ptr<TrackerFeature> > > features
-vector<Mat> responses
...
TrackerFeatureSet();
~TrackerFeatureSet();
--
+extraction(const std::vector<Mat>& images);
+selection();
+removeOutliers();
+vector<Mat> response getResponses();
+vector<pair<string TrackerFeatureType, Ptr<TrackerFeature> > > getTrackerFeatures();
+bool addTrackerFeature(string trackerFeatureType);
+bool addTrackerFeature(Ptr<TrackerFeature>& feature);
-clearResponses();
}
class TrackerFeature <<virtual>>{
static Ptr<TrackerFeature> = create(const string& trackerFeatureType);
compute(const std::vector<Mat>& images, Mat& response);
selection(Mat& response, int npoints);
}
note bottom: Can be specialized as in table II\nA tracker can use more types of features
class TrackerFeatureFeature2D{
-vector<Keypoints> keypoints
---
TrackerFeatureFeature2D(string detectorType, string descriptorType);
~TrackerFeatureFeature2D();
---
compute(const std::vector<Mat>& images, Mat& response);
selection( Mat& response, int npoints);
}
class TrackerFeatureHOG{
TrackerFeatureHOG();
~TrackerFeatureHOG();
---
compute(const std::vector<Mat>& images, Mat& response);
selection(Mat& response, int npoints);
}
TrackerFeatureSet *-- TrackerFeature
TrackerFeature <|-- TrackerFeatureHOG
TrackerFeature <|-- TrackerFeatureFeature2D
note "For readability and simplicity in this diagram\n there are only two TrackerFeature classes but you\n can consider the implementation of the other TrackerFeature classes" as N1
TrackerFeatureHOG .. N1
TrackerFeatureFeature2D .. N1
}
@enduml

@ -0,0 +1,62 @@
@startuml
package "TrackerModel package" #DDDDDD {
class Typedef << (T,#FF7700) >>{
ConfidenceMap
Trajectory
}
class TrackerModel{
-vector<ConfidenceMap> confidenceMaps;
-Trajectory trajectory;
-Ptr<TrackerStateEstimator> stateEstimator;
...
TrackerModel();
~TrackerModel();
+bool setTrackerStateEstimator(Ptr<TrackerStateEstimator> trackerStateEstimator);
+Ptr<TrackerStateEstimator> getTrackerStateEstimator();
+void modelEstimation(const vector<Mat>& responses);
+void modelUpdate();
+void setLastTargetState(const Ptr<TrackerTargetState> lastTargetState);
+void runStateEstimator();
+const vector<ConfidenceMap>& getConfidenceMaps();
+const ConfidenceMap& getLastConfidenceMap();
}
class TrackerTargetState <<virtual>>{
Point2f targetPosition;
---
Point2f getTargetPosition();
void setTargetPosition(Point2f position);
}
class TrackerTargetState
note bottom: Each TrackerStateEstimator can create own state
class TrackerStateEstimator <<virtual>>{
~TrackerStateEstimator();
static Ptr<TrackerStateEstimator> create(const String& trackeStateEstimatorType);
Ptr<TrackerTargetState> estimate(const vector<ConfidenceMap>& confidenceMaps)
void update(vector<ConfidenceMap>& confidenceMaps)
}
class TrackerStateEstimatorSVM{
TrackerStateEstimatorSVM()
~TrackerStateEstimatorSVM()
Ptr<TrackerTargetState> estimate(const vector<ConfidenceMap>& confidenceMaps)
void update(vector<ConfidenceMap>& confidenceMaps)
}
class TrackerStateEstimatorMILBoosting{
TrackerStateEstimatorMILBoosting()
~TrackerStateEstimatorMILBoosting()
Ptr<TrackerTargetState> estimate(const vector<ConfidenceMap>& confidenceMaps)
void update(vector<ConfidenceMap>& confidenceMaps)
}
TrackerModel -> TrackerStateEstimator: create
TrackerModel *-- TrackerTargetState
TrackerStateEstimator <|-- TrackerStateEstimatorMILBoosting
TrackerStateEstimator <|-- TrackerStateEstimatorSVM
}
@enduml

@ -0,0 +1,44 @@
@startuml
package "TrackerSampler package" #DDDDDD {
class TrackerSampler{
-vector<pair<String, Ptr<TrackerSamplerAlgorithm> > > samplers
-vector<Mat> samples;
...
TrackerSampler();
~TrackerSampler();
+sampling(const Mat& image, Rect boundingBox);
+const vector<pair<String, Ptr<TrackerSamplerAlgorithm> > >& getSamplers();
+const vector<Mat>& getSamples();
+bool addTrackerSamplerAlgorithm(String trackerSamplerAlgorithmType);
+bool addTrackerSamplerAlgorithm(Ptr<TrackerSamplerAlgorithm>& sampler);
---
-void clearSamples();
}
class TrackerSamplerAlgorithm{
~TrackerSamplerAlgorithm();
+static Ptr<TrackerSamplerAlgorithm> create(const String& trackerSamplerType);
+bool sampling(const Mat& image, Rect boundingBox, vector<Mat>& sample);
}
note bottom: A tracker could sample the target\nor it could sample the target and the background
class TrackerSamplerCS{
TrackerSamplerCS();
~TrackerSamplerCS();
+bool sampling(const Mat& image, Rect boundingBox, vector<Mat>& sample);
}
class TrackerSamplerCSC{
TrackerSamplerCSC();
~TrackerSamplerCSC();
+bool sampling(const Mat& image, Rect boundingBox, vector<Mat>& sample);
}
TrackerSampler *-- TrackerSamplerAlgorithm
TrackerSamplerAlgorithm <|-- TrackerSamplerCS
TrackerSamplerAlgorithm <|-- TrackerSamplerCSC
}
@enduml

@ -0,0 +1,19 @@
@startuml
package "Tracker" #DDDDDD {
}
package "TrackerFeature" #DDDDDD {
}
package "TrackerSampler" #DDDDDD {
}
package "TrackerModel" #DDDDDD {
}
Tracker -> TrackerModel: create
Tracker -> TrackerSampler: create
Tracker -> TrackerFeature: create
@enduml

Binary file not shown.

After

Width:  |  Height:  |  Size: 32 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 59 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 56 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.5 KiB

@ -0,0 +1,121 @@
Tracker Algorithms
==================
.. highlight:: cpp
Two algorithms will be implemented soon, the first is MIL (Multiple Instance Learning) [MIL]_ and second is Online Boosting [OLB]_.
.. [MIL] B Babenko, M-H Yang, and S Belongie, Visual Tracking with Online Multiple Instance Learning, In CVPR, 2009
.. [OLB] H Grabner, M Grabner, and H Bischof, Real-time tracking via on-line boosting, In Proc. BMVC, volume 1, pages 47– 56, 2006
TrackerBoosting
---------------
This is a real-time object tracking based on a novel on-line version of the AdaBoost algorithm.
The classifier uses the surrounding background as negative examples in update step to avoid the drifting problem.
.. ocv:class:: TrackerBoosting
Implementation of TrackerBoosting from :ocv:class:`Tracker`::
class CV_EXPORTS_W TrackerBoosting : public Tracker
{
public:
TrackerBoosting( const TrackerBoosting::Params &parameters = TrackerBoosting::Params() );
virtual ~TrackerBoosting();
void read( const FileNode& fn );
void write( FileStorage& fs ) const;
};
TrackerMIL
----------
The MIL algorithm trains a classifier in an online manner to separate the object from the background. Multiple Instance Learning avoids the drift problem for a robust tracking.
Original code can be found here http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml
.. ocv:class:: TrackerMIL
Implementation of TrackerMIL from :ocv:class:`Tracker`::
class CV_EXPORTS_W TrackerMIL : public Tracker
{
public:
TrackerMIL( const TrackerMIL::Params &parameters = TrackerMIL::Params() );
virtual ~TrackerMIL();
void read( const FileNode& fn );
void write( FileStorage& fs ) const;
};
TrackerMIL::Params
------------------
.. ocv:struct:: TrackerMIL::Params
List of MIL parameters::
struct CV_EXPORTS Params
{
Params();
//parameters for sampler
float samplerInitInRadius; // radius for gathering positive instances during init
int samplerInitMaxNegNum; // # negative samples to use during init
float samplerSearchWinSize; // size of search window
float samplerTrackInRadius; // radius for gathering positive instances during tracking
int samplerTrackMaxPosNum; // # positive samples to use during tracking
int samplerTrackMaxNegNum; // # negative samples to use during tracking
int featureSetNumFeatures; // # features
void read( const FileNode& fn );
void write( FileStorage& fs ) const;
};
TrackerMIL::TrackerMIL
----------------------
Constructor
.. ocv:function:: TrackerMIL::TrackerMIL( const TrackerMIL::Params &parameters = TrackerMIL::Params() )
:param parameters: MIL parameters :ocv:struct:`TrackerMIL::Params`
TrackerBoosting::Params
-----------------------
.. ocv:struct:: TrackerBoosting::Params
List of BOOSTING parameters::
struct CV_EXPORTS Params
{
Params();
int numClassifiers; //the number of classifiers to use in a OnlineBoosting algorithm
float samplerOverlap; //search region parameters to use in a OnlineBoosting algorithm
float samplerSearchFactor; // search region parameters to use in a OnlineBoosting algorithm
int iterationInit; //the initial iterations
int featureSetNumFeatures; // #features
void read( const FileNode& fn );
void write( FileStorage& fs ) const;
};
TrackerBoosting::TrackerBoosting
--------------------------------
Constructor
.. ocv:function:: TrackerBoosting::TrackerBoosting( const TrackerBoosting::Params &parameters = TrackerBoosting::Params() )
:param parameters: BOOSTING parameters :ocv:struct:`TrackerBoosting::Params`

@ -0,0 +1,86 @@
Tracking API
============
.. highlight:: cpp
Long-term optical tracking API
------------------------------
Long-term optical tracking is one of the most important issues for many computer vision applications in real-world scenarios.
The development in this area is very fragmented and this API is a unique interface useful for plugging in several algorithms and comparing them.
This work is partially based on [AAM]_ and [AMVOT]_.
These algorithms start from a bounding box of the target and with their internal representation they avoid the drift during the tracking.
These long-term trackers are able to evaluate online the quality of the location of the target in the new frame, without ground truth.
There are three main components: the TrackerSampler, the TrackerFeatureSet and the TrackerModel. The first component is the object that computes the patches over the frame based on the last target location.
The TrackerFeatureSet is the class that manages the Features; it is possible to plug in many kinds of these (HAAR, HOG, LBP, Feature2D, etc.).
The last component is the internal representation of the target, it is the appearance model. It stores all state candidates and computes the trajectory (the most likely target states). The class TrackerTargetState represents a possible state of the target.
The TrackerSampler and the TrackerFeatureSet are the visual representation of the target, while the TrackerModel is the statistical model.
A recent benchmark between these algorithms can be found in [OOT]_.
UML design:
-----------
**General diagram**
.. image:: pics/package.png
:width: 50%
:alt: General diagram
:align: center
**Tracker diagram**
.. image:: pics/Tracker.png
:width: 80%
:alt: Tracker diagram
:align: center
**TrackerSampler diagram**
.. image:: pics/TrackerSampler.png
:width: 100%
:alt: TrackerSampler diagram
:align: center
**TrackerFeatureSet diagram**
.. image:: pics/TrackerFeature.png
:width: 100%
:alt: TrackerFeatureSet diagram
:align: center
**TrackerModel diagram**
.. image:: pics/TrackerModel.png
:width: 100%
:alt: TrackerModel diagram
:align: center
To see how API works, try tracker demo:
https://github.com/lenlen/opencv/blob/tracking_api/samples/cpp/tracker.cpp
.. note:: This Tracking API has been designed with PlantUML. If you modify this API please change UML files under modules/tracking/misc/
The following reference was used in the API
.. [AAM] S Salti, A Cavallaro, L Di Stefano, Adaptive Appearance Modeling for Video Tracking: Survey and Evaluation, IEEE Transactions on Image Processing, Vol. 21, Issue 10, October 2012, pp. 4334-4348
.. [AMVOT] X Li, W Hu, C Shen, Z Zhang, A Dick, A van den Hengel, A Survey of Appearance Models in Visual Object Tracking, ACM Transactions on Intelligent Systems and Technology (TIST), 2013
.. [OOT] Yi Wu and Jongwoo Lim and Ming-Hsuan Yang, Online Object Tracking: A Benchmark, The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), 2013
Tracker classes:
----------------
.. toctree::
:maxdepth: 2
tracker_algorithms
common_interfaces_tracker
common_interfaces_tracker_sampler
common_interfaces_tracker_feature_set
common_interfaces_tracker_model

@ -0,0 +1,52 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_TRACKING_HPP__
#define __OPENCV_TRACKING_HPP__
#include "opencv2/tracking/tracker.hpp"
namespace cv
{
// Initializes the tracking module; presumably registers the module's
// Algorithm subclasses with the cv::Algorithm factory (pattern used by other
// OpenCV modules — confirm against the .cpp). Returns true on success.
CV_EXPORTS bool initModule_tracking(void);
}
#endif //__OPENCV_TRACKING_HPP__

@ -0,0 +1,410 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_FEATURE_HPP__
#define __OPENCV_FEATURE_HPP__
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include <iostream>
#include <string>
#include <time.h>
/*
* TODO This implementation is based on apps/traincascade/
* TODO Changed CvHaarEvaluator based on ADABOOSTING implementation (Grabner et al.)
*/
namespace cv
{
// FileStorage node names used when (de)serializing features and their parameters.
#define FEATURES "features"
#define CC_FEATURES FEATURES
#define CC_FEATURE_PARAMS "featureParams"
#define CC_MAX_CAT_COUNT "maxCatCount"
#define CC_FEATURE_SIZE "featSize"
#define CC_NUM_FEATURES "numFeat"
#define CC_ISINTEGRAL "isIntegral"
#define CC_RECTS "rects"
#define CC_TILTED "tilted"
#define CC_RECT "rect"
// Parameter-set names for the three feature families.
#define LBPF_NAME "lbpFeatureParams"
#define HOGF_NAME "HOGFeatureParams"
#define HFP_NAME "haarFeatureParams"
#define CV_HAAR_FEATURE_MAX 3
// HOG descriptor layout: 9 orientation bins per cell, 4 cells per block.
#define N_BINS 9
#define N_CELLS 4
// Compute flat offsets (p0..p3) of the four corners of `rect` inside an
// upright integral image whose row stride is `step`.
#define CV_SUM_OFFSETS( p0, p1, p2, p3, rect, step ) \
/* (x, y) */ \
(p0) = (rect).x + (step) * (rect).y; \
/* (x + w, y) */ \
(p1) = (rect).x + (rect).width + (step) * (rect).y; \
/* (x, y + h) */ \
(p2) = (rect).x + (step) * ((rect).y + (rect).height); \
/* (x + w, y + h) */ \
(p3) = (rect).x + (rect).width + (step) * ((rect).y + (rect).height);
// Same as above but for a 45-degree tilted (rotated) integral image.
#define CV_TILTED_OFFSETS( p0, p1, p2, p3, rect, step ) \
/* (x, y) */ \
(p0) = (rect).x + (step) * (rect).y; \
/* (x - h, y + h) */ \
(p1) = (rect).x - (rect).height + (step) * ((rect).y + (rect).height);\
/* (x + w, y + w) */ \
(p2) = (rect).x + (rect).width + (step) * ((rect).y + (rect).width); \
/* (x + w - h, y + w + h) */ \
(p3) = (rect).x + (rect).width - (rect).height \
+ (step) * ((rect).y + (rect).width + (rect).height);
// Normalization factor derived from the sum and squared-sum integral images
// (declared here; implementation lives in the .cpp).
float calcNormFactor( const Mat& sum, const Mat& sqSum );
/*
 * Serialize the subset of `features` selected by `featureMap` into `fs`
 * under the FEATURES node.
 *
 * @param features   All candidate features. Taken by const reference (was
 *                   by value) to avoid copying the whole feature set on
 *                   every call.
 * @param fs         Destination storage; each kept feature is written as a
 *                   "{...}" map entry via Feature::write().
 * @param featureMap 1 x N int matrix; a non-negative entry at column fi
 *                   means features[fi] is written, a negative entry skips it.
 */
template<class Feature>
void _writeFeatures( const std::vector<Feature>& features, FileStorage &fs, const Mat& featureMap )
{
fs << FEATURES << "[";
const Mat_<int>& featureMap_ = (const Mat_<int>&) featureMap;
for ( int fi = 0; fi < featureMap.cols; fi++ )
{
if( featureMap_( 0, fi ) >= 0 )
{
fs << "{";
features[fi].write( fs );
fs << "}";
}
}
fs << "]";
}
/*
 * Abstract base class for parameter containers (ported from
 * apps/traincascade). Concrete subclasses serialize themselves to and from
 * a FileStorage and can print their settings to the console.
 */
class CvParams
{
public:
CvParams();
virtual ~CvParams()
{
}
// from|to file
virtual void write( FileStorage &fs ) const = 0;  // serialize parameters to storage
virtual bool read( const FileNode &node ) = 0;    // load parameters; returns false on failure
// from|to screen
virtual void printDefaults() const;  // print the default attribute values
virtual void printAttrs() const;     // print the current attribute values
// Parse a single name/value attribute pair; returns false if prmName is unknown.
virtual bool scanAttr( const std::string prmName, const std::string val );
std::string name;  // parameter-set name used when reading/writing and in messages
};
/*
 * Parameters common to all feature families (HAAR, LBP, HOG).
 * Use create() as a factory keyed on the enum below.
 */
class CvFeatureParams : public CvParams
{
public:
// Supported feature types (argument to create()).
enum
{
HAAR = 0,
LBP = 1,
HOG = 2
};
CvFeatureParams();
virtual void init( const CvFeatureParams& fp );  // copy settings from another instance
virtual void write( FileStorage &fs ) const;
virtual bool read( const FileNode &node );
// Factory: build the parameter object matching featureType (HAAR/LBP/HOG).
static Ptr<CvFeatureParams> create( int featureType );
int maxCatCount; // 0 in case of numerical features
int featSize; // 1 in case of simple features (HAAR, LBP) and N_BINS(9)*N_CELLS(4) in case of Dalal's HOG features
int numFeatures;  // number of features to generate
};
/*
 * Abstract evaluator of features over a set of samples. Subclasses
 * generate a feature pool (generateFeatures) and compute single feature
 * values via operator()(featureIdx, sampleIdx).
 */
class CvFeatureEvaluator
{
public:
virtual ~CvFeatureEvaluator()
{
}
// Prepare the evaluator: store parameters, allocate for _maxSampleCount
// samples of window size _winSize, and build the feature pool.
virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize );
// Register sample number idx with class label clsLabel.
virtual void setImage( const Mat& img, uchar clsLabel, int idx );
// Serialize the features selected by featureMap (see _writeFeatures).
virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const = 0;
// Value of feature featureIdx evaluated on sample sampleIdx.
virtual float operator()( int featureIdx, int sampleIdx ) = 0;
// Factory keyed on CvFeatureParams::{HAAR, LBP, HOG}.
static Ptr<CvFeatureEvaluator> create( int type );
int getNumFeatures() const
{
return numFeatures;
}
int getMaxCatCount() const
{
return featureParams->maxCatCount;
}
int getFeatureSize() const
{
return featureParams->featSize;
}
// Column vector of per-sample class labels (float, one row per sample).
const Mat& getCls() const
{
return cls;
}
// Class label of sample si.
float getCls( int si ) const
{
return cls.at<float>( si, 0 );
}
protected:
// Fill the feature pool; called from init().
virtual void generateFeatures() = 0;
int npos, nneg;               // positive / negative sample counts
int numFeatures;              // size of the generated feature pool
Size winSize;                 // sample window size
CvFeatureParams *featureParams;  // not owned; supplied via init()
Mat cls;                      // per-sample class labels, see getCls()
};
// Haar-specific feature parameters.
class CvHaarFeatureParams : public CvFeatureParams
{
 public:
  CvHaarFeatureParams();

  virtual void init( const CvFeatureParams& fp );
  virtual void write( FileStorage &fs ) const;
  virtual bool read( const FileNode &node );

  virtual void printDefaults() const;
  virtual void printAttrs() const;
  virtual bool scanAttr( const std::string prm, const std::string val );

  // NOTE(review): presumably indicates that setImage() receives images that
  // are already integral images -- confirm against CvHaarEvaluator.
  bool isIntegral;
};
// Haar-like feature evaluator, adapted from the on-line boosting / MIL
// tracker implementations.
class CvHaarEvaluator : public CvFeatureEvaluator
{
 public:
  // One Haar-like feature: a weighted set of rectangular areas whose sums
  // over the (integral) image are combined into the feature response.
  class FeatureHaar
  {
   public:
    FeatureHaar( Size patchSize );
    // Evaluates the feature on image restricted to ROI; the response is
    // written to *result, and the return value reports success/failure.
    bool eval( const Mat& image, Rect ROI, float* result ) const;
    int getNumAreas();
    const std::vector<float>& getWeights() const;
    const std::vector<Rect>& getAreas() const;
    // Intentionally empty: FeatureHaar is not serialized in this format.
    void write( FileStorage ) const
    {
    }
    ;
    float getInitMean() const;
    float getInitSigma() const;

   private:
    int m_type;  // feature type id
    int m_numAreas;
    std::vector<float> m_weights;
    float m_initMean;
    float m_initSigma;
    void generateRandomFeature( Size imageSize );
    float getSum( const Mat& image, Rect imgROI ) const;
    std::vector<Rect> m_areas;  // areas within the patch over which to compute the feature
    cv::Size m_initSize;  // size of the patch used during training
    cv::Size m_curSize;  // size of the patches currently under investigation
    float m_scaleFactorHeight;  // scaling factor in vertical direction
    float m_scaleFactorWidth;  // scaling factor in horizontal direction
    std::vector<Rect> m_scaleAreas;  // areas after scaling
    std::vector<float> m_scaleWeights;  // weights after scaling
  };

  virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize );
  virtual void setImage( const Mat& img, uchar clsLabel = 0, int idx = 1 );
  virtual float operator()( int featureIdx, int sampleIdx );
  virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const;
  void writeFeature( FileStorage &fs ) const;  // for old file format
  const std::vector<CvHaarEvaluator::FeatureHaar>& getFeatures() const;
  inline CvHaarEvaluator::FeatureHaar& getFeatures( int idx )
  {
    return features[idx];
  }
  void setWinSize( Size patchSize );
  // NOTE(review): named like a setter but the signature is a const getter
  // returning the window size -- presumably meant to be getWinSize; confirm.
  Size setWinSize() const;
  virtual void generateFeatures();

  /**
   * TODO new method
   * \brief Overload the original generateFeatures in order to limit the number of the features
   * @param numFeatures Number of the features
   */
  virtual void generateFeatures( int numFeatures );

 protected:
  bool isIntegral;  // see CvHaarFeatureParams::isIntegral

  /* TODO Added from MIL implementation */
  Mat _ii_img;
  // Builds per-channel 32-bit float integral images of img.
  void compute_integral( const cv::Mat & img, std::vector<cv::Mat_<float> > & ii_imgs )
  {
    Mat ii_img;
    integral( img, ii_img, CV_32F );
    split( ii_img, ii_imgs );
  }

  std::vector<FeatureHaar> features;
  Mat sum; /* sum images (each row represents image) */
};
// HOG-specific feature parameters (defaults set in the constructor).
struct CvHOGFeatureParams : public CvFeatureParams
{
  CvHOGFeatureParams();
};
// HOG feature evaluator: Dalal-style blocks of N_CELLS cells x N_BINS
// orientation bins, computed via integral histograms.
class CvHOGEvaluator : public CvFeatureEvaluator
{
 public:
  virtual ~CvHOGEvaluator()
  {
  }
  virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize );
  virtual void setImage( const Mat& img, uchar clsLabel, int idx );
  virtual float operator()( int varIdx, int sampleIdx );
  virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const;

 protected:
  virtual void generateFeatures();
  // Computes one integral histogram per orientation bin plus the integral
  // (norm) image used for block normalization.
  virtual void integralHistogram( const Mat &img, std::vector<Mat> &histogram, Mat &norm, int nbins ) const;

  // One HOG block: N_CELLS cell rectangles with precomputed corner offsets
  // (fastRect) for O(1) lookups into the integral histograms.
  class Feature
  {
   public:
    Feature();
    Feature( int offset, int x, int y, int cellW, int cellH );
    float calc( const std::vector<Mat> &_hists, const Mat &_normSum, size_t y, int featComponent ) const;
    void write( FileStorage &fs ) const;
    void write( FileStorage &fs, int varIdx ) const;

    Rect rect[N_CELLS];  //cells
    struct
    {
      int p0, p1, p2, p3;  // integral-image corner offsets of one cell
    } fastRect[N_CELLS];
  };
  std::vector<Feature> features;

  Mat normSum;  //for normalization calculation (L1 or L2)
  std::vector<Mat> hist;  // one integral histogram per orientation bin
};
// Maps the flat variable index onto (feature, component) and evaluates the
// corresponding normalized HOG bin for sample sampleIdx.
inline float CvHOGEvaluator::operator()( int varIdx, int sampleIdx )
{
  // Each HOG feature contributes N_BINS * N_CELLS scalar components.
  const int componentsPerFeature = N_BINS * N_CELLS;
  const int featureIdx = varIdx / componentsPerFeature;
  const int componentIdx = varIdx % componentsPerFeature;
  return features[featureIdx].calc( hist, normSum, sampleIdx, componentIdx );
}
// Returns the block-normalized response of one histogram bin of one cell.
// featComponent encodes (cell, bin); y is the sample's row index inside the
// integral histograms.
inline float CvHOGEvaluator::Feature::calc( const std::vector<Mat>& _hists, const Mat& _normSum, size_t y, int featComponent ) const
{
  float normFactor;
  float res;

  int binIdx = featComponent % N_BINS;
  int cellIdx = featComponent / N_BINS;

  // Cell sum of the selected orientation bin via the integral histogram
  // (four-corner lookup using the precomputed fastRect offsets).
  const float *phist = _hists[binIdx].ptr<float>( (int) y );
  res = phist[fastRect[cellIdx].p0] - phist[fastRect[cellIdx].p1] - phist[fastRect[cellIdx].p2] + phist[fastRect[cellIdx].p3];

  // Normalization factor over the whole block. NOTE(review): the corners are
  // deliberately taken from four different cells -- presumably the outer
  // corners of the 2x2 cell block; confirm against generateFeatures().
  const float *pnormSum = _normSum.ptr<float>( (int) y );
  normFactor = (float) ( pnormSum[fastRect[0].p0] - pnormSum[fastRect[1].p1] - pnormSum[fastRect[2].p2] + pnormSum[fastRect[3].p3] );

  res = ( res > 0.001f ) ? ( res / ( normFactor + 0.001f ) ) : 0.f;  //for cutting negative values, which appear due to floating precision
  return res;
}
// LBP-specific feature parameters (defaults set in the constructor).
struct CvLBPFeatureParams : CvFeatureParams
{
  CvLBPFeatureParams();
};
// LBP feature evaluator: computes Local Binary Pattern codes from the
// integral image of each stored sample.
class CvLBPEvaluator : public CvFeatureEvaluator
{
 public:
  virtual ~CvLBPEvaluator()
  {
  }
  virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize );
  virtual void setImage( const Mat& img, uchar clsLabel, int idx );
  // LBP code of feature featureIdx on stored sample sampleIdx.
  virtual float operator()( int featureIdx, int sampleIdx )
  {
    return (float) features[featureIdx].calc( sum, sampleIdx );
  }
  virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const;

 protected:
  virtual void generateFeatures();

  // One LBP feature: a 3x3 grid of equal blocks; p[16] caches the
  // integral-image offsets of the 4x4 grid corner points.
  class Feature
  {
   public:
    Feature();
    Feature( int offset, int x, int y, int _block_w, int _block_h );
    uchar calc( const Mat& _sum, size_t y ) const;
    void write( FileStorage &fs ) const;

    Rect rect;  // NOTE(review): presumably the top-left block -- confirm
    int p[16];
  };
  std::vector<Feature> features;

  Mat sum;  // integral images (each row represents one sample)
};
// Computes the 8-bit LBP code for the sample at row y of the integral image:
// each of the 8 neighbour blocks is compared against the centre block sum
// (cval) and contributes one bit of the code.
inline uchar CvLBPEvaluator::Feature::calc( const Mat &_sum, size_t y ) const
{
  const int* psum = _sum.ptr<int>( (int) y );
  // Sum over the centre block via four-corner integral lookup; p[] holds the
  // 16 precomputed offsets of the 4x4 corner grid.
  int cval = psum[p[5]] - psum[p[6]] - psum[p[9]] + psum[p[10]];

  // Trailing numbers name the neighbour position in the 3x3 block grid.
  return (uchar) ( ( psum[p[0]] - psum[p[1]] - psum[p[4]] + psum[p[5]] >= cval ? 128 : 0 ) |  // 0
      ( psum[p[1]] - psum[p[2]] - psum[p[5]] + psum[p[6]] >= cval ? 64 : 0 ) |  // 1
      ( psum[p[2]] - psum[p[3]] - psum[p[6]] + psum[p[7]] >= cval ? 32 : 0 ) |  // 2
      ( psum[p[6]] - psum[p[7]] - psum[p[10]] + psum[p[11]] >= cval ? 16 : 0 ) |  // 5
      ( psum[p[10]] - psum[p[11]] - psum[p[14]] + psum[p[15]] >= cval ? 8 : 0 ) |  // 8
      ( psum[p[9]] - psum[p[10]] - psum[p[13]] + psum[p[14]] >= cval ? 4 : 0 ) |  // 7
      ( psum[p[8]] - psum[p[9]] - psum[p[12]] + psum[p[13]] >= cval ? 2 : 0 ) |  // 6
      ( psum[p[4]] - psum[p[5]] - psum[p[8]] + psum[p[9]] >= cval ? 1 : 0 ) );  // 3
}
} /* namespace cv */
#endif

@ -0,0 +1,282 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_ONLINEBOOSTING_HPP__
#define __OPENCV_ONLINEBOOSTING_HPP__
#include "opencv2/core.hpp"
namespace cv
{
//TODO based on the original implementation
//http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml
class BaseClassifier;
class WeakClassifierHaarFeature;
class EstimatedGaussDistribution;
class ClassifierThreshold;
class Detector;
// On-line boosting strong classifier with direct feature selection:
// numBaseClf selectors each pick the currently best weak classifier out of a
// pool shared by all selectors; the final confidence is an alpha-weighted
// vote. Optionally the weakest classifier of the pool can be exchanged for a
// freshly trained spare (feature exchange).
class StrongClassifierDirectSelection
{
 public:
  StrongClassifierDirectSelection( int numBaseClf, int numWeakClf, Size patchSz, const Rect& sampleROI, bool useFeatureEx = false, int iterationInit =
                                       0 );
  virtual ~StrongClassifierDirectSelection();

  // Must be called once after construction: allocates the selector array.
  void initBaseClassifier();

  // One boosting update with a labelled sample and initial weight importance.
  bool update( const Mat& image, int target, float importance = 1.0 );
  // Alpha-weighted vote of all selectors on a response.
  float eval( const Mat& response );
  // Indices of the weak classifier currently selected by each selector.
  std::vector<int> getSelectedWeakClassifier();
  // Runs the internal detector over sampleROI; returns the confidence of the
  // best detection and stores its patch index in idx.
  float classifySmooth( const std::vector<Mat>& images, const Rect& sampleROI, int& idx );
  int getNumBaseClassifier();
  Size getPatchSize() const;
  Rect getROI() const;
  bool getUseFeatureExchange() const;
  int getReplacedClassifier() const;
  // Performs the weak-classifier exchange decided by the last update().
  void replaceWeakClassifier( int idx );
  int getSwappedClassifier() const;

 private:
  //StrongClassifier
  int numBaseClassifier;
  int numAllWeakClassifier;  // pool size: numWeakClassifier + iterInit spares
  int numWeakClassifier;
  int iterInit;
  BaseClassifier** baseClassifier;  // selectors, allocated by initBaseClassifier()
  std::vector<float> alpha;  // per-selector vote weights
  cv::Size patchSize;

  bool useFeatureExchange;

  //StrongClassifierDirectSelection
  std::vector<bool> m_errorMask;  // per-weak-classifier error flags for the current sample
  std::vector<float> m_errors;
  std::vector<float> m_sumErrors;

  Detector* detector;  // owned; deleted in the destructor
  Rect ROI;

  int replacedClassifier;
  int swappedClassifier;
};
// One "selector" of the boosting framework: wraps a pool of weak classifiers
// and keeps per-classifier correct/wrong weight statistics used to pick the
// current best one.
class BaseClassifier
{
 public:
  // Owning constructor: allocates a fresh weak-classifier pool.
  BaseClassifier( int numWeakClassifier, int iterationInit );
  // Sharing constructor: reuses the pool of another BaseClassifier.
  BaseClassifier( int numWeakClassifier, int iterationInit, WeakClassifierHaarFeature** weakCls );

  WeakClassifierHaarFeature** getReferenceWeakClassifier()
  {
    return weakClassifier;
  }
  ;
  // Updates every weak classifier in the pool with the sample and records
  // which ones misclassified it in errorMask.
  void trainClassifier( const Mat& image, int target, float importance, std::vector<bool>& errorMask );
  // Picks the weak classifier with the lowest weighted error; fills errors.
  int selectBestClassifier( std::vector<bool>& errorMask, float importance, std::vector<float> & errors );
  // Index of the weak classifier with the highest accumulated error.
  int computeReplaceWeakestClassifier( const std::vector<float> & errors );
  void replaceClassifierStatistic( int sourceIndex, int targetIndex );
  int getIdxOfNewWeakClassifier()
  {
    return m_idxOfNewWeakClassifier;
  }
  ;
  int eval( const Mat& image );
  virtual ~BaseClassifier();
  float getError( int curWeakClassifier );
  void getErrors( float* errors );
  int getSelectedClassifier() const;
  void replaceWeakClassifier( int index );

 protected:
  void generateRandomClassifier();

  WeakClassifierHaarFeature** weakClassifier;  // pool; owned unless m_referenceWeakClassifier
  bool m_referenceWeakClassifier;  // true when the pool is shared with another instance
  int m_numWeakClassifier;
  int m_selectedClassifier;  // index of the currently selected weak classifier
  int m_idxOfNewWeakClassifier;  // index of the spare trained for feature exchange
  std::vector<float> m_wCorrect;  // per-classifier weight of correct decisions
  std::vector<float> m_wWrong;  // per-classifier weight of wrong decisions
  int m_iterationInit;
};
// Scalar Gaussian whose mean and sigma are estimated on-line from observed
// values. NOTE(review): the P_*/R_* members suggest a Kalman-style update
// with process/measurement noise -- confirm in the implementation of update().
class EstimatedGaussDistribution
{
 public:
  EstimatedGaussDistribution();
  EstimatedGaussDistribution( float P_mean, float R_mean, float P_sigma, float R_sigma );
  virtual ~EstimatedGaussDistribution();
  // Incorporates a new observation into the running estimate.
  void update( float value );  //, float timeConstant = -1.0);
  float getMean();
  float getSigma();
  // Overrides the current estimate directly.
  void setValues( float mean, float sigma );

 private:
  float m_mean;
  float m_sigma;
  float m_P_mean;
  float m_P_sigma;
  float m_R_mean;
  float m_R_sigma;
};
// Weak classifier over a single Haar-feature response: a threshold classifier
// driven by two on-line estimated Gaussians (positive / negative samples).
class WeakClassifierHaarFeature
{
 public:
  WeakClassifierHaarFeature();
  virtual ~WeakClassifierHaarFeature();
  // On-line update with feature response value and label target.
  // NOTE(review): return-value semantics are not visible here -- confirm.
  bool update( float value, int target );
  // Classifies the feature response via the internal threshold classifier.
  int eval( float value );

 private:
  float sigma;
  float mean;
  ClassifierThreshold* m_classifier;
  void getInitialDistribution( EstimatedGaussDistribution *distribution );
  void generateRandomClassifier( EstimatedGaussDistribution* m_posSamples, EstimatedGaussDistribution* m_negSamples );
};
// Patch-based detector built on top of StrongClassifierDirectSelection:
// evaluates every candidate patch, keeps a (smoothed) confidence map, and
// exposes the best detection(s).
class Detector
{
 public:
  Detector( StrongClassifierDirectSelection* classifier );
  virtual ~Detector( void );

  // Classifies all patches; detections are patches whose confidence exceeds
  // minMargin.
  void classifySmooth( const std::vector<Mat>& image, float minMargin = 0 );
  int getNumDetections();
  float getConfidence( int patchIdx );
  float getConfidenceOfDetection( int detectionIdx );
  float getConfidenceOfBestDetection()
  {
    return m_maxConfidence;
  }
  ;
  int getPatchIdxOfBestDetection();
  int getPatchIdxOfDetection( int detectionIdx );
  const std::vector<int> & getIdxDetections() const
  {
    return m_idxDetections;
  }
  ;
  const std::vector<float> & getConfidences() const
  {
    return m_confidences;
  }
  ;
  const cv::Mat & getConfImageDisplay() const
  {
    return m_confImageDisplay;
  }

 private:
  // Grow helpers for the confidence / detection buffers.
  void prepareConfidencesMemory( int numPatches );
  void prepareDetectionsMemory( int numDetections );

  // NOTE(review): assumed non-owning back-pointer to the classifier that
  // created this detector -- confirm.
  StrongClassifierDirectSelection* m_classifier;
  std::vector<float> m_confidences;  // per-patch confidences
  int m_sizeConfidences;
  int m_numDetections;
  std::vector<int> m_idxDetections;  // patch indices of current detections
  int m_sizeDetections;
  int m_idxBestDetection;
  float m_maxConfidence;
  cv::Mat_<float> m_confMatrix;
  cv::Mat_<float> m_confMatrixSmooth;  // smoothed confidence map
  cv::Mat_<unsigned char> m_confImageDisplay;  // confidence map for display
};
// Decision stump on a scalar value: learns a threshold from the two on-line
// Gaussians; m_parity selects which side of the threshold counts as positive.
class ClassifierThreshold
{
 public:
  ClassifierThreshold( EstimatedGaussDistribution* posSamples, EstimatedGaussDistribution* negSamples );
  virtual ~ClassifierThreshold();
  // Updates the distribution matching target and re-derives the threshold.
  void update( float value, int target );
  // Predicted label for value -- exact return values defined in the .cpp.
  int eval( float value );
  // Returns the EstimatedGaussDistribution of the requested class.
  void* getDistribution( int target );

 private:
  EstimatedGaussDistribution* m_posSamples;
  EstimatedGaussDistribution* m_negSamples;
  float m_threshold;
  int m_parity;
};
} /* namespace cv */
#endif

@ -0,0 +1,114 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_ONLINEMIL_HPP__
#define __OPENCV_ONLINEMIL_HPP__
#include "opencv2/core.hpp"
#include <limits>
namespace cv
{
//TODO based on the original implementation
//http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml
/* Sign of s: +1, -1 or 0. Every use of the argument is parenthesized so that
   compound expressions such as sign(a - b) or sign(x ? y : z) expand safely
   (the original macro left `s` bare). */
#define sign(s) (((s) > 0) ? 1 : (((s) < 0) ? -1 : 0))
class ClfOnlineStump;
// On-line MIL boosting classifier (after Babenko et al., MILTrack): selects
// _numSel weak stumps out of a pool of _numFeat at every update.
class ClfMilBoost
{
 public:
  // Learning parameters; defaults are set in the Params() constructor.
  struct CV_EXPORTS Params
  {
    Params();
    int _numSel;  // number of stumps selected per update
    int _numFeat;  // size of the weak-classifier pool
    float _lRate;  // learning rate passed to the on-line stumps
  };

  ClfMilBoost();
  ~ClfMilBoost();
  void init( const ClfMilBoost::Params &parameters = ClfMilBoost::Params() );
  // On-line update with positive (posx) and negative (negx) sample bags.
  void update( const Mat& posx, const Mat& negx );
  // Per-sample scores for x; log-odds when logR is true.
  std::vector<float> classify( const Mat& x, bool logR = true );

  // Logistic function 1 / (1 + e^-x).
  inline float sigmoid( float x )
  {
    return 1.0f / ( 1.0f + exp( -x ) );
  }

 private:
  uint _numsamples;
  ClfMilBoost::Params _myParams;
  std::vector<int> _selectors;  // indices of the currently selected stumps
  std::vector<ClfOnlineStump*> _weakclf;  // weak-classifier pool
  uint _counter;  // NOTE(review): presumably counts updates -- confirm
};
// On-line decision stump over a single feature column: keeps one Gaussian
// per class (suffix 0 = negative, 1 = positive) updated at rate _lRate.
class ClfOnlineStump
{
 public:
  float _mu0, _mu1, _sig0, _sig1;  // class-conditional mean / sigma
  float _q;
  int _s;
  float _log_n1, _log_n0;  // NOTE(review): presumably cached log terms -- confirm
  float _e1, _e0;
  float _lRate;  // learning rate of the running estimates

  ClfOnlineStump();
  ClfOnlineStump( int ind );
  void init();
  // On-line update from positive/negative samples with optional weights.
  void update( const Mat& posx, const Mat& negx, const cv::Mat_<float> & posw = cv::Mat_<float>(), const cv::Mat_<float> & negw = cv::Mat_<float>() );
  // Hard decision for sample i of x.
  bool classify( const Mat& x, int i );
  // Real-valued score for sample i of x.
  float classifyF( const Mat& x, int i );
  // Scores for all samples in x.
  std::vector<float> classifySetF( const Mat& x );

 private:
  bool _trained;  // NOTE(review): presumably set by update() -- confirm
  int _ind;  // feature column this stump reads
};
} /* namespace cv */
#endif

File diff suppressed because it is too large Load Diff

@ -0,0 +1,46 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifdef __OPENCV_BUILD
#error this is a compatibility header which should not be used inside the OpenCV library
#endif
#include "opencv2/tracking.hpp"

@ -0,0 +1,3 @@
#include "perf_precomp.hpp"
CV_PERF_TEST_MAIN(tracking)

@ -0,0 +1,21 @@
#ifdef __GNUC__
# pragma GCC diagnostic ignored "-Wmissing-declarations"
# if defined __clang__ || defined __APPLE__
# pragma GCC diagnostic ignored "-Wmissing-prototypes"
# pragma GCC diagnostic ignored "-Wextra"
# endif
#endif
#ifndef __OPENCV_TRACKING_PRECOMP_HPP__
#define __OPENCV_TRACKING_PRECOMP_HPP__
#include "opencv2/ts.hpp"
#include <opencv2/imgproc.hpp>
#include <opencv2/tracking.hpp>
#include <opencv2/highgui.hpp>
#ifdef GTEST_CREATE_SHARED_LIBRARY
#error no modules except ts should have GTEST_CREATE_SHARED_LIBRARY defined
#endif
#endif

@ -0,0 +1,46 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "perf_precomp.hpp"
using namespace std;
using namespace cv;
using namespace perf;

@ -0,0 +1,148 @@
#include <opencv2/core/utility.hpp>
#include <opencv2/tracking.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
using namespace std;
using namespace cv;
// --- state shared between main() and the mouse callback ---
static Mat image;                   // frame currently shown in the window
static Rect boundingBox;            // user-selected target region
static bool paused;                 // true while the video is paused
static bool selectObject = false;   // true once the selection is finished
static bool startSelection = false; // true while the user is dragging

// Command-line schema for CommandLineParser (two positional arguments).
static const char* keys =
{ "{@tracker_algorithm | | tracker algorithm }"
    "{@video_name | | video name }" };
// Prints usage information and the hot-key summary on stdout.
static void help()
{
  const char* usage =
      "\nThis example shows the functionality of \"Long-term optical tracking API\""
      "-- pause video [p] and draw a bounding box around the target to start the tracker\n"
      "Call:\n"
      "./tracker <tracker_algorithm> <video_name>\n";
  const char* hotKeys =
      "\n\nHot keys: \n"
      "\tq - quit the program\n"
      "\tp - pause video\n";

  cout << usage << endl;
  cout << hotKeys;
}
// Mouse callback: lets the user drag a rectangle over the displayed frame to
// select the tracking target. The selection is finalized on button release,
// which also un-pauses the video.
static void onMouse( int event, int x, int y, int, void* )
{
  // Once the target has been selected, further mouse input is ignored.
  if( !selectObject )
  {
    switch ( event )
    {
      case EVENT_LBUTTONDOWN:
        //set origin of the bounding box
        startSelection = true;
        boundingBox.x = x;
        boundingBox.y = y;
        break;
      case EVENT_LBUTTONUP:
        //set width and height of the bounding box
        boundingBox.width = std::abs( x - boundingBox.x );
        boundingBox.height = std::abs( y - boundingBox.y );
        paused = false;
        selectObject = true;
        break;
      case EVENT_MOUSEMOVE:
        if( startSelection && !selectObject )
        {
          //draw the bounding box while the user is dragging
          Mat currentFrame;
          image.copyTo( currentFrame );
          rectangle( currentFrame, Point( boundingBox.x, boundingBox.y ), Point( x, y ), Scalar( 255, 0, 0 ), 2, 1 );
          imshow( "Tracking API", currentFrame );
        }
        break;
    }
  }
}
// Entry point: parses <tracker_algorithm> <video_name>, lets the user draw
// the initial bounding box with the mouse, then runs the tracking loop.
// Returns 0 on success, -1 on any initialization failure.
int main( int argc, char** argv )
{
  CommandLineParser parser( argc, argv, keys );

  String tracker_algorithm = parser.get<String>( 0 );
  String video_name = parser.get<String>( 1 );

  if( tracker_algorithm.empty() || video_name.empty() )
  {
    help();
    return -1;
  }

  //open the capture
  VideoCapture cap;
  cap.open( video_name );
  if( !cap.isOpened() )
  {
    help();
    cout << "***Could not initialize capturing...***\n";
    cout << "Current parameter's value: \n";
    parser.printMessage();
    return -1;
  }

  Mat frame;
  paused = true;
  namedWindow( "Tracking API", 1 );
  setMouseCallback( "Tracking API", onMouse, 0 );

  //instantiates the specific Tracker
  Ptr<Tracker> tracker = Tracker::create( tracker_algorithm );
  if( tracker == NULL )
  {
    cout << "***Error in the instantiation of the tracker...***\n";
    return -1;
  }

  //get the first frame
  cap >> frame;
  // An empty first frame would make copyTo/imshow fail; bail out instead.
  if( frame.empty() )
  {
    cout << "***Could not read the first frame...***\n";
    return -1;
  }
  frame.copyTo( image );
  imshow( "Tracking API", image );

  bool initialized = false;
  for ( ;; )
  {
    if( !paused )
    {
      cap >> frame;
      // End of the video stream: VideoCapture yields empty frames.
      if( frame.empty() )
        break;
      frame.copyTo( image );

      if( !initialized && selectObject )
      {
        //initializes the tracker with the user-selected bounding box
        if( !tracker->init( frame, boundingBox ) )
        {
          cout << "***Could not initialize tracker...***\n";
          return -1;
        }
        initialized = true;
      }
      else if( initialized )
      {
        //updates the tracker; draw the box only when the update succeeded
        if( tracker->update( frame, boundingBox ) )
        {
          rectangle( image, boundingBox, Scalar( 255, 0, 0 ), 2, 1 );
        }
      }
      imshow( "Tracking API", image );
    }

    char c = (char) waitKey( 2 );
    if( c == 'q' )
      break;
    if( c == 'p' )
      paused = !paused;
  }

  return 0;
}

File diff suppressed because it is too large Load Diff

@ -0,0 +1,735 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "opencv2/tracking/onlineBoosting.hpp"
namespace cv
{
// Builds the strong classifier. The selector array itself is only allocated
// by initBaseClassifier(), which must be called once before use.
//
// @param numBaseClf    number of selectors (base classifiers)
// @param numWeakClf    number of weak classifiers in the shared pool
// @param patchSz       size of the image patches the classifier operates on
// @param sampleROI     initial region of interest
// @param useFeatureEx  enable weak-classifier (feature) exchange
// @param iterationInit number of spare weak classifiers kept for exchange
StrongClassifierDirectSelection::StrongClassifierDirectSelection( int numBaseClf, int numWeakClf, Size patchSz, const Rect& sampleROI,
                                                                  bool useFeatureEx, int iterationInit )
{
  //StrongClassifier
  numBaseClassifier = numBaseClf;
  numAllWeakClassifier = numWeakClf + iterationInit;
  iterInit = iterationInit;
  numWeakClassifier = numWeakClf;
  alpha.assign( numBaseClf, 0 );
  patchSize = patchSz;
  useFeatureExchange = useFeatureEx;
  m_errorMask.resize( numAllWeakClassifier );
  m_errors.resize( numAllWeakClassifier );
  m_sumErrors.resize( numAllWeakClassifier );
  ROI = sampleROI;
  // Keep the selector array null until initBaseClassifier() allocates it, so
  // the destructor is safe even if that call never happens (the original code
  // left this pointer uninitialized).
  baseClassifier = 0;
  // No weak-classifier exchange has been decided yet.
  replacedClassifier = -1;
  swappedClassifier = -1;
  detector = new Detector( this );
}

// Allocates the selectors; all selectors after the first one share the
// weak-classifier pool owned by baseClassifier[0].
void StrongClassifierDirectSelection::initBaseClassifier()
{
  baseClassifier = new BaseClassifier*[numBaseClassifier];
  baseClassifier[0] = new BaseClassifier( numWeakClassifier, iterInit );
  for ( int curBaseClassifier = 1; curBaseClassifier < numBaseClassifier; curBaseClassifier++ )
    baseClassifier[curBaseClassifier] = new BaseClassifier( numWeakClassifier, iterInit, baseClassifier[0]->getReferenceWeakClassifier() );
}

StrongClassifierDirectSelection::~StrongClassifierDirectSelection()
{
  // Guard against destruction before initBaseClassifier() was ever called.
  if( baseClassifier != 0 )
  {
    for ( int curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ )
      delete baseClassifier[curBaseClassifier];
    delete[] baseClassifier;
  }
  alpha.clear();
  delete detector;
}
// Size of the patches this classifier operates on.
Size StrongClassifierDirectSelection::getPatchSize() const
{
  return patchSize;
}

// Region of interest the classifier is currently working in.
Rect StrongClassifierDirectSelection::getROI() const
{
  return ROI;
}
// Runs the internal detector over all candidate patches inside sampleROI.
// Returns the confidence of the best detection and stores its patch index in
// idx; returns 0 (with idx = 0) when nothing was detected.
float StrongClassifierDirectSelection::classifySmooth( const std::vector<Mat>& images, const Rect& sampleROI, int& idx )
{
  ROI = sampleROI;
  idx = 0;
  float confidence = 0;
  //detector->classify (image, patches);
  detector->classifySmooth( images );

  //move to best detection
  if( detector->getNumDetections() <= 0 )
  {
    confidence = 0;
    return confidence;
  }
  idx = detector->getPatchIdxOfBestDetection();
  confidence = detector->getConfidenceOfBestDetection();
  return confidence;
}
bool StrongClassifierDirectSelection::getUseFeatureExchange() const
{
  return useFeatureExchange;
}

// Index of the weak classifier chosen for replacement by the last update();
// only meaningful when feature exchange is enabled.
int StrongClassifierDirectSelection::getReplacedClassifier() const
{
  return replacedClassifier;
}

// Index of the spare (newly trained) weak classifier to swap in.
int StrongClassifierDirectSelection::getSwappedClassifier() const
{
  return swappedClassifier;
}
// One on-line AdaBoost update with the labelled sample (image, target) and
// initial sample weight importance. Each selector in turn picks its best
// weak classifier, its vote weight (alpha) is recomputed from the weighted
// error, and the sample importance is re-weighted before the next selector.
// Always returns true.
bool StrongClassifierDirectSelection::update( const Mat& image, int target, float importance )
{
  m_errorMask.assign( numAllWeakClassifier, 0 );
  m_errors.assign( numAllWeakClassifier, 0 );
  m_sumErrors.assign( numAllWeakClassifier, 0 );

  // Train the shared weak-classifier pool once; the error mask records which
  // weak classifiers misclassified this sample.
  baseClassifier[0]->trainClassifier( image, target, importance, m_errorMask );
  for ( int curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ )
  {
    int selectedClassifier = baseClassifier[curBaseClassifier]->selectBestClassifier( m_errorMask, importance, m_errors );

    // Vote weight: alpha = ln((1 - e) / e); zero when no better than chance.
    if( m_errors[selectedClassifier] >= 0.5 )
      alpha[curBaseClassifier] = 0;
    else
      alpha[curBaseClassifier] = logf( ( 1.0f - m_errors[selectedClassifier] ) / m_errors[selectedClassifier] );

    // Boosting re-weighting: increase the sample importance when this
    // selector misclassified it, decrease it otherwise.
    if( m_errorMask[selectedClassifier] )
      importance *= (float) sqrt( ( 1.0f - m_errors[selectedClassifier] ) / m_errors[selectedClassifier] );
    else
      importance *= (float) sqrt( m_errors[selectedClassifier] / ( 1.0f - m_errors[selectedClassifier] ) );

    //weight limitation
    //if (importance > 100) importance = 100;

    //sum up errors (entries already consumed by a selector are skipped:
    //they are marked with m_sumErrors = -1 / m_errors = FLT_MAX below)
    for ( int curWeakClassifier = 0; curWeakClassifier < numAllWeakClassifier; curWeakClassifier++ )
    {
      if( m_errors[curWeakClassifier] != FLT_MAX && m_sumErrors[curWeakClassifier] >= 0 )
        m_sumErrors[curWeakClassifier] += m_errors[curWeakClassifier];
    }

    //mark feature as used
    m_sumErrors[selectedClassifier] = -1;
    m_errors[selectedClassifier] = FLT_MAX;
  }

  // When feature exchange is enabled, decide which weak classifier will be
  // replaced (the one with the largest accumulated error) and by which spare.
  if( useFeatureExchange )
  {
    replacedClassifier = baseClassifier[0]->computeReplaceWeakestClassifier( m_sumErrors );
    swappedClassifier = baseClassifier[0]->getIdxOfNewWeakClassifier();
  }

  return true;
}
// Performs the weak-classifier exchange decided by the last update(): the
// pool owner (baseClassifier[0]) swaps the classifier at idx, and all other
// selectors update their statistics accordingly. No-op when feature exchange
// is disabled or idx is negative.
void StrongClassifierDirectSelection::replaceWeakClassifier( int idx )
{
  if( useFeatureExchange && idx >= 0 )
  {
    baseClassifier[0]->replaceWeakClassifier( idx );
    for ( int curBaseClassifier = 1; curBaseClassifier < numBaseClassifier; curBaseClassifier++ )
      baseClassifier[curBaseClassifier]->replaceClassifierStatistic( baseClassifier[0]->getIdxOfNewWeakClassifier(), idx );
  }
}
std::vector<int> StrongClassifierDirectSelection::getSelectedWeakClassifier()
{
std::vector<int> selected;
int curBaseClassifier = 0;
for ( curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ )
{
selected.push_back( baseClassifier[curBaseClassifier]->getSelectedClassifier() );
}
return selected;
}
float StrongClassifierDirectSelection::eval( const Mat& response )
{
float value = 0.0f;
int curBaseClassifier = 0;
for ( curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ )
value += baseClassifier[curBaseClassifier]->eval( response ) * alpha[curBaseClassifier];
return value;
}
int StrongClassifierDirectSelection::getNumBaseClassifier()
{
return numBaseClassifier;
}
// Builds a selector that OWNS a freshly generated weak-classifier pool:
// numWeakClassifier selectable slots plus iterationInit spare slots used as
// replacement candidates for feature exchange. All importance-weight
// statistics (m_wCorrect / m_wWrong) start at 1.
BaseClassifier::BaseClassifier( int numWeakClassifier, int iterationInit )
{
this->m_numWeakClassifier = numWeakClassifier;
this->m_iterationInit = iterationInit;
weakClassifier = new WeakClassifierHaarFeature*[numWeakClassifier + iterationInit];
// the first spare slot sits right after the selectable range
m_idxOfNewWeakClassifier = numWeakClassifier;
generateRandomClassifier();
// owning pool: the destructor deletes the weak classifiers
m_referenceWeakClassifier = false;
m_selectedClassifier = 0;
m_wCorrect.assign( numWeakClassifier + iterationInit, 0 );
m_wWrong.assign( numWeakClassifier + iterationInit, 0 );
for ( int curWeakClassifier = 0; curWeakClassifier < numWeakClassifier + iterationInit; curWeakClassifier++ )
m_wWrong[curWeakClassifier] = m_wCorrect[curWeakClassifier] = 1;
}
// Builds a selector over an externally owned weak-classifier pool (shared
// between selectors); the destructor does NOT delete it.
BaseClassifier::BaseClassifier( int numWeakClassifier, int iterationInit, WeakClassifierHaarFeature** weakCls )
{
m_numWeakClassifier = numWeakClassifier;
m_iterationInit = iterationInit;
weakClassifier = weakCls;
m_referenceWeakClassifier = true;
m_selectedClassifier = 0;
m_idxOfNewWeakClassifier = numWeakClassifier;
m_wCorrect.assign( numWeakClassifier + iterationInit, 0 );
m_wWrong.assign( numWeakClassifier + iterationInit, 0 );
for ( int curWeakClassifier = 0; curWeakClassifier < numWeakClassifier + iterationInit; curWeakClassifier++ )
m_wWrong[curWeakClassifier] = m_wCorrect[curWeakClassifier] = 1;
}
// Releases the weak-classifier pool, but only when this selector owns it
// (pools passed in via the reference constructor are left alone).
BaseClassifier::~BaseClassifier()
{
if( !m_referenceWeakClassifier )
{
for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ )
delete weakClassifier[curWeakClassifier];
delete[] weakClassifier;
}
m_wCorrect.clear();
m_wWrong.clear();
}
void BaseClassifier::generateRandomClassifier()
{
for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ )
{
weakClassifier[curWeakClassifier] = new WeakClassifierHaarFeature();
}
}
int BaseClassifier::eval( const Mat& image )
{
return weakClassifier[m_selectedClassifier]->eval( image.at<float>( m_selectedClassifier ) );
}
int BaseClassifier::getSelectedClassifier() const
{
return m_selectedClassifier;
}
// Trains every weak classifier in the pool on one sample.
// The repetition count K is drawn from a Poisson(importance) distribution
// (Knuth's multiplicative method, capped at K_max) -- this is how on-line
// boosting simulates sample re-weighting.
// errorMask[i] is set to whether weak classifier i misclassified the sample;
// note that only the outcome of the LAST repetition survives.
void BaseClassifier::trainClassifier( const Mat& image, int target, float importance, std::vector<bool>& errorMask )
{
//get poisson value
double A = 1;
int K = 0;
int K_max = 10;
while ( 1 )
{
// multiply uniform draws until the product falls below exp(-lambda)
double U_k = (double) rand() / RAND_MAX;
A *= U_k;
if( K > K_max || A < exp( -importance ) )
break;
K++;
}
for ( int curK = 0; curK <= K; curK++ )
{
for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ )
{
errorMask[curWeakClassifier] = weakClassifier[curWeakClassifier]->update( image.at<float>( curWeakClassifier ), target );
}
}
}
/**
 * Estimated error of a weak classifier: wrong / (wrong + correct).
 * @param curWeakClassifier slot index; -1 means the currently selected one.
 */
float BaseClassifier::getError( int curWeakClassifier )
{
  const int idx = ( curWeakClassifier == -1 ) ? m_selectedClassifier : curWeakClassifier;
  return m_wWrong[idx] / ( m_wWrong[idx] + m_wCorrect[idx] );
}
// Updates the importance-weighted correct/wrong statistics of every weak
// classifier from errorMask, refreshes the error estimates in `errors`, and
// selects the weak classifier with the lowest error among the first
// m_numWeakClassifier slots (spare replacement slots are updated but never
// selectable). Entries already set to FLT_MAX (claimed by a previous
// selector) are skipped. Returns the index of the selected weak classifier.
int BaseClassifier::selectBestClassifier( std::vector<bool>& errorMask, float importance, std::vector<float> & errors )
{
float minError = FLT_MAX;
// keep the previous selection when nothing better is found
int tmp_selectedClassifier = m_selectedClassifier;
for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ )
{
if( errorMask[curWeakClassifier] )
{
m_wWrong[curWeakClassifier] += importance;
}
else
{
m_wCorrect[curWeakClassifier] += importance;
}
// FLT_MAX marks a classifier already claimed by an earlier selector
if( errors[curWeakClassifier] == FLT_MAX )
continue;
errors[curWeakClassifier] = m_wWrong[curWeakClassifier] / ( m_wWrong[curWeakClassifier] + m_wCorrect[curWeakClassifier] );
/*if(errors[curWeakClassifier] < 0.001 || !(errors[curWeakClassifier]>0.0))
{
errors[curWeakClassifier] = 0.001;
}
if(errors[curWeakClassifier] >= 1.0)
errors[curWeakClassifier] = 0.999;
assert (errors[curWeakClassifier] > 0.0);
assert (errors[curWeakClassifier] < 1.0);*/
// only the permanent (selectable) slots compete for selection
if( curWeakClassifier < m_numWeakClassifier )
{
if( errors[curWeakClassifier] < minError )
{
minError = errors[curWeakClassifier];
tmp_selectedClassifier = curWeakClassifier;
}
}
}
m_selectedClassifier = tmp_selectedClassifier;
return m_selectedClassifier;
}
// Writes the current error estimate of every weak classifier into the
// caller-provided array; entries pre-set to FLT_MAX are left untouched.
// NOTE(review): the array is read before it is written, so the caller must
// pre-initialize all m_numWeakClassifier + m_iterationInit entries.
void BaseClassifier::getErrors( float* errors )
{
for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ )
{
if( errors[curWeakClassifier] == FLT_MAX )
continue;
errors[curWeakClassifier] = m_wWrong[curWeakClassifier] / ( m_wWrong[curWeakClassifier] + m_wCorrect[curWeakClassifier] );
// statistics start at 1 and only grow, so the ratio is always positive
CV_Assert( errors[curWeakClassifier] > 0 );
}
}
// Replaces the weak classifier at `index` with the current spare candidate:
// the candidate and its statistics move into the slot, and a fresh random
// weak classifier with reset statistics (1/1) takes over the spare slot.
void BaseClassifier::replaceWeakClassifier( int index )
{
delete weakClassifier[index];
weakClassifier[index] = weakClassifier[m_idxOfNewWeakClassifier];
m_wWrong[index] = m_wWrong[m_idxOfNewWeakClassifier];
m_wWrong[m_idxOfNewWeakClassifier] = 1;
m_wCorrect[index] = m_wCorrect[m_idxOfNewWeakClassifier];
m_wCorrect[m_idxOfNewWeakClassifier] = 1;
weakClassifier[m_idxOfNewWeakClassifier] = new WeakClassifierHaarFeature();
}
// Finds the selectable weak classifier with the largest error and decides
// whether to swap it out. Also advances the circular cursor over the spare
// slots. Returns the index of the weakest classifier when the spare
// candidate has a lower error than it, or -1 when no replacement is worth
// doing.
int BaseClassifier::computeReplaceWeakestClassifier( const std::vector<float> & errors )
{
float maxError = 0.0f;
int index = -1;
//search the classifier with the largest error
for ( int curWeakClassifier = m_numWeakClassifier - 1; curWeakClassifier >= 0; curWeakClassifier-- )
{
if( errors[curWeakClassifier] > maxError )
{
maxError = errors[curWeakClassifier];
index = curWeakClassifier;
}
}
CV_Assert( index > -1 );
CV_Assert( index != m_selectedClassifier );
//replace: advance the spare-slot cursor, wrapping back to the first spare
m_idxOfNewWeakClassifier++;
if( m_idxOfNewWeakClassifier == m_numWeakClassifier + m_iterationInit )
m_idxOfNewWeakClassifier = m_numWeakClassifier;
// only swap when the candidate is strictly better than the weakest slot
if( maxError > errors[m_idxOfNewWeakClassifier] )
{
return index;
}
else
return -1;
}
/**
 * Copies the importance statistics from sourceIndex into targetIndex and
 * resets the source statistics to their initial value (1). Used to mirror a
 * replacement performed on the shared pool by another selector.
 */
void BaseClassifier::replaceClassifierStatistic( int sourceIndex, int targetIndex )
{
  CV_Assert( targetIndex >= 0 );
  CV_Assert( targetIndex != m_selectedClassifier );
  CV_Assert( targetIndex < m_numWeakClassifier );

  m_wWrong[targetIndex] = m_wWrong[sourceIndex];
  m_wCorrect[targetIndex] = m_wCorrect[sourceIndex];
  m_wWrong[sourceIndex] = 1.0f;
  m_wCorrect[sourceIndex] = 1.0f;
}
/**
 * One-dimensional Gaussian whose mean and sigma are tracked by tiny scalar
 * Kalman filters. Default constructor uses P = 1000 (state variance) and
 * R = 0.01 (measurement noise) for both filters.
 */
EstimatedGaussDistribution::EstimatedGaussDistribution()
{
  m_mean = 0;
  m_sigma = 1;
  m_P_mean = 1000;
  m_R_mean = 0.01f;
  m_P_sigma = 1000;
  m_R_sigma = 0.01f;
}

/** Same as the default constructor, but with explicit Kalman parameters. */
EstimatedGaussDistribution::EstimatedGaussDistribution( float P_mean, float R_mean, float P_sigma, float R_sigma )
{
  m_mean = 0;
  m_sigma = 1;
  m_P_mean = P_mean;
  m_R_mean = R_mean;
  m_P_sigma = P_sigma;
  m_R_sigma = R_sigma;
}

EstimatedGaussDistribution::~EstimatedGaussDistribution()
{
}
/**
 * Folds a new observation into the Gaussian estimate.
 * Mean and sigma are each tracked by a scalar Kalman filter: K is the gain
 * (floored at 0.001 so the estimate never stops adapting) and P the state
 * variance, shrunk after every measurement. Sigma is floored at 1.
 */
void EstimatedGaussDistribution::update( float value )
{
  //update distribution (mean and sigma) using a kalman filter for each
  float K;
  float minFactor = 0.001f;

  //mean
  K = m_P_mean / ( m_P_mean + m_R_mean );
  if( K < minFactor )
    K = minFactor;
  m_mean = K * value + ( 1.0f - K ) * m_mean;
  m_P_mean = m_P_mean * m_R_mean / ( m_P_mean + m_R_mean );

  //sigma
  K = m_P_sigma / ( m_P_sigma + m_R_sigma );
  if( K < minFactor )
    K = minFactor;
  float tmp_sigma = K * ( m_mean - value ) * ( m_mean - value ) + ( 1.0f - K ) * m_sigma * m_sigma;
  // bugfix: the sigma covariance update previously used m_R_mean instead of
  // m_R_sigma, inconsistent with the mean update above
  m_P_sigma = m_P_sigma * m_R_sigma / ( m_P_sigma + m_R_sigma );
  m_sigma = static_cast<float>( sqrt( tmp_sigma ) );
  if( m_sigma <= 1.0f )
    m_sigma = 1.0f;
}
void EstimatedGaussDistribution::setValues( float mean, float sigma )
{
this->m_mean = mean;
this->m_sigma = sigma;
}
float EstimatedGaussDistribution::getMean()
{
return m_mean;
}
float EstimatedGaussDistribution::getSigma()
{
return m_sigma;
}
// Weak classifier over a single Haar-feature response: a threshold test
// between an estimated positive and an estimated negative Gaussian.
// The two distributions allocated here are handed to ClassifierThreshold,
// whose destructor deletes them (see ~ClassifierThreshold).
WeakClassifierHaarFeature::WeakClassifierHaarFeature()
{
sigma = 1;
mean = 0;
EstimatedGaussDistribution* m_posSamples = new EstimatedGaussDistribution();
EstimatedGaussDistribution* m_negSamples = new EstimatedGaussDistribution();
generateRandomClassifier( m_posSamples, m_negSamples );
// seed both class distributions with the default mean/sigma
getInitialDistribution( (EstimatedGaussDistribution*) m_classifier->getDistribution( -1 ) );
getInitialDistribution( (EstimatedGaussDistribution*) m_classifier->getDistribution( 1 ) );
}
WeakClassifierHaarFeature::~WeakClassifierHaarFeature()
{
// also releases the two distributions owned by the threshold classifier
delete m_classifier;
}
// Copies this classifier's initial mean/sigma into the given distribution.
void WeakClassifierHaarFeature::getInitialDistribution( EstimatedGaussDistribution* distribution )
{
distribution->setValues( mean, sigma );
}
void WeakClassifierHaarFeature::generateRandomClassifier( EstimatedGaussDistribution* m_posSamples, EstimatedGaussDistribution* m_negSamples )
{
m_classifier = new ClassifierThreshold( m_posSamples, m_negSamples );
}
bool WeakClassifierHaarFeature::update( float value, int target )
{
m_classifier->update( value, target );
return ( m_classifier->eval( value ) != target );
}
int WeakClassifierHaarFeature::eval( float value )
{
return m_classifier->eval( value );
}
/**
 * Sliding-window detector around an already-trained strong classifier.
 * The classifier pointer is stored but not deleted here.
 */
Detector::Detector( StrongClassifierDirectSelection* classifier ) :
    m_sizeDetections( 0 )
{
  m_classifier = classifier;
  m_sizeConfidences = 0;
  m_maxConfidence = -FLT_MAX;
  m_numDetections = 0;
  m_idxBestDetection = -1;
}

Detector::~Detector()
{
  // the strong classifier is not deleted here
}

/** Grows the confidence buffer to at least numPatches entries (never shrinks). */
void Detector::prepareConfidencesMemory( int numPatches )
{
  if( numPatches > m_sizeConfidences )
  {
    m_sizeConfidences = numPatches;
    m_confidences.resize( numPatches );
  }
}

/** Grows the detection-index buffer to at least numDetections entries (never shrinks). */
void Detector::prepareDetectionsMemory( int numDetections )
{
  if( numDetections > m_sizeDetections )
  {
    m_sizeDetections = numDetections;
    m_idxDetections.resize( numDetections );
  }
}
/**
 * Evaluates the classifier on a dense grid of patches, smooths the resulting
 * confidence map with a 3x3 Gaussian, and collects every patch whose smoothed
 * confidence exceeds minMargin as a detection.
 * @param images    one patch per grid cell, row-major over the sampling grid
 * @param minMargin minimum smoothed confidence for a patch to count as a detection
 */
void Detector::classifySmooth( const std::vector<Mat>& images, float minMargin )
{
  int numPatches = (int) images.size();
  prepareConfidencesMemory( numPatches );
  m_numDetections = 0;
  m_idxBestDetection = -1;
  m_maxConfidence = -FLT_MAX;

  //compute grid
  //TODO 0.99 overlap from params
  Size patchSz = m_classifier->getPatchSize();
  int stepCol = (int) floor( ( 1.0f - 0.99f ) * (float) patchSz.width + 0.5f );
  int stepRow = (int) floor( ( 1.0f - 0.99f ) * (float) patchSz.height + 0.5f );
  if( stepCol <= 0 )
    stepCol = 1;
  if( stepRow <= 0 )
    stepRow = 1;

  Size patchGrid;
  Rect ROI = m_classifier->getROI();
  patchGrid.height = ( (int) ( (float) ( ROI.height - patchSz.height ) / stepRow ) + 1 );
  patchGrid.width = ( (int) ( (float) ( ROI.width - patchSz.width ) / stepCol ) + 1 );

  // (re)allocate the confidence maps only when the grid size changed
  if( ( patchGrid.width != m_confMatrix.cols ) || ( patchGrid.height != m_confMatrix.rows ) )
  {
    m_confMatrix.create( patchGrid.height, patchGrid.width );
    m_confMatrixSmooth.create( patchGrid.height, patchGrid.width );
    m_confImageDisplay.create( patchGrid.height, patchGrid.width );
  }

  int curPatch = 0;
  // evaluate the strong classifier on every patch and fill the confidence matrix
  for ( int row = 0; row < patchGrid.height; row++ )
  {
    for ( int col = 0; col < patchGrid.width; col++ )
    {
      m_confidences[curPatch] = m_classifier->eval( images[curPatch] );
      m_confMatrix( row, col ) = m_confidences[curPatch];
      curPatch++;
    }
  }

  // Filter
  //cv::GaussianBlur(m_confMatrix,m_confMatrixSmooth,cv::Size(3,3),0.8);
  cv::GaussianBlur( m_confMatrix, m_confMatrixSmooth, cv::Size( 3, 3 ), 0 );

  // Make display friendly; bugfix: guard against a flat confidence map
  // (max == min) which previously divided by zero
  double min_val = 0.0, max_val = 0.0;
  cv::minMaxLoc( m_confMatrixSmooth, &min_val, &max_val );
  const double range = max_val - min_val;
  for ( int y = 0; y < m_confImageDisplay.rows; y++ )
  {
    unsigned char* pConfImg = m_confImageDisplay[y];
    const float* pConfData = m_confMatrixSmooth[y];
    for ( int x = 0; x < m_confImageDisplay.cols; x++, pConfImg++, pConfData++ )
    {
      *pConfImg = ( range > 0 ) ? static_cast<unsigned char>( 255.0 * ( *pConfData - min_val ) / range ) : 0;
    }
  }

  // Get best detection and count all patches above the margin
  curPatch = 0;
  for ( int row = 0; row < patchGrid.height; row++ )
  {
    for ( int col = 0; col < patchGrid.width; col++ )
    {
      m_confidences[curPatch] = m_confMatrixSmooth( row, col );
      if( m_confidences[curPatch] > m_maxConfidence )
      {
        m_maxConfidence = m_confidences[curPatch];
        m_idxBestDetection = curPatch;
      }
      if( m_confidences[curPatch] > minMargin )
      {
        m_numDetections++;
      }
      curPatch++;
    }
  }

  // store the patch indices of all detections
  prepareDetectionsMemory( m_numDetections );
  int curDetection = -1;
  for ( int currentPatch = 0; currentPatch < numPatches; currentPatch++ )
  {
    if( m_confidences[currentPatch] > minMargin )
      m_idxDetections[++curDetection] = currentPatch;
  }
}
/** Number of patches above the margin in the last classifySmooth() call. */
int Detector::getNumDetections()
{
  return m_numDetections;
}

/** Smoothed confidence of the given patch. */
float Detector::getConfidence( int patchIdx )
{
  return m_confidences[patchIdx];
}

/** Smoothed confidence of the given detection. */
float Detector::getConfidenceOfDetection( int detectionIdx )
{
  const int patchIdx = getPatchIdxOfDetection( detectionIdx );
  return m_confidences[patchIdx];
}

/** Patch index of the highest-confidence patch (-1 before any classification). */
int Detector::getPatchIdxOfBestDetection()
{
  return m_idxBestDetection;
}

/** Maps a detection index to its patch index. */
int Detector::getPatchIdxOfDetection( int detectionIdx )
{
  return m_idxDetections[detectionIdx];
}
/**
 * Threshold classifier over two Gaussian models (positive / negative).
 * Takes ownership of both distribution objects.
 */
ClassifierThreshold::ClassifierThreshold( EstimatedGaussDistribution* posSamples, EstimatedGaussDistribution* negSamples )
{
  m_posSamples = posSamples;
  m_negSamples = negSamples;
  m_threshold = 0.0f;
  m_parity = 0;
}

ClassifierThreshold::~ClassifierThreshold()
{
  // deleting a null pointer is a no-op, so no explicit checks are needed
  delete m_posSamples;
  delete m_negSamples;
}

/** Returns the positive (target == 1) or negative distribution as an opaque pointer. */
void* ClassifierThreshold::getDistribution( int target )
{
  return ( target == 1 ) ? m_posSamples : m_negSamples;
}
/**
 * Updates the distribution matching the sample's label, then re-derives the
 * decision threshold (midpoint between the two means) and the parity (which
 * side of the threshold is the positive class).
 */
void ClassifierThreshold::update( float value, int target )
{
  if( target == 1 )
    m_posSamples->update( value );
  else
    m_negSamples->update( value );

  const float posMean = m_posSamples->getMean();
  const float negMean = m_negSamples->getMean();
  m_threshold = ( posMean + negMean ) / 2.0f;
  m_parity = ( posMean > negMean ) ? 1 : -1;
}

/** Classifies a feature response as +1 or -1 relative to the learned threshold. */
int ClassifierThreshold::eval( float value )
{
  const float signedDistance = m_parity * ( value - m_threshold );
  return ( signedDistance > 0 ) ? 1 : -1;
}
} /* namespace cv */

@ -0,0 +1,379 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "opencv2/tracking/onlineMIL.hpp"
/*
 * Helper pairing a value with its original index, so that sorting the pairs
 * keeps track of the permutation that orders the values.
 */
template<class T> class SortableElementRev
{
 public:
  T _val;
  int _ind;

  SortableElementRev() :
      _ind( 0 )
  {
  }

  SortableElementRev( T val, int ind )
  {
    _val = val;
    _ind = ind;
  }

  // Fixed: const-correct comparison (the original was non-const and took a
  // mutable reference, which rejects const operands). Orders ascending by value.
  bool operator<( const SortableElementRev<T>& b ) const
  {
    return ( _val < b._val );
  }
};

// Strict weak ordering on value (ascending) used by sort_order_des below.
static bool CompareSortableElementRev( const SortableElementRev<float>& i, const SortableElementRev<float>& j )
{
  return i._val < j._val;
}

/*
 * Sorts v in place (ascending by value, despite the historical "_des" name)
 * and fills `order` with the original index of each sorted element, i.e.
 * after the call v[k] == old_v[order[k]].
 */
template<class T> void sort_order_des( std::vector<T> &v, std::vector<int> &order )
{
  const size_t n = v.size();
  std::vector<SortableElementRev<T> > tagged( n );
  order.assign( n, 0 );
  for ( size_t i = 0; i < n; i++ )
  {
    tagged[i]._ind = (int) i;
    tagged[i]._val = v[i];
  }
  std::sort( tagged.begin(), tagged.end(), CompareSortableElementRev );
  for ( size_t i = 0; i < n; i++ )
  {
    order[i] = tagged[i]._ind;
    v[i] = tagged[i]._val;
  }
}
namespace cv
{
//implementations for strong classifier
/** Default MILBoost parameters: select 50 of 250 candidate features, learning rate 0.85. */
ClfMilBoost::Params::Params()
{
  _numSel = 50;
  _numFeat = 250;
  _lRate = 0.85;
}

ClfMilBoost::ClfMilBoost()
{
  _myParams = ClfMilBoost::Params();
  _numsamples = 0;
}

ClfMilBoost::~ClfMilBoost()
{
  // the strong classifier owns its weak classifiers
  _selectors.clear();
  for ( size_t i = 0; i < _weakclf.size(); i++ )
    delete _weakclf[i];
}
/**
 * (Re)initializes the boosting classifier with the given parameters and
 * creates one online stump per candidate feature, each bound to its own
 * feature column and sharing the configured learning rate.
 */
void ClfMilBoost::init( const ClfMilBoost::Params &parameters )
{
  _myParams = parameters;
  _numsamples = 0;

  _weakclf.resize( _myParams._numFeat );
  for ( int i = 0; i < _myParams._numFeat; i++ )
  {
    ClfOnlineStump* stump = new ClfOnlineStump( i );
    stump->_lRate = _myParams._lRate;
    _weakclf[i] = stump;
  }
  _counter = 0;
}
// One MILBoost training round on bags of samples: rows of posx / negx are
// feature vectors of the positive / negative bag. All candidate stumps are
// first updated online, then _numSel of them are greedily selected to
// minimize the bag negative log-likelihood (noisy-OR over the positive bag).
// NOTE(review): Hpos/Hneg are function-level statics, so this method is not
// reentrant / thread-safe.
void ClfMilBoost::update( const Mat& posx, const Mat& negx )
{
int numneg = negx.rows;
int numpos = posx.rows;
// compute ftrs
//if( !posx.ftrsComputed() )
// Ftr::compute( posx, _ftrs );
//if( !negx.ftrsComputed() )
// Ftr::compute( negx, _ftrs );
// initialize H: running strong-classifier response for every sample
static std::vector<float> Hpos, Hneg;
Hpos.clear();
Hneg.clear();
Hpos.resize( posx.rows, 0.0f ), Hneg.resize( negx.rows, 0.0f );
_selectors.clear();
std::vector<float> posw( posx.rows ), negw( negx.rows );
std::vector<std::vector<float> > pospred( _weakclf.size() ), negpred( _weakclf.size() );
// train all weak classifiers without weights
#ifdef _OPENMP
#pragma omp parallel for
#endif
for ( int m = 0; m < _myParams._numFeat; m++ )
{
_weakclf[m]->update( posx, negx );
pospred[m] = _weakclf[m]->classifySetF( posx );
negpred[m] = _weakclf[m]->classifySetF( negx );
}
// pick the best features
for ( int s = 0; s < _myParams._numSel; s++ )
{
// compute errors/likl for all weak clfs: likl[w] is the bag negative
// log-likelihood if stump w were added on top of the current H
std::vector<float> poslikl( _weakclf.size(), 1.0f ), neglikl( _weakclf.size() ), likl( _weakclf.size() );
#ifdef _OPENMP
#pragma omp parallel for
#endif
for ( int w = 0; w < (int) _weakclf.size(); w++ )
{
// positive bag: noisy-OR -- the bag is positive if any instance is
float lll = 1.0f;
for ( int j = 0; j < numpos; j++ )
lll *= ( 1 - sigmoid( Hpos[j] + pospred[w][j] ) );
poslikl[w] = (float) -log( 1 - lll + 1e-5 );
lll = 0.0f;
for ( int j = 0; j < numneg; j++ )
lll += (float) -log( 1e-5f + 1 - sigmoid( Hneg[j] + negpred[w][j] ) );
neglikl[w] = lll;
likl[w] = poslikl[w] / numpos + neglikl[w] / numneg;
}
// pick best weak clf (sort_order_des orders likl ascending)
std::vector<int> order;
sort_order_des( likl, order );
// find best weakclf that isn't already included
for ( uint k = 0; k < order.size(); k++ )
if( std::count( _selectors.begin(), _selectors.end(), order[k] ) == 0 )
{
_selectors.push_back( order[k] );
break;
}
// update H = H + h_m
#ifdef _OPENMP
#pragma omp parallel for
#endif
for ( int k = 0; k < posx.rows; k++ )
Hpos[k] += pospred[_selectors[s]][k];
#ifdef _OPENMP
#pragma omp parallel for
#endif
for ( int k = 0; k < negx.rows; k++ )
Hneg[k] += negpred[_selectors[s]][k];
}
//if( _myParams->_storeFtrHistory )
//for ( uint j = 0; j < _selectors.size(); j++ )
// _ftrHist( _selectors[j], _counter ) = 1.0f / ( j + 1 );
_counter++;
/* */
return;
}
// Evaluates the boosted classifier on every row of x: sums the selected
// stumps' log-odds responses per sample. When logR is false the sums are
// squashed through the sigmoid into probabilities.
std::vector<float> ClfMilBoost::classify( const Mat& x, bool logR )
{
int numsamples = x.rows;
std::vector<float> res( numsamples );
std::vector<float> tr;
for ( uint w = 0; w < _selectors.size(); w++ )
{
tr = _weakclf[_selectors[w]]->classifySetF( x );
#ifdef _OPENMP
#pragma omp parallel for
#endif
for ( int j = 0; j < numsamples; j++ )
{
res[j] += tr[j];
}
}
// return probabilities or log odds ratio
if( !logR )
{
#ifdef _OPENMP
#pragma omp parallel for
#endif
for ( int j = 0; j < (int) res.size(); j++ )
{
res[j] = sigmoid( res[j] );
}
}
return res;
}
//implementations for weak classifier
/** Creates an untrained stump not yet bound to a feature column. */
ClfOnlineStump::ClfOnlineStump()
{
  _ind = -1;
  _trained = false;
  init();
}

/** Creates an untrained stump bound to feature column `ind`. */
ClfOnlineStump::ClfOnlineStump( int ind )
{
  _ind = ind;
  _trained = false;
  init();
}

/** Resets the per-class Gaussian estimates and the learning rate to defaults. */
void ClfOnlineStump::init()
{
  _lRate = 0.85f;
  _mu0 = 0;
  _sig0 = 1;
  _mu1 = 0;
  _sig1 = 1;
  _trained = false;
}
// Online update of the stump from the positive (posx) and negative (negx)
// sample matrices; only feature column _ind is used. The trailing weight
// parameters are accepted for interface compatibility but ignored.
// First call: class means/variances are initialized from batch statistics.
// Later calls: they are blended with the previous values at rate _lRate.
// The derived quantities (_q, _s, _log_n0/_log_n1, _e0/_e1) cache the
// Gaussian log-likelihood terms used by classify()/classifyF().
void ClfOnlineStump::update( const Mat& posx, const Mat& negx, const Mat_<float>& /*posw*/, const Mat_<float>& /*negw*/)
{
//std::cout << " ClfOnlineStump::update" << _ind << std::endl;
float posmu = 0.0, negmu = 0.0;
if( posx.cols > 0 )
posmu = float( mean( posx.col( _ind ) )[0] );
if( negx.cols > 0 )
negmu = float( mean( negx.col( _ind ) )[0] );
if( _trained )
{
// exponential moving average of mean and variance per class
if( posx.cols > 0 )
{
_mu1 = ( _lRate * _mu1 + ( 1 - _lRate ) * posmu );
cv::Mat diff = posx.col( _ind ) - _mu1;
_sig1 = _lRate * _sig1 + ( 1 - _lRate ) * float( mean( diff.mul( diff ) )[0] );
}
if( negx.cols > 0 )
{
_mu0 = ( _lRate * _mu0 + ( 1 - _lRate ) * negmu );
cv::Mat diff = negx.col( _ind ) - _mu0;
_sig0 = _lRate * _sig0 + ( 1 - _lRate ) * float( mean( diff.mul( diff ) )[0] );
}
_q = ( _mu1 - _mu0 ) / 2;
_s = sign( _mu1 - _mu0 );
// log of the Gaussian normalization terms, 1/sqrt(sigma)
_log_n0 = std::log( float( 1.0f / pow( _sig0, 0.5f ) ) );
_log_n1 = std::log( float( 1.0f / pow( _sig1, 0.5f ) ) );
//_e1 = -1.0f/(2.0f*_sig1+1e-99f);
//_e0 = -1.0f/(2.0f*_sig0+1e-99f);
_e1 = -1.0f / ( 2.0f * _sig1 + std::numeric_limits<float>::min() );
_e0 = -1.0f / ( 2.0f * _sig0 + std::numeric_limits<float>::min() );
}
else
{
// first update: initialize from batch statistics of this call
_trained = true;
if( posx.cols > 0 )
{
_mu1 = posmu;
cv::Scalar scal_mean, scal_std_dev;
cv::meanStdDev( posx.col( _ind ), scal_mean, scal_std_dev );
_sig1 = float( scal_std_dev[0] ) * float( scal_std_dev[0] ) + 1e-9f;
}
if( negx.cols > 0 )
{
_mu0 = negmu;
cv::Scalar scal_mean, scal_std_dev;
cv::meanStdDev( negx.col( _ind ), scal_mean, scal_std_dev );
_sig0 = float( scal_std_dev[0] ) * float( scal_std_dev[0] ) + 1e-9f;
}
_q = ( _mu1 - _mu0 ) / 2;
_s = sign( _mu1 - _mu0 );
_log_n0 = std::log( float( 1.0f / pow( _sig0, 0.5f ) ) );
_log_n1 = std::log( float( 1.0f / pow( _sig1, 0.5f ) ) );
//_e1 = -1.0f/(2.0f*_sig1+1e-99f);
//_e0 = -1.0f/(2.0f*_sig0+1e-99f);
_e1 = -1.0f / ( 2.0f * _sig1 + std::numeric_limits<float>::min() );
_e0 = -1.0f / ( 2.0f * _sig0 + std::numeric_limits<float>::min() );
}
}
/**
 * Hard decision for sample row i of x: true when the positive-class
 * log-likelihood exceeds the negative one.
 */
bool ClfOnlineStump::classify( const Mat& x, int i )
{
  const float f = x.at<float>( i, _ind );
  double log_p0 = ( f - _mu0 ) * ( f - _mu0 ) * _e0 + _log_n0;
  double log_p1 = ( f - _mu1 ) * ( f - _mu1 ) * _e1 + _log_n1;
  return log_p1 > log_p0;
}

/** Soft decision: log-odds (positive minus negative log-likelihood). */
float ClfOnlineStump::classifyF( const Mat& x, int i )
{
  const float f = x.at<float>( i, _ind );
  double log_p0 = ( f - _mu0 ) * ( f - _mu0 ) * _e0 + _log_n0;
  double log_p1 = ( f - _mu1 ) * ( f - _mu1 ) * _e1 + _log_n1;
  return float( log_p1 - log_p0 );
}

/** Soft decisions for every row of x. */
inline std::vector<float> ClfOnlineStump::classifySetF( const Mat& x )
{
  std::vector<float> res( x.rows );
#ifdef _OPENMP
#pragma omp parallel for
#endif
  for ( int r = 0; r < (int) res.size(); r++ )
  {
    res[r] = classifyF( x, r );
  }
  return res;
}
} /* namespace cv */

@ -0,0 +1,49 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_PRECOMP_H__
#define __OPENCV_PRECOMP_H__
#include "opencv2/tracking.hpp"
#include "opencv2/core/utility.hpp"
#include "opencv2/core/private.hpp"
#endif

@ -0,0 +1,107 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
namespace cv
{
/*
* Tracker
*/
// Tracker destructor: nothing to release explicitly; the sampler, feature
// set and model members are Ptr-managed (see init()).
Tracker::~Tracker()
{
}
/**
 * Initializes the tracker on the first frame.
 * @param image       first frame (must be non-empty)
 * @param boundingBox initial target location
 * @return true when initialization succeeded; false when the tracker is
 *         already initialized or the image is empty. Raises an error when
 *         the concrete tracker's initImpl() did not create the model.
 */
bool Tracker::init( const Mat& image, const Rect& boundingBox )
{
  if( isInit )
  {
    return false;
  }

  if( image.empty() )
    return false;

  sampler = Ptr<TrackerSampler>( new TrackerSampler() );
  featureSet = Ptr<TrackerFeatureSet>( new TrackerFeatureSet() );
  model = Ptr<TrackerModel>();

  bool initTracker = initImpl( image, boundingBox );

  //check if the model component is initialized
  if( model == 0 )
  {
    // fixed grammar in the error message ("are" -> "is")
    CV_Error( -1, "The model is not initialized" );
    return false;
  }

  if( initTracker )
  {
    isInit = true;
  }

  return initTracker;
}
/**
 * Updates the tracker with a new frame.
 * @return false when the tracker was never initialized or the frame is
 *         empty, otherwise the result of the concrete tracker's updateImpl().
 */
bool Tracker::update( const Mat& image, Rect& boundingBox )
{
  if( !isInit || image.empty() )
    return false;
  return updateImpl( image, boundingBox );
}

/** Factory: creates the tracker registered under "TRACKER.<trackerType>". */
Ptr<Tracker> Tracker::create( const String& trackerType )
{
  const String algorithmName = "TRACKER." + trackerType;
  return Algorithm::create<Tracker>( algorithmName );
}
} /* namespace cv */

@ -0,0 +1,308 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "trackerBoostingModel.hpp"
namespace cv
{
/*
* TrackerBoosting
*/
/*
* Parameters
*/
/**
 * Default boosting-tracker parameters: 100 selectors, 50 init iterations,
 * sampler overlap 0.99 and search factor 2. The feature pool holds ten
 * features per classifier plus one spare per init iteration.
 */
TrackerBoosting::Params::Params()
{
  numClassifiers = 100;
  iterationInit = 50;
  samplerOverlap = 0.99f;
  samplerSearchFactor = 2;
  featureSetNumFeatures = numClassifiers * 10 + iterationInit;
}
/**
 * Reads the tracker parameters from a file node.
 * The search factor is read from the "searchFactor" key, matching the key
 * written by write(). (The original code first read "samplerSearchFactor"
 * and then immediately overwrote it with "searchFactor", making the first
 * read dead; that redundant read is removed.)
 */
void TrackerBoosting::Params::read( const cv::FileNode& fn )
{
  numClassifiers = fn["numClassifiers"];
  samplerOverlap = fn["overlap"];
  iterationInit = fn["iterationInit"];
  samplerSearchFactor = fn["searchFactor"];
}
// Serializes the tracker parameters.
// NOTE(review): the search factor is written twice, under both
// "searchFactor" and "samplerSearchFactor"; read() effectively uses
// "searchFactor".
void TrackerBoosting::Params::write( cv::FileStorage& fs ) const
{
fs << "numClassifiers" << numClassifiers;
fs << "overlap" << samplerOverlap;
fs << "searchFactor" << samplerSearchFactor;
fs << "iterationInit" << iterationInit;
fs << "samplerSearchFactor" << samplerSearchFactor;
}
/*
 * Constructor
 */
// Stores the parameters; the tracker becomes usable only after init().
TrackerBoosting::TrackerBoosting( const TrackerBoosting::Params &parameters ) :
params( parameters )
{
isInit = false;
}
/*
 * Destructor
 */
TrackerBoosting::~TrackerBoosting()
{
}
// Reads the tracker parameters from a file node (delegates to Params::read).
void TrackerBoosting::read( const cv::FileNode& fn )
{
params.read( fn );
}
// Writes the tracker parameters to file storage (delegates to Params::write).
void TrackerBoosting::write( cv::FileStorage& fs ) const
{
params.write( fs );
}
/*
 * Initialize the BOOSTING tracker on the first frame.
 * Builds the context sampler (positive/negative modes), the HAAR feature
 * pool, the boosting model and its AdaBoost state estimator, then trains
 * the classifier pool for params.iterationInit iterations.
 * Returns false when the sampler cannot be registered or when either the
 * positive or the negative sample set comes back empty.
 */
bool TrackerBoosting::initImpl( const Mat& image, const Rect& boundingBox )
{
//sampling
Mat_<int> intImage;
Mat_<double> intSqImage;
Mat image_;
// NOTE(review): assumes a multi-channel (RGB) input frame — cvtColor with
// CV_RGB2GRAY would fail on an already-grayscale image; confirm caller contract
cvtColor( image, image_, CV_RGB2GRAY );
integral( image_, intImage, intSqImage, CV_32S );
TrackerSamplerCS::Params CSparameters;
CSparameters.overlap = params.samplerOverlap;
CSparameters.searchFactor = params.samplerSearchFactor;
Ptr<TrackerSamplerAlgorithm> CSSampler = Ptr<TrackerSamplerCS>( new TrackerSamplerCS( CSparameters ) );
if( !sampler->addTrackerSamplerAlgorithm( CSSampler ) )
return false;
// collect positive samples around the target, then negative context samples
CSSampler.staticCast<TrackerSamplerCS>()->setMode( TrackerSamplerCS::MODE_POSITIVE );
sampler->sampling( intImage, boundingBox );
const std::vector<Mat> posSamples = sampler->getSamples();
CSSampler.staticCast<TrackerSamplerCS>()->setMode( TrackerSamplerCS::MODE_NEGATIVE );
sampler->sampling( intImage, boundingBox );
const std::vector<Mat> negSamples = sampler->getSamples();
if( posSamples.empty() || negSamples.empty() )
return false;
Rect ROI = CSSampler.staticCast<TrackerSamplerCS>()->getROI();
//compute HAAR features
TrackerFeatureHAAR::Params HAARparameters;
HAARparameters.numFeatures = params.featureSetNumFeatures;
HAARparameters.isIntegral = true;
HAARparameters.rectSize = Size( boundingBox.width, boundingBox.height );
Ptr<TrackerFeature> trackerFeature = Ptr<TrackerFeatureHAAR>( new TrackerFeatureHAAR( HAARparameters ) );
if( !featureSet->addTrackerFeature( trackerFeature ) )
return false;
featureSet->extraction( posSamples );
const std::vector<Mat> posResponse = featureSet->getResponses();
featureSet->extraction( negSamples );
const std::vector<Mat> negResponse = featureSet->getResponses();
//Model
model = Ptr<TrackerBoostingModel>( new TrackerBoostingModel( boundingBox ) );
Ptr<TrackerStateEstimatorAdaBoosting> stateEstimator = Ptr<TrackerStateEstimatorAdaBoosting>(
new TrackerStateEstimatorAdaBoosting( params.numClassifiers, params.iterationInit, params.featureSetNumFeatures,
Size( boundingBox.width, boundingBox.height ), ROI ) );
model->setTrackerStateEstimator( stateEstimator );
//Run model estimation and update for iterationInit iterations
for ( int i = 0; i < params.iterationInit; i++ )
{
//compute temp features
// a fresh pool of candidate features is generated each iteration; selected
// candidates are swapped into the main pool below
TrackerFeatureHAAR::Params HAARparameters2;
HAARparameters2.numFeatures = ( posSamples.size() + negSamples.size() );
HAARparameters2.isIntegral = true;
HAARparameters2.rectSize = Size( boundingBox.width, boundingBox.height );
Ptr<TrackerFeatureHAAR> trackerFeature2 = Ptr<TrackerFeatureHAAR>( new TrackerFeatureHAAR( HAARparameters2 ) );
model.staticCast<TrackerBoostingModel>()->setMode( TrackerBoostingModel::MODE_NEGATIVE, negSamples );
model->modelEstimation( negResponse );
model.staticCast<TrackerBoostingModel>()->setMode( TrackerBoostingModel::MODE_POSITIVE, posSamples );
model->modelEstimation( posResponse );
model->modelUpdate();
//get replaced classifier and change the features
std::vector<int> replacedClassifier = stateEstimator->computeReplacedClassifier();
std::vector<int> swappedClassified = stateEstimator->computeSwappedClassifier();
for ( size_t j = 0; j < replacedClassifier.size(); j++ )
{
if( replacedClassifier[j] != -1 && swappedClassified[j] != -1 )
{
trackerFeature.staticCast<TrackerFeatureHAAR>()->swapFeature( replacedClassifier[j], swappedClassified[j] );
trackerFeature.staticCast<TrackerFeatureHAAR>()->swapFeature( swappedClassified[j], trackerFeature2->getFeatureAt( j ) );
}
}
}
return true;
}
/*
 * Track the target in a new frame.
 * Steps: (1) sample detection windows around the previous location,
 * (2) evaluate only the currently selected weak classifiers on them,
 * (3) run the AdaBoost state estimator to pick the new bounding box,
 * (4) resample positives/negatives at the new location and update the model,
 * swapping replaced features for freshly generated candidates.
 * Returns false when sampling yields no windows or the estimator fails.
 */
bool TrackerBoosting::updateImpl( const Mat& image, Rect& boundingBox )
{
Mat_<int> intImage;
Mat_<double> intSqImage;
Mat image_;
// NOTE(review): same RGB-input assumption as initImpl — confirm caller contract
cvtColor( image, image_, CV_RGB2GRAY );
integral( image_, intImage, intSqImage, CV_32S );
//get the last location [AAM] X(k-1)
Ptr<TrackerTargetState> lastLocation = model->getLastTargetState();
Rect lastBoundingBox( lastLocation->getTargetPosition().x, lastLocation->getTargetPosition().y, lastLocation->getTargetWidth(),
lastLocation->getTargetHeight() );
//sampling new frame based on last location
( sampler->getSamplers().at( 0 ).second ).staticCast<TrackerSamplerCS>()->setMode( TrackerSamplerCS::MODE_CLASSIFY );
sampler->sampling( intImage, lastBoundingBox );
const std::vector<Mat> detectSamples = sampler->getSamples();
Rect ROI = ( sampler->getSamplers().at( 0 ).second ).staticCast<TrackerSamplerCS>()->getROI();
if( detectSamples.empty() )
return false;
/*//TODO debug samples
Mat f;
image.copyTo( f );
for ( size_t i = 0; i < detectSamples.size(); i = i + 10 )
{
Size sz;
Point off;
detectSamples.at( i ).locateROI( sz, off );
rectangle( f, Rect( off.x, off.y, detectSamples.at( i ).cols, detectSamples.at( i ).rows ), Scalar( 255, 0, 0 ), 1 );
}*/
// evaluate only the selected weak classifiers on the detection windows
std::vector<Mat> responses;
Mat response;
std::vector<int> classifiers = model->getTrackerStateEstimator().staticCast<TrackerStateEstimatorAdaBoosting>()->computeSelectedWeakClassifier();
Ptr<TrackerFeatureHAAR> extractor = featureSet->getTrackerFeature()[0].second.staticCast<TrackerFeatureHAAR>();
extractor->extractSelected( classifiers, detectSamples, response );
responses.push_back( response );
//predict new location
ConfidenceMap cmap;
model.staticCast<TrackerBoostingModel>()->setMode( TrackerBoostingModel::MODE_CLASSIFY, detectSamples );
model.staticCast<TrackerBoostingModel>()->responseToConfidenceMap( responses, cmap );
model->getTrackerStateEstimator().staticCast<TrackerStateEstimatorAdaBoosting>()->setCurrentConfidenceMap( cmap );
model->getTrackerStateEstimator().staticCast<TrackerStateEstimatorAdaBoosting>()->setSampleROI( ROI );
if( !model->runStateEstimator() )
{
return false;
}
Ptr<TrackerTargetState> currentState = model->getLastTargetState();
boundingBox = Rect( currentState->getTargetPosition().x, currentState->getTargetPosition().y, currentState->getTargetWidth(),
currentState->getTargetHeight() );
/*//TODO debug
rectangle( f, lastBoundingBox, Scalar( 0, 255, 0 ), 1 );
rectangle( f, boundingBox, Scalar( 0, 0, 255 ), 1 );
imshow( "f", f );
//waitKey( 0 );*/
//sampling new frame based on new location
//Positive sampling
( sampler->getSamplers().at( 0 ).second ).staticCast<TrackerSamplerCS>()->setMode( TrackerSamplerCS::MODE_POSITIVE );
sampler->sampling( intImage, boundingBox );
const std::vector<Mat> posSamples = sampler->getSamples();
//Negative sampling
( sampler->getSamplers().at( 0 ).second ).staticCast<TrackerSamplerCS>()->setMode( TrackerSamplerCS::MODE_NEGATIVE );
sampler->sampling( intImage, boundingBox );
const std::vector<Mat> negSamples = sampler->getSamples();
if( posSamples.empty() || negSamples.empty() )
return false;
//extract features
featureSet->extraction( posSamples );
const std::vector<Mat> posResponse = featureSet->getResponses();
featureSet->extraction( negSamples );
const std::vector<Mat> negResponse = featureSet->getResponses();
//compute temp features
// candidate features that may replace worn-out pool entries below
TrackerFeatureHAAR::Params HAARparameters2;
HAARparameters2.numFeatures = ( posSamples.size() + negSamples.size() );
HAARparameters2.isIntegral = true;
HAARparameters2.rectSize = Size( boundingBox.width, boundingBox.height );
Ptr<TrackerFeatureHAAR> trackerFeature2 = Ptr<TrackerFeatureHAAR>( new TrackerFeatureHAAR( HAARparameters2 ) );
//model estimate
model.staticCast<TrackerBoostingModel>()->setMode( TrackerBoostingModel::MODE_NEGATIVE, negSamples );
model->modelEstimation( negResponse );
model.staticCast<TrackerBoostingModel>()->setMode( TrackerBoostingModel::MODE_POSITIVE, posSamples );
model->modelEstimation( posResponse );
//model update
model->modelUpdate();
//get replaced classifier and change the features
std::vector<int> replacedClassifier = model->getTrackerStateEstimator().staticCast<TrackerStateEstimatorAdaBoosting>()->computeReplacedClassifier();
std::vector<int> swappedClassified = model->getTrackerStateEstimator().staticCast<TrackerStateEstimatorAdaBoosting>()->computeSwappedClassifier();
for ( size_t j = 0; j < replacedClassifier.size(); j++ )
{
if( replacedClassifier[j] != -1 && swappedClassified[j] != -1 )
{
featureSet->getTrackerFeature().at( 0 ).second.staticCast<TrackerFeatureHAAR>()->swapFeature( replacedClassifier[j], swappedClassified[j] );
featureSet->getTrackerFeature().at( 0 ).second.staticCast<TrackerFeatureHAAR>()->swapFeature( swappedClassified[j],
trackerFeature2->getFeatureAt( j ) );
}
}
return true;
}
} /* namespace cv */

@ -0,0 +1,123 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "trackerBoostingModel.hpp"
/**
* TrackerBoostingModel
*/
namespace cv
{
/*
 * Constructor: seed the trajectory with the target state described by the
 * initial bounding box (marked as foreground, with no cached response yet).
 */
TrackerBoostingModel::TrackerBoostingModel( const Rect& boundingBox )
{
mode = MODE_POSITIVE;
Ptr<TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState> initState =
Ptr<TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState>(
new TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState( Point2f( boundingBox.x, boundingBox.y ), boundingBox.width,
boundingBox.height, true, Mat() ) );
trajectory.push_back( initState );
// maxCMLength presumably caps the stored confidence-map history — confirm in TrackerModel
maxCMLength = 10;
}
/*
 * Model estimation: convert the raw feature responses of the current sample
 * set into the current confidence map.
 */
void TrackerBoostingModel::modelEstimationImpl( const std::vector<Mat>& responses )
{
responseToConfidenceMap( responses, currentConfidenceMap );
}
/*
 * Intentionally empty: no extra work is needed at model-update time.
 */
void TrackerBoostingModel::modelUpdateImpl()
{
}
/*
 * Set the training/classification mode and the sample set the next
 * modelEstimation()/responseToConfidenceMap() call refers to.
 * @param trainingMode one of MODE_POSITIVE, MODE_NEGATIVE, MODE_CLASSIFY
 * @param samples the samples the mode refers to
 */
void TrackerBoostingModel::setMode( int trainingMode, const std::vector<Mat>& samples )
{
  // vector copy-assignment already discards the previous content;
  // the explicit clear() the original performed was redundant
  currentSample = samples;
  mode = trainingMode;
}
/*
 * Forward to the AdaBoosting state estimator: indices of the currently
 * selected weak classifiers.
 */
std::vector<int> TrackerBoostingModel::getSelectedWeakClassifier()
{
return stateEstimator.staticCast<TrackerStateEstimatorAdaBoosting>()->computeSelectedWeakClassifier();
}
/*
 * Build a ConfidenceMap from the feature responses of the current samples.
 * One target state is created per sample, positioned at the sample's ROI
 * offset and flagged foreground/background according to the current mode.
 * All confidences are initialized to 0. Errors out when no samples are set.
 * @param responses the list of responses; only responses[0] is consulted,
 * one column per sample
 * @param confidenceMap the output map (states are appended)
 */
void TrackerBoostingModel::responseToConfidenceMap( const std::vector<Mat>& responses, ConfidenceMap& confidenceMap )
{
  if( currentSample.empty() )
  {
    CV_Error( -1, "The samples in Model estimation are empty" );
    return;
  }
  for ( size_t i = 0; i < currentSample.size(); i++ )
  {
    Size currentSize;
    Point currentOfs;
    currentSample.at( i ).locateROI( currentSize, currentOfs );
    // default to background: the original left this uninitialized, which is
    // an uninitialized read if an unknown mode value is ever set
    bool foreground = false;
    if( mode == MODE_POSITIVE || mode == MODE_CLASSIFY )
    {
      foreground = true;
    }
    else if( mode == MODE_NEGATIVE )
    {
      foreground = false;
    }
    // explicit cast: Mat::col takes an int index
    const Mat resp = responses[0].col( (int)i );
    //create the state
    Ptr<TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState> currentState = Ptr<
        TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState>(
        new TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState( currentOfs, currentSample.at( i ).cols, currentSample.at( i ).rows,
                                                                             foreground, resp ) );
    confidenceMap.push_back( std::make_pair( currentState, 0 ) );
  }
}
}

@ -0,0 +1,109 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_TRACKER_BOOSTING_MODEL_HPP__
#define __OPENCV_TRACKER_BOOSTING_MODEL_HPP__
#include "precomp.hpp"
#include "opencv2/core.hpp"
namespace cv
{
/**
* \brief Implementation of TrackerModel for BOOSTING algorithm
*/
class TrackerBoostingModel : public TrackerModel
{
 public:
  /** Training/classification modes for the current sample set */
  enum
  {
    MODE_POSITIVE = 1,  // mode for positive features
    MODE_NEGATIVE = 2,  // mode for negative features
    MODE_CLASSIFY = 3   // mode for classify step
  };

  /**
   * \brief Constructor
   * \param boundingBox The first boundingBox
   */
  TrackerBoostingModel( const Rect& boundingBox );

  /**
   * \brief Destructor
   */
  ~TrackerBoostingModel()
  {
  }

  /**
   * \brief Set the mode
   * \param trainingMode One of MODE_POSITIVE, MODE_NEGATIVE or MODE_CLASSIFY
   * \param samples The samples the mode refers to
   */
  void setMode( int trainingMode, const std::vector<Mat>& samples );

  /**
   * \brief Create the ConfidenceMap from a list of responses
   * \param responses The list of the responses
   * \param confidenceMap The output
   */
  void responseToConfidenceMap( const std::vector<Mat>& responses, ConfidenceMap& confidenceMap );

  /**
   * \brief return the selected weak classifiers for the detect
   * @return the selected weak classifiers
   */
  std::vector<int> getSelectedWeakClassifier();

 protected:
  void modelEstimationImpl( const std::vector<Mat>& responses );
  void modelUpdateImpl();

 private:
  std::vector<Mat> currentSample;                       // samples the current mode refers to
  std::vector<std::pair<float, float> > meanSigmaPair;  // NOTE(review): not used in this translation unit
  int mode;                                             // one of MODE_*
};
// fix: removed the stray ';' that followed the destructor body in the original
} /* namespace cv */
#endif

@ -0,0 +1,325 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
namespace cv
{
/*
 * TrackerFeature
 */
// Virtual destructor: TrackerFeature is a polymorphic base class
TrackerFeature::~TrackerFeature()
{
}
/*
 * Extract the feature response from a set of image samples.
 * Nothing is computed (and response is left untouched) when the sample
 * list is empty.
 */
void TrackerFeature::compute( const std::vector<Mat>& images, Mat& response )
{
  if( !images.empty() )
    computeImpl( images, response );
}
/*
 * Factory: build a TrackerFeature from its type string.
 * Supported prefixes: "FEATURE2D.<detector>.<descriptor>", "HOG", "HAAR", "LBP".
 * Raises CV_Error on an unknown type.
 */
Ptr<TrackerFeature> TrackerFeature::create( const String& trackerFeatureType )
{
if( trackerFeatureType.find( "FEATURE2D" ) == 0 )
{
size_t firstSep = trackerFeatureType.find_first_of( "." );
size_t secondSep = trackerFeatureType.find_last_of( "." );
// NOTE(review): the substrings below start AT the separator, so both names
// keep a leading '.'; also no '.'-presence check (npos would make substr
// throw). Harmless today because TrackerFeatureFeature2d ignores both
// arguments — confirm before relying on detector/descriptor contents.
String detector = trackerFeatureType.substr( firstSep, secondSep - firstSep );
String descriptor = trackerFeatureType.substr( secondSep, trackerFeatureType.length() - secondSep );
return Ptr<TrackerFeatureFeature2d>( new TrackerFeatureFeature2d( detector, descriptor ) );
}
if( trackerFeatureType.find( "HOG" ) == 0 )
{
return Ptr<TrackerFeatureHOG>( new TrackerFeatureHOG() );
}
if( trackerFeatureType.find( "HAAR" ) == 0 )
{
return Ptr<TrackerFeatureHAAR>( new TrackerFeatureHAAR() );
}
if( trackerFeatureType.find( "LBP" ) == 0 )
{
return Ptr<TrackerFeatureLBP>( new TrackerFeatureLBP() );
}
CV_Error( -1, "Tracker feature type not supported" );
return Ptr<TrackerFeature>();
}
// Return the feature class name ("FEATURE2D", "HOG", "HAAR" or "LBP")
String TrackerFeature::getClassName() const
{
return className;
}
/**
 * TrackerFeatureFeature2d
 * Placeholder implementation: the detector/descriptor names are currently
 * ignored and computeImpl always reports failure.
 */
TrackerFeatureFeature2d::TrackerFeatureFeature2d( String /*detectorType*/, String /*descriptorType*/)
{
className = "FEATURE2D";
}
TrackerFeatureFeature2d::~TrackerFeatureFeature2d()
{
}
// Not implemented yet: always returns false
bool TrackerFeatureFeature2d::computeImpl( const std::vector<Mat>& /*images*/, Mat& /*response*/)
{
return false;
}
// Not implemented yet: no-op
void TrackerFeatureFeature2d::selection( Mat& /*response*/, int /*npoints*/)
{
}
/**
 * TrackerFeatureHOG
 * Placeholder implementation: computeImpl always reports failure.
 */
TrackerFeatureHOG::TrackerFeatureHOG()
{
className = "HOG";
}
TrackerFeatureHOG::~TrackerFeatureHOG()
{
}
// Not implemented yet: always returns false
bool TrackerFeatureHOG::computeImpl( const std::vector<Mat>& /*images*/, Mat& /*response*/)
{
return false;
}
// Not implemented yet: no-op
void TrackerFeatureHOG::selection( Mat& /*response*/, int /*npoints*/)
{
}
/**
* TrackerFeatureHAAR
*/
/**
* Parameters
*/
/*
 * Default HAAR parameters: 250 features over a 100x100 patch, computed on
 * plain (non-integral) input images.
 */
TrackerFeatureHAAR::Params::Params()
{
numFeatures = 250;
rectSize = Size( 100, 100 );
isIntegral = false;
}
/*
 * Constructor: create the HAAR evaluator and initialize it for a single
 * sample of size params.rectSize with params.numFeatures features.
 */
TrackerFeatureHAAR::TrackerFeatureHAAR( const TrackerFeatureHAAR::Params &parameters ) :
params( parameters )
{
className = "HAAR";
CvHaarFeatureParams haarParams;
haarParams.numFeatures = params.numFeatures;
haarParams.isIntegral = params.isIntegral;
featureEvaluator = CvFeatureEvaluator::create( CvFeatureParams::HAAR ).staticCast<CvHaarEvaluator>();
featureEvaluator->init( &haarParams, 1, params.rectSize );
}
TrackerFeatureHAAR::~TrackerFeatureHAAR()
{
}
// Access (by reference) the id-th feature of the evaluator pool
CvHaarEvaluator::FeatureHaar& TrackerFeatureHAAR::getFeatureAt( int id )
{
return featureEvaluator->getFeatures( id );
}
// Overwrite the id-th feature of the pool with the given feature; always returns true
bool TrackerFeatureHAAR::swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature )
{
featureEvaluator->getFeatures( id ) = feature;
return true;
}
bool TrackerFeatureHAAR::swapFeature( int source, int target )
{
CvHaarEvaluator::FeatureHaar feature = featureEvaluator->getFeatures( source );
featureEvaluator->getFeatures( source ) = featureEvaluator->getFeatures( target );
featureEvaluator->getFeatures( target ) = feature;
return true;
}
/*
 * Evaluate only the selected HAAR features on every sample.
 * @param selFeatures indices (into the evaluator pool) of the features to evaluate
 * @param images integral-image samples
 * @param response output CV_32F matrix with one row per pool feature and one
 * column per sample; rows of non-selected features stay zero
 * @return false when the sample list is empty, true otherwise
 */
bool TrackerFeatureHAAR::extractSelected( const std::vector<int> selFeatures, const std::vector<Mat>& images, Mat& response )
{
  if( images.empty() )
  {
    return false;
  }
  const int numFeatures = featureEvaluator->getNumFeatures();
  const int numSelFeatures = (int)selFeatures.size();  // explicit cast: avoids size_t->int narrowing warning
  response.create( Size( (int)images.size(), numFeatures ), CV_32F );
  response.setTo( 0 );
  //for each sample evaluate the selected features and store them in response
  for ( size_t i = 0; i < images.size(); i++ )
  {
    int c = images[i].cols;
    int r = images[i].rows;
    for ( int j = 0; j < numSelFeatures; j++ )
    {
      float res = 0;
      CvHaarEvaluator::FeatureHaar& feature = featureEvaluator->getFeatures( selFeatures[j] );
      feature.eval( images[i], Rect( 0, 0, c, r ), &res );
      // rows are addressed by pool index, so un-selected rows keep their zeros
      response.at<float>( selFeatures[j], (int)i ) = res;
    }
  }
  return true;
}
/*
 * Internal ParallelLoopBody that evaluates the whole HAAR feature pool on a
 * range of samples. Each worker writes a disjoint set of columns of the
 * shared response matrix (Mat copies share the same underlying buffer, so
 * the member copy of 'response' aliases the caller's matrix).
 */
class Parallel_compute : public cv::ParallelLoopBody
{
 private:
  Ptr<CvHaarEvaluator> featureEvaluator;
  std::vector<Mat> images;
  Mat response;
 public:
  Parallel_compute( Ptr<CvHaarEvaluator>& fe, const std::vector<Mat>& img, Mat& resp ) :
      featureEvaluator( fe ),
      images( img ),
      response( resp )
  {
  }
  virtual void operator()( const cv::Range &r ) const
  {
    // 'register' storage class dropped: deprecated in C++11, removed in C++17
    for ( int jf = r.start; jf != r.end; ++jf )
    {
      int cols = images[jf].cols;
      int rows = images[jf].rows;
      for ( int j = 0; j < featureEvaluator->getNumFeatures(); j++ )
      {
        float res = 0;
        featureEvaluator->getFeatures()[j].eval( images[jf], Rect( 0, 0, cols, rows ), &res );
        ( Mat_<float>( response ) )( j, jf ) = res;
      }
    }
  }
};
/*
 * Evaluate every HAAR feature of the pool on every sample, in parallel.
 * response gets one row per feature and one column per sample (CV_32F).
 * @return false when the sample list is empty, true otherwise
 */
bool TrackerFeatureHAAR::computeImpl( const std::vector<Mat>& images, Mat& response )
{
  if( images.empty() )
  {
    return false;
  }
  int numFeatures = featureEvaluator->getNumFeatures();
  response = Mat_<float>( Size( (int)images.size(), numFeatures ) );
  // fix: the original deep-copied the whole feature vector into an unused
  // local on every call; the copy has been removed
  //for each sample compute #n_feature -> put each feature (n Rect) in response
  parallel_for_( Range( 0, (int)images.size() ), Parallel_compute( featureEvaluator, images, response ) );
  return true;
}
// Feature selection is not implemented for HAAR features: no-op
void TrackerFeatureHAAR::selection( Mat& /*response*/, int /*npoints*/)
{
}
/**
 * TrackerFeatureLBP
 * Placeholder implementation: computeImpl always reports failure.
 */
TrackerFeatureLBP::TrackerFeatureLBP()
{
className = "LBP";
}
TrackerFeatureLBP::~TrackerFeatureLBP()
{
}
// Not implemented yet: always returns false
bool TrackerFeatureLBP::computeImpl( const std::vector<Mat>& /*images*/, Mat& /*response*/)
{
return false;
}
// Not implemented yet: no-op
void TrackerFeatureLBP::selection( Mat& /*response*/, int /*npoints*/)
{
}
} /* namespace cv */

@ -0,0 +1,142 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
namespace cv
{
/*
* TrackerFeatureSet
*/
/*
 * Constructor: the set starts unfrozen — features can be added until the
 * first extraction() call sets blockAddTrackerFeature.
 */
TrackerFeatureSet::TrackerFeatureSet()
{
blockAddTrackerFeature = false;
}
/*
 * Destructor
 */
TrackerFeatureSet::~TrackerFeatureSet()
{
}
void TrackerFeatureSet::extraction( const std::vector<Mat>& images )
{
clearResponses();
responses.resize( features.size() );
for ( size_t i = 0; i < features.size(); i++ )
{
Mat response;
features[i].second->compute( images, response );
responses[i] = response;
}
if( !blockAddTrackerFeature )
{
blockAddTrackerFeature = true;
}
}
// Feature selection: not implemented, all extracted responses are kept
void TrackerFeatureSet::selection()
{
}
// Outlier removal: not implemented
void TrackerFeatureSet::removeOutliers()
{
}
/*
 * Create a TrackerFeature from its type name and append it to the set.
 * Fails when the set has already been frozen by extraction() or when the
 * feature could not be created.
 */
bool TrackerFeatureSet::addTrackerFeature( String trackerFeatureType )
{
  if( blockAddTrackerFeature )
    return false;
  Ptr<TrackerFeature> feature = TrackerFeature::create( trackerFeatureType );
  bool created = !( feature == 0 );
  if( created )
    features.push_back( std::make_pair( trackerFeatureType, feature ) );
  return created;
}
/*
 * Append an already-constructed TrackerFeature to the set, keyed by its
 * class name. Fails when the set has been frozen by extraction().
 */
bool TrackerFeatureSet::addTrackerFeature( Ptr<TrackerFeature>& feature )
{
  if( blockAddTrackerFeature )
    return false;
  features.push_back( std::make_pair( feature->getClassName(), feature ) );
  return true;
}
// Read-only access to the registered (type name, feature) pairs
const std::vector<std::pair<String, Ptr<TrackerFeature> > >& TrackerFeatureSet::getTrackerFeature() const
{
return features;
}
// Read-only access to the responses of the last extraction(), one Mat per feature
const std::vector<Mat>& TrackerFeatureSet::getResponses() const
{
return responses;
}
// Drop the responses of the previous extraction
void TrackerFeatureSet::clearResponses()
{
responses.clear();
}
} /* namespace cv */

@ -0,0 +1,273 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "trackerMILModel.hpp"
namespace cv
{
/*
* TrackerMIL
*/
/*
* Parameters
*/
/*
 * Default parameters for the MIL tracker (sampler radii/window sizes and
 * the size of the HAAR feature pool).
 */
TrackerMIL::Params::Params()
{
samplerInitInRadius = 3;
samplerTrackInRadius = 4;
samplerSearchWinSize = 25;
samplerInitMaxNegNum = 65;
samplerTrackMaxPosNum = 100000;
samplerTrackMaxNegNum = 65;
featureSetNumFeatures = 250;
}
/*
 * Deserialize the parameters from a FileNode.
 * Fixes: the original read "samplerInitInRadius" twice and never restored
 * samplerInitMaxNegNum. The latter is read behind an empty() guard so that
 * files written by older write() implementations (which omitted the key) do
 * not clobber the default with 0.
 */
void TrackerMIL::Params::read( const cv::FileNode& fn )
{
  samplerInitInRadius = fn["samplerInitInRadius"];
  samplerSearchWinSize = fn["samplerSearchWinSize"];
  samplerTrackInRadius = fn["samplerTrackInRadius"];
  samplerTrackMaxPosNum = fn["samplerTrackMaxPosNum"];
  samplerTrackMaxNegNum = fn["samplerTrackMaxNegNum"];
  featureSetNumFeatures = fn["featureSetNumFeatures"];
  if( !fn["samplerInitMaxNegNum"].empty() )
    samplerInitMaxNegNum = fn["samplerInitMaxNegNum"];
}
/*
 * Serialize the parameters to a FileStorage.
 * Fixes: the original wrote "samplerInitInRadius" twice (invalid duplicate
 * key) and never serialized samplerInitMaxNegNum.
 */
void TrackerMIL::Params::write( cv::FileStorage& fs ) const
{
  fs << "samplerInitInRadius" << samplerInitInRadius;
  fs << "samplerSearchWinSize" << samplerSearchWinSize;
  fs << "samplerInitMaxNegNum" << samplerInitMaxNegNum;
  fs << "samplerTrackInRadius" << samplerTrackInRadius;
  fs << "samplerTrackMaxPosNum" << samplerTrackMaxPosNum;
  fs << "samplerTrackMaxNegNum" << samplerTrackMaxNegNum;
  fs << "featureSetNumFeatures" << featureSetNumFeatures;
}
/*
 * Constructor
 * Stores the user-supplied parameters; the tracker still has to be
 * initialized via init() before update() can be called.
 */
TrackerMIL::TrackerMIL( const TrackerMIL::Params &parameters ) :
params( parameters )
{
isInit = false;
}
/*
 * Destructor
 */
TrackerMIL::~TrackerMIL()
{
}
// Deserialize the algorithm parameters from a file storage node
void TrackerMIL::read( const cv::FileNode& fn )
{
params.read( fn );
}
// Serialize the algorithm parameters to a file storage
void TrackerMIL::write( cv::FileStorage& fs ) const
{
params.write( fs );
}
void TrackerMIL::compute_integral( const Mat & img, Mat & ii_img )
{
Mat ii;
std::vector<Mat> ii_imgs;
integral( img, ii, CV_32F );
split( ii, ii_imgs );
ii_img = ii_imgs[0];
}
/*
 * Initialize the MIL tracker on the first frame.
 * Registers a CSC sampler, draws positive/negative samples around the
 * bounding box, builds the HAAR feature pool and trains the MIL boosting
 * model once. Returns false when the sampler cannot be registered or when
 * either sample set comes back empty.
 */
bool TrackerMIL::initImpl( const Mat& image, const Rect& boundingBox )
{
Mat intImage;
compute_integral( image, intImage );
TrackerSamplerCSC::Params CSCparameters;
CSCparameters.initInRad = params.samplerInitInRadius;
CSCparameters.searchWinSize = params.samplerSearchWinSize;
CSCparameters.initMaxNegNum = params.samplerInitMaxNegNum;
CSCparameters.trackInPosRad = params.samplerTrackInRadius;
CSCparameters.trackMaxPosNum = params.samplerTrackMaxPosNum;
CSCparameters.trackMaxNegNum = params.samplerTrackMaxNegNum;
Ptr<TrackerSamplerAlgorithm> CSCSampler = Ptr<TrackerSamplerCSC>( new TrackerSamplerCSC( CSCparameters ) );
if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) )
return false;
//or add CSC sampler with default parameters
//sampler->addTrackerSamplerAlgorithm( "CSC" );
//Positive sampling
CSCSampler.staticCast<TrackerSamplerCSC>()->setMode( TrackerSamplerCSC::MODE_INIT_POS );
sampler->sampling( intImage, boundingBox );
std::vector<Mat> posSamples = sampler->getSamples();
//Negative sampling
CSCSampler.staticCast<TrackerSamplerCSC>()->setMode( TrackerSamplerCSC::MODE_INIT_NEG );
sampler->sampling( intImage, boundingBox );
std::vector<Mat> negSamples = sampler->getSamples();
if( posSamples.empty() || negSamples.empty() )
return false;
//compute HAAR features
TrackerFeatureHAAR::Params HAARparameters;
HAARparameters.numFeatures = params.featureSetNumFeatures;
HAARparameters.rectSize = Size( boundingBox.width, boundingBox.height );
HAARparameters.isIntegral = true;
Ptr<TrackerFeature> trackerFeature = Ptr<TrackerFeatureHAAR>( new TrackerFeatureHAAR( HAARparameters ) );
featureSet->addTrackerFeature( trackerFeature );
featureSet->extraction( posSamples );
const std::vector<Mat> posResponse = featureSet->getResponses();
featureSet->extraction( negSamples );
const std::vector<Mat> negResponse = featureSet->getResponses();
model = Ptr<TrackerMILModel>( new TrackerMILModel( boundingBox ) );
Ptr<TrackerStateEstimatorMILBoosting> stateEstimator = Ptr<TrackerStateEstimatorMILBoosting>(
new TrackerStateEstimatorMILBoosting( params.featureSetNumFeatures ) );
model->setTrackerStateEstimator( stateEstimator );
//Run model estimation and update
model.staticCast<TrackerMILModel>()->setMode( TrackerMILModel::MODE_POSITIVE, posSamples );
model->modelEstimation( posResponse );
model.staticCast<TrackerMILModel>()->setMode( TrackerMILModel::MODE_NEGATIVE, negSamples );
model->modelEstimation( negResponse );
model->modelUpdate();
return true;
}
// Track one frame:
// 1) sample a detection window around the last known location,
// 2) classify the samples with the MIL boosting estimator to pick the new
//    location,
// 3) re-sample positives/negatives around the new location and update the
//    model online.
// Returns false when no detection samples are produced, state estimation
// fails, or the update round yields no positive/negative samples.
bool TrackerMIL::updateImpl( const Mat& image, Rect& boundingBox )
{
Mat intImage;
compute_integral( image, intImage );
//get the last location [AAM] X(k-1)
Ptr<TrackerTargetState> lastLocation = model->getLastTargetState();
Rect lastBoundingBox( lastLocation->getTargetPosition().x, lastLocation->getTargetPosition().y, lastLocation->getTargetWidth(),
lastLocation->getTargetHeight() );
//sampling new frame based on last location (detection window)
( sampler->getSamplers().at( 0 ).second ).staticCast<TrackerSamplerCSC>()->setMode( TrackerSamplerCSC::MODE_DETECT );
sampler->sampling( intImage, lastBoundingBox );
std::vector<Mat> detectSamples = sampler->getSamples();
if( detectSamples.empty() )
return false;
/*//TODO debug samples
Mat f;
image.copyTo(f);
for( size_t i = 0; i < detectSamples.size(); i=i+10 )
{
Size sz;
Point off;
detectSamples.at(i).locateROI(sz, off);
rectangle(f, Rect(off.x,off.y,detectSamples.at(i).cols,detectSamples.at(i).rows), Scalar(255,0,0), 1);
}*/
//extract features from the new samples
featureSet->extraction( detectSamples );
std::vector<Mat> response = featureSet->getResponses();
//predict new location: build a confidence map over the detection samples
//and let the MIL boosting estimator pick the best state
ConfidenceMap cmap;
model.staticCast<TrackerMILModel>()->setMode( TrackerMILModel::MODE_ESTIMATON, detectSamples );
model.staticCast<TrackerMILModel>()->responseToConfidenceMap( response, cmap );
model->getTrackerStateEstimator().staticCast<TrackerStateEstimatorMILBoosting>()->setCurrentConfidenceMap( cmap );
if( !model->runStateEstimator() )
{
return false;
}
Ptr<TrackerTargetState> currentState = model->getLastTargetState();
boundingBox = Rect( currentState->getTargetPosition().x, currentState->getTargetPosition().y, currentState->getTargetWidth(),
currentState->getTargetHeight() );
/*//TODO debug
rectangle(f, lastBoundingBox, Scalar(0,255,0), 1);
rectangle(f, boundingBox, Scalar(0,0,255), 1);
imshow("f", f);
//waitKey( 0 );*/
//sampling new frame based on new location
//Positive sampling
( sampler->getSamplers().at( 0 ).second ).staticCast<TrackerSamplerCSC>()->setMode( TrackerSamplerCSC::MODE_INIT_POS );
sampler->sampling( intImage, boundingBox );
std::vector<Mat> posSamples = sampler->getSamples();
//Negative sampling
( sampler->getSamplers().at( 0 ).second ).staticCast<TrackerSamplerCSC>()->setMode( TrackerSamplerCSC::MODE_INIT_NEG );
sampler->sampling( intImage, boundingBox );
std::vector<Mat> negSamples = sampler->getSamples();
if( posSamples.empty() || negSamples.empty() )
return false;
//extract features from both sample sets
featureSet->extraction( posSamples );
std::vector<Mat> posResponse = featureSet->getResponses();
featureSet->extraction( negSamples );
std::vector<Mat> negResponse = featureSet->getResponses();
//model estimate: feed the labelled responses back into the model
model.staticCast<TrackerMILModel>()->setMode( TrackerMILModel::MODE_POSITIVE, posSamples );
model->modelEstimation( posResponse );
model.staticCast<TrackerMILModel>()->setMode( TrackerMILModel::MODE_NEGATIVE, negSamples );
model->modelEstimation( negResponse );
//model update
model->modelUpdate();
return true;
}
} /* namespace cv */

@ -0,0 +1,126 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "trackerMILModel.hpp"
/**
* TrackerMILModel
*/
namespace cv
{
// Constructor: remember the initial box size and seed the trajectory with the
// initial target state (foreground, no features yet).
TrackerMILModel::TrackerMILModel( const Rect& boundingBox )
{
  mode = MODE_POSITIVE;
  currentSample.clear();
  width = boundingBox.width;
  height = boundingBox.height;

  Ptr<TrackerStateEstimatorMILBoosting::TrackerMILTargetState> initState(
      new TrackerStateEstimatorMILBoosting::TrackerMILTargetState(
          Point2f( boundingBox.x, boundingBox.y ), boundingBox.width, boundingBox.height, true, Mat() ) );
  trajectory.push_back( initState );
}
/**
 * Convert HAAR feature responses into a ConfidenceMap: one target state per
 * sample column, labelled foreground/background according to the current mode.
 * NOTE(review): column j of each response matrix is paired with
 * currentSample[j] — this assumes the number of response columns equals the
 * number of samples set via setMode(); verify for every caller.
 */
void TrackerMILModel::responseToConfidenceMap( const std::vector<Mat>& responses, ConfidenceMap& confidenceMap )
{
if( currentSample.empty() )
{
CV_Error( -1, "The samples in Model estimation are empty" );
return;
}
for ( size_t i = 0; i < responses.size(); i++ )
{
//for each column (one sample) there are #num_feature responses;
//recover the sample position from its ROI inside the parent image
for ( int j = 0; j < responses.at( i ).cols; j++ )
{
Size currentSize;
Point currentOfs;
currentSample.at( j ).locateROI( currentSize, currentOfs );
//positive and estimation samples are labelled foreground
bool foreground = false;
if( mode == MODE_POSITIVE || mode == MODE_ESTIMATON )
{
foreground = true;
}
else if( mode == MODE_NEGATIVE )
{
foreground = false;
}
//get the column of the HAAR responses
Mat singleResponse = responses.at( i ).col( j );
//create the state; the paired confidence value starts at 0
Ptr<TrackerStateEstimatorMILBoosting::TrackerMILTargetState> currentState = Ptr<TrackerStateEstimatorMILBoosting::TrackerMILTargetState>(
new TrackerStateEstimatorMILBoosting::TrackerMILTargetState( currentOfs, width, height, foreground, singleResponse ) );
confidenceMap.push_back( std::make_pair( currentState, 0 ) );
}
}
}
// Build the current confidence map directly from the feature responses.
void TrackerMILModel::modelEstimationImpl( const std::vector<Mat>& responses )
{
  responseToConfidenceMap( responses, currentConfidenceMap );
}
// Intentionally empty: the MIL model keeps no extra per-update state here;
// the boosting update is driven by TrackerModel::modelUpdate(), which calls
// the state estimator's update().
void TrackerMILModel::modelUpdateImpl()
{
}
/**
 * Set the training mode and the samples the next estimation refers to.
 * \param trainingMode One of MODE_POSITIVE, MODE_NEGATIVE, MODE_ESTIMATON
 * \param samples      The samples the responses will be paired with
 */
void TrackerMILModel::setMode( int trainingMode, const std::vector<Mat>& samples )
{
  // assignment replaces the previous content; the redundant clear() was removed
  currentSample = samples;
  mode = trainingMode;
}
}

@ -0,0 +1,103 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_TRACKER_MIL_MODEL_HPP__
#define __OPENCV_TRACKER_MIL_MODEL_HPP__
#include "opencv2/core.hpp"
namespace cv
{
/**
 * \brief Implementation of TrackerModel for the MIL algorithm.
 * Holds the current sample set and training mode, and converts HAAR feature
 * responses into confidence maps for the MIL boosting state estimator.
 */
class TrackerMILModel : public TrackerModel
{
public:
enum
{
MODE_POSITIVE = 1, // mode for positive features
MODE_NEGATIVE = 2, // mode for negative features
MODE_ESTIMATON = 3 // mode for estimation step (sic: spelling is part of the public name)
};
/**
* \brief Constructor
* \param boundingBox The first boundingBox
*/
TrackerMILModel( const Rect& boundingBox );
/**
* \brief Destructor
*/
~TrackerMILModel()
{
}
;
/**
* \brief Set the current training mode and the samples it refers to
*/
void setMode( int trainingMode, const std::vector<Mat>& samples );
/**
* \brief Create the ConfidenceMap from a list of responses
* \param responses The list of the responses
* \param confidenceMap The output
*/
void responseToConfidenceMap( const std::vector<Mat>& responses, ConfidenceMap& confidenceMap );
protected:
void modelEstimationImpl( const std::vector<Mat>& responses );
void modelUpdateImpl();
private:
int mode; //current training mode (see enum above)
std::vector<Mat> currentSample; //samples the next estimation refers to
int width; //initial width of the boundingBox
int height; //initial height of the boundingBox
};
} /* namespace cv */
#endif

@ -0,0 +1,178 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
namespace cv
{
/*
 * TrackerModel
 */
TrackerModel::TrackerModel()
{
  // history is trimmed once it reaches maxCMLength entries; see modelUpdate()
  maxCMLength = 1;
  // no estimator attached yet; set via setTrackerStateEstimator()
  stateEstimator = Ptr<TrackerStateEstimator>();
}
// Destructor: members release themselves.
TrackerModel::~TrackerModel()
{
}
// Attach the state estimator; only the first call succeeds.
bool TrackerModel::setTrackerStateEstimator( Ptr<TrackerStateEstimator> trackerStateEstimator )
{
  if( stateEstimator == 0 )
  {
    stateEstimator = trackerStateEstimator;
    return true;
  }
  return false;
}
// Currently attached state estimator (may be empty).
Ptr<TrackerStateEstimator> TrackerModel::getTrackerStateEstimator() const
{
  return stateEstimator;
}
// Forward the feature responses to the concrete model implementation.
void TrackerModel::modelEstimation( const std::vector<Mat>& responses )
{
  modelEstimationImpl( responses );
}
// Reset the confidence map being built for the current frame.
void TrackerModel::clearCurrentConfidenceMap()
{
  currentConfidenceMap.clear();
}
// Commit the current confidence map and run the estimator update.
// maxCMLength == -1 disables history trimming; otherwise, once the stored
// history reaches maxCMLength - 1 entries, the oldest maxCMLength / 2 entries
// are dropped (from both the confidence-map history and the trajectory).
void TrackerModel::modelUpdate()
{
modelUpdateImpl();
if( maxCMLength != -1 && (int) confidenceMaps.size() >= maxCMLength - 1 )
{
int l = maxCMLength / 2;
confidenceMaps.erase( confidenceMaps.begin(), confidenceMaps.begin() + l );
}
if( maxCMLength != -1 && (int) trajectory.size() >= maxCMLength - 1 )
{
int l = maxCMLength / 2;
trajectory.erase( trajectory.begin(), trajectory.begin() + l );
}
confidenceMaps.push_back( currentConfidenceMap );
stateEstimator->update( confidenceMaps );
clearCurrentConfidenceMap();
}
// Ask the state estimator for the most likely target state and append it to
// the trajectory. Returns false when no state could be estimated.
bool TrackerModel::runStateEstimator()
{
if( stateEstimator == 0 )
{
CV_Error( -1, "Tracker state estimator is not setted" );
// CV_Error throws, so this return is only defensive
return false;
}
Ptr<TrackerTargetState> targetState = stateEstimator->estimate( confidenceMaps );
if( targetState == 0 )
return false;
setLastTargetState( targetState );
return true;
}
// Append a state to the trajectory (most recent last).
void TrackerModel::setLastTargetState( const Ptr<TrackerTargetState>& lastTargetState )
{
  trajectory.push_back( lastTargetState );
}
// Most recent target state.
// NOTE(review): assumes the trajectory is non-empty — confirm every concrete
// model seeds it in its constructor (TrackerMILModel does).
Ptr<TrackerTargetState> TrackerModel::getLastTargetState() const
{
  return trajectory.back();
}
// Whole confidence-map history.
const std::vector<ConfidenceMap>& TrackerModel::getConfidenceMaps() const
{
  return confidenceMaps;
}

// Confidence map committed by the most recent modelUpdate().
// NOTE(review): calling this before any modelUpdate() hits back() on an
// empty vector — undefined behaviour.
const ConfidenceMap& TrackerModel::getLastConfidenceMap() const
{
  return confidenceMaps.back();
}
/*
 * TrackerTargetState: trivial accessors for the target geometry.
 */

// Top-left position of the target.
Point2f TrackerTargetState::getTargetPosition() const
{
  return targetPosition;
}

void TrackerTargetState::setTargetPosition( const Point2f& position )
{
  targetPosition = position;
}

// Width of the target bounding box, in pixels.
int TrackerTargetState::getTargetWidth() const
{
  return targetWidth;
}

void TrackerTargetState::setTargetWidth( int width )
{
  targetWidth = width;
}

// Height of the target bounding box, in pixels.
int TrackerTargetState::getTargetHeight() const
{
  return targetHeight;
}

void TrackerTargetState::setTargetHeight( int height )
{
  targetHeight = height;
}
} /* namespace cv */

@ -0,0 +1,142 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
namespace cv
{
/*
 * TrackerSampler
 */

// Constructor: sampler registration starts unlocked.
TrackerSampler::TrackerSampler()
{
  blockAddTrackerSampler = false;
}
/*
 * Destructor
 */
TrackerSampler::~TrackerSampler()
{
  // nothing owned beyond the member containers
}
void TrackerSampler::sampling( const Mat& image, Rect boundingBox )
{
clearSamples();
for ( size_t i = 0; i < samplers.size(); i++ )
{
std::vector<Mat> current_samples;
samplers[i].second->sampling( image, boundingBox, current_samples );
//push in samples all current_samples
for ( size_t j = 0; j < current_samples.size(); j++ )
{
std::vector<Mat>::iterator it = samples.end();
samples.insert( it, current_samples.at( j ) );
}
}
if( !blockAddTrackerSampler )
{
blockAddTrackerSampler = true;
}
}
// Create a sampler by type name and register it under that name.
// Fails once sampling has started (list locked) or the type is unknown.
bool TrackerSampler::addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType )
{
  if( blockAddTrackerSampler )
    return false;

  Ptr<TrackerSamplerAlgorithm> sampler = TrackerSamplerAlgorithm::create( trackerSamplerAlgorithmType );
  if( sampler == 0 )
    return false;

  samplers.push_back( std::make_pair( trackerSamplerAlgorithmType, sampler ) );
  return true;
}
// Register an already-constructed sampler under its own class name.
// Fails once sampling has started or when the pointer is empty.
bool TrackerSampler::addTrackerSamplerAlgorithm( Ptr<TrackerSamplerAlgorithm>& sampler )
{
  if( blockAddTrackerSampler || sampler == 0 )
    return false;

  samplers.push_back( std::make_pair( sampler->getClassName(), sampler ) );
  return true;
}
// Read-only access to the registered (name, sampler) pairs.
const std::vector<std::pair<String, Ptr<TrackerSamplerAlgorithm> > >& TrackerSampler::getSamplers() const
{
  return samplers;
}

// Samples collected by the last sampling() call.
const std::vector<Mat>& TrackerSampler::getSamples() const
{
  return samples;
}

// Drop the samples of the previous round.
void TrackerSampler::clearSamples()
{
  samples.clear();
}
} /* namespace cv */

@ -0,0 +1,382 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include <time.h>
// TIME( arg ) yields a time-based value used to seed the sampler RNG.
// On Windows it uses the process clock (the argument is ignored);
// elsewhere it forwards to time( arg ).
#ifdef _WIN32
#define TIME( arg ) (((double) clock()) / CLOCKS_PER_SEC)
#else
#define TIME( arg ) (time( arg ))
#endif
namespace cv
{
/*
 * TrackerSamplerAlgorithm
 */
TrackerSamplerAlgorithm::~TrackerSamplerAlgorithm()
{
  // the base class owns no resources
}
// Entry point: reject empty images, then delegate to the concrete sampler.
bool TrackerSamplerAlgorithm::sampling( const Mat& image, Rect boundingBox, std::vector<Mat>& sample )
{
  return image.empty() ? false : samplingImpl( image, boundingBox, sample );
}
// Factory: build a sampler from its type name.
// NOTE: "CSC" must be tested before "CS", since "CSC".find( "CS" ) == 0 too.
Ptr<TrackerSamplerAlgorithm> TrackerSamplerAlgorithm::create( const String& trackerSamplerType )
{
  if( trackerSamplerType.find( "CSC" ) == 0 )
    return Ptr<TrackerSamplerCSC>( new TrackerSamplerCSC() );

  if( trackerSamplerType.find( "CS" ) == 0 )
    return Ptr<TrackerSamplerCS>( new TrackerSamplerCS() );

  CV_Error( -1, "Tracker sampler algorithm type not supported" );
  return Ptr<TrackerSamplerAlgorithm>();
}
// Name identifying the concrete sampler ("CSC" or "CS").
String TrackerSamplerAlgorithm::getClassName() const
{
  return className;
}
/**
 * TrackerSamplerCSC
 */

// Default sampling parameters (see samplingImpl for how each mode uses them).
TrackerSamplerCSC::Params::Params()
{
  initInRad = 3;            // radius for initial positive samples
  searchWinSize = 25;       // detection window radius
  initMaxNegNum = 65;       // cap on initial negative samples
  trackInPosRad = 4;        // radius for positives while tracking
  trackMaxPosNum = 100000;  // cap on positives while tracking
  trackMaxNegNum = 65;      // cap on negatives while tracking
}
// Constructor: start in positive-init mode with a time-seeded RNG.
TrackerSamplerCSC::TrackerSamplerCSC( const TrackerSamplerCSC::Params &parameters ) :
    params( parameters )
{
  mode = MODE_INIT_POS;
  className = "CSC";
  rng = RNG( uint64( TIME( 0 ) ) );  // seed the sample-selection RNG
}
// Destructor: nothing to release.
TrackerSamplerCSC::~TrackerSamplerCSC()
{
}
/**
 * Collect samples around the bounding box according to the current mode:
 * positives tightly around the target, negatives in a ring away from it,
 * and a dense window for detection.
 * \return true on completion. BUGFIX: this used to return false on every
 * path, flagging success as failure and disagreeing with
 * TrackerSamplerCS::samplingImpl; empty images are already rejected by the
 * base-class sampling() wrapper.
 */
bool TrackerSamplerCSC::samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample )
{
  float inrad = 0;
  float outrad = 0;
  int maxnum = 0;
  switch ( mode )
  {
    case MODE_INIT_POS:
      // dense positives tightly around the target
      inrad = params.initInRad;
      sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad );
      break;
    case MODE_INIT_NEG:
      // ring of negatives: inside the search window, outside the target area
      inrad = 2.0f * params.searchWinSize;
      outrad = 1.5f * params.initInRad;
      maxnum = params.initMaxNegNum;
      sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad, outrad, maxnum );
      break;
    case MODE_TRACK_POS:
      inrad = params.trackInPosRad;
      outrad = 0;
      maxnum = params.trackMaxPosNum;
      sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad, outrad, maxnum );
      break;
    case MODE_TRACK_NEG:
      inrad = 1.5f * params.searchWinSize;
      outrad = params.trackInPosRad + 5;
      maxnum = params.trackMaxNegNum;
      sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad, outrad, maxnum );
      break;
    case MODE_DETECT:
      // dense scan of the whole search window
      inrad = params.searchWinSize;
      sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad );
      break;
    default:
      inrad = params.initInRad;
      sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad );
      break;
  }
  return true;
}
// Select which sampling strategy samplingImpl() applies next.
void TrackerSamplerCSC::setMode( int samplingMode )
{
  mode = samplingMode;
}
/**
 * Draw up to maxnum w-by-h patches whose top-left corner lies within inrad of
 * (x, y) and, when outrad > 0, at least outrad away from it. Each candidate is
 * kept with probability maxnum / #candidates (random subsampling).
 * NOTE(review): the clamped bounds are stored in unsigned ints; if the clamped
 * min() results ever came out negative the cast would wrap to a huge value —
 * verify callers keep the box inside the image.
 */
std::vector<Mat> TrackerSamplerCSC::sampleImage( const Mat& img, int x, int y, int w, int h, float inrad, float outrad, int maxnum )
{
int rowsz = img.rows - h - 1;
int colsz = img.cols - w - 1;
float inradsq = inrad * inrad;
float outradsq = outrad * outrad;
int dist;
uint minrow = max( 0, (int) y - (int) inrad );
uint maxrow = min( (int) rowsz - 1, (int) y + (int) inrad );
uint mincol = max( 0, (int) x - (int) inrad );
uint maxcol = min( (int) colsz - 1, (int) x + (int) inrad );
//fprintf(stderr,"inrad=%f minrow=%d maxrow=%d mincol=%d maxcol=%d\n",inrad,minrow,maxrow,mincol,maxcol);
std::vector<Mat> samples;
samples.resize( ( maxrow - minrow + 1 ) * ( maxcol - mincol + 1 ) );
int i = 0;
//acceptance probability so that about maxnum candidates survive
float prob = ( (float) ( maxnum ) ) / samples.size();
for ( int r = minrow; r <= int( maxrow ); r++ )
for ( int c = mincol; c <= int( maxcol ); c++ )
{
//squared distance of the candidate corner from (x, y)
dist = ( y - r ) * ( y - r ) + ( x - c ) * ( x - c );
if( float( rng.uniform( 0.f, 1.f ) ) < prob && dist < inradsq && dist >= outradsq )
{
//store a view (ROI) into img, not a copy
samples[i] = img( Rect( c, r, w, h ) );
i++;
}
}
samples.resize( min( i, maxnum ) );
return samples;
}
;
/**
 * TrackerSamplerCS
 */

// Default parameters for the regular-scan sampler.
TrackerSamplerCS::Params::Params()
{
  searchFactor = 2;  // enlargement factor of the ROI around the tracked patch
  overlap = 0.99f;   // overlap fraction between neighbouring grid patches
}
// Constructor: start in positive mode.
TrackerSamplerCS::TrackerSamplerCS( const TrackerSamplerCS::Params &parameters ) :
    params( parameters )
{
  mode = MODE_POSITIVE;
  className = "CS";
}
// Select which patch set patchesRegularScan() produces next.
void TrackerSamplerCS::setMode( int samplingMode )
{
  mode = samplingMode;
}
// Destructor: nothing to release.
TrackerSamplerCS::~TrackerSamplerCS()
{
}
bool TrackerSamplerCS::samplingImpl( const Mat& image, Rect boundingBox, std::vector<Mat>& sample )
{
trackedPatch = boundingBox;
Size imageSize( image.cols, image.rows );
validROI = Rect( 0, 0, imageSize.width, imageSize.height );
Size trackedPatchSize( trackedPatch.width, trackedPatch.height );
Rect trackingROI = getTrackingROI( params.searchFactor );
sample = patchesRegularScan( image, trackingROI, trackedPatchSize );
return true;
}
// Enlarge the tracked patch by searchFactor and clip its bottom/right edges
// against the valid image region (top/left are clamped inside RectMultiply).
Rect TrackerSamplerCS::getTrackingROI( float searchFactor )
{
  Rect region = RectMultiply( trackedPatch, searchFactor );
  if( region.y + region.height > validROI.height )
    region.height = validROI.height - region.y;
  if( region.x + region.width > validROI.width )
    region.width = validROI.width - region.x;
  return region;
}
// Scale a rectangle by f around its center; the top-left corner is clamped
// to be non-negative (the enlarged size is NOT clipped here).
Rect TrackerSamplerCS::RectMultiply( const Rect & rect, float f )
{
  cv::Rect scaled;
  scaled.height = (int) ( rect.height * f );
  scaled.width = (int) ( rect.width * f );
  scaled.y = (int) ( rect.y - ( (float) rect.height * f - rect.height ) / 2 );
  scaled.x = (int) ( rect.x - ( (float) rect.width * f - rect.width ) / 2 );
  if( scaled.y < 0 )
    scaled.y = 0;
  if( scaled.x < 0 )
    scaled.x = 0;
  return scaled;
}
// Last ROI computed by patchesRegularScan()/setCheckedROI().
Rect TrackerSamplerCS::getROI() const
{
  return ROI;
}
// Clip imageROI against validROI and store the result in the ROI member:
// every edge of imageROI that falls outside the valid region is replaced by
// the corresponding validROI edge.
void TrackerSamplerCS::setCheckedROI( Rect imageROI )
{
int dCol, dRow;
dCol = imageROI.x - validROI.x;
dRow = imageROI.y - validROI.y;
//top-left corner: negative offset means imageROI starts before validROI
ROI.y = ( dRow < 0 ) ? validROI.y : imageROI.y;
ROI.x = ( dCol < 0 ) ? validROI.x : imageROI.x;
//bottom-right corner: positive overflow means imageROI sticks out past validROI
dCol = imageROI.x + imageROI.width - ( validROI.x + validROI.width );
dRow = imageROI.y + imageROI.height - ( validROI.y + validROI.height );
ROI.height = ( dRow > 0 ) ? validROI.height + validROI.y - ROI.y : imageROI.height + imageROI.y - ROI.y;
ROI.width = ( dCol > 0 ) ? validROI.width + validROI.x - ROI.x : imageROI.width + imageROI.x - ROI.x;
}
// Scan the (clipped) tracking ROI with a regular grid of patchSize patches.
// MODE_POSITIVE: returns 4 copies of the tracked patch.
// MODE_NEGATIVE: returns the 4 corner patches of the ROI.
// otherwise:     returns the full regular grid, row-major.
std::vector<Mat> TrackerSamplerCS::patchesRegularScan( const Mat& image, Rect trackingROI, Size patchSize )
{
std::vector<Mat> sample;
//clip the requested ROI against the valid region when they differ
if( ( validROI == trackingROI ) )
ROI = trackingROI;
else
setCheckedROI( trackingROI );
if( mode == MODE_POSITIVE )
{
//positives: the tracked patch itself, replicated
int num = 4;
sample.resize( num );
Mat singleSample = image( trackedPatch );
for ( int i = 0; i < num; i++ )
sample[i] = singleSample;
return sample;
}
//grid step derived from the requested overlap (at least 1 pixel)
int stepCol = (int) floor( ( 1.0f - params.overlap ) * (float) patchSize.width + 0.5f );
int stepRow = (int) floor( ( 1.0f - params.overlap ) * (float) patchSize.height + 0.5f );
if( stepCol <= 0 )
stepCol = 1;
if( stepRow <= 0 )
stepRow = 1;
Size m_patchGrid;
Rect m_rectUpperLeft;
Rect m_rectUpperRight;
Rect m_rectLowerLeft;
Rect m_rectLowerRight;
int num;
//number of grid positions along each axis
m_patchGrid.height = ( (int) ( (float) ( ROI.height - patchSize.height ) / stepRow ) + 1 );
m_patchGrid.width = ( (int) ( (float) ( ROI.width - patchSize.width ) / stepCol ) + 1 );
num = m_patchGrid.width * m_patchGrid.height;
sample.resize( num );
int curPatch = 0;
//the four corner rectangles of the ROI (used by MODE_NEGATIVE below)
m_rectUpperLeft = m_rectUpperRight = m_rectLowerLeft = m_rectLowerRight = cv::Rect( 0, 0, patchSize.width, patchSize.height );
m_rectUpperLeft.y = ROI.y;
m_rectUpperLeft.x = ROI.x;
m_rectUpperRight.y = ROI.y;
m_rectUpperRight.x = ROI.x + ROI.width - patchSize.width;
m_rectLowerLeft.y = ROI.y + ROI.height - patchSize.height;
m_rectLowerLeft.x = ROI.x;
m_rectLowerRight.y = ROI.y + ROI.height - patchSize.height;
m_rectLowerRight.x = ROI.x + ROI.width - patchSize.width;
if( mode == MODE_NEGATIVE )
{
//negatives: one patch per ROI corner
int numSamples = 4;
sample.resize( numSamples );
sample[0] = image( m_rectUpperLeft );
sample[1] = image( m_rectUpperRight );
sample[2] = image( m_rectLowerLeft );
sample[3] = image( m_rectLowerRight );
return sample;
}
//full regular scan over the ROI, row-major
int numPatchesX;
int numPatchesY;
numPatchesX = 0;
numPatchesY = 0;
for ( int curRow = 0; curRow < ROI.height - patchSize.height + 1; curRow += stepRow )
{
numPatchesY++;
for ( int curCol = 0; curCol < ROI.width - patchSize.width + 1; curCol += stepCol )
{
if( curRow == 0 )
numPatchesX++;
//store a view (ROI) into image, not a copy
Mat singleSample = image( Rect( curCol + ROI.x, curRow + ROI.y, patchSize.width, patchSize.height ) );
sample[curPatch] = singleSample;
curPatch++;
}
}
//the loops must produce exactly the precomputed grid size
CV_Assert( curPatch == num );
return sample;
}
} /* namespace cv */

@ -0,0 +1,445 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
namespace cv
{
/*
* TrackerStateEstimator
*/
// Trivial destructor; the estimator owns no resources that need explicit
// release. Defined out of line to match the class declaration in the header.
TrackerStateEstimator::~TrackerStateEstimator()
{
}
// Estimate the most likely target state from the accumulated confidence maps.
// Returns a null Ptr when there is nothing to estimate from; otherwise
// delegates to the subclass-specific estimateImpl().
Ptr<TrackerTargetState> TrackerStateEstimator::estimate( const std::vector<ConfidenceMap>& confidenceMaps )
{
  if( !confidenceMaps.empty() )
    return estimateImpl( confidenceMaps );
  return Ptr<TrackerTargetState>();
}
// Update the estimator's internal model from the confidence maps.
// A no-op when the history is empty; otherwise forwards to updateImpl().
void TrackerStateEstimator::update( std::vector<ConfidenceMap>& confidenceMaps )
{
  if( confidenceMaps.empty() )
    return;
  updateImpl( confidenceMaps );
}
// Factory: builds a concrete state estimator from a type-name prefix.
// Recognized prefixes are "SVM" and "BOOSTING" (MIL boosting); anything else
// raises a CV_Error. Note the match is a prefix test (find(...) == 0), not an
// exact string comparison.
// NOTE: the parameter name had a typo ("trackeStateEstimatorType"); renaming
// it in this definition is safe — C++ parameter names are not part of the
// callable interface.
Ptr<TrackerStateEstimator> TrackerStateEstimator::create( const String& trackerStateEstimatorType )
{
  if( trackerStateEstimatorType.find( "SVM" ) == 0 )
  {
    return Ptr<TrackerStateEstimatorSVM>( new TrackerStateEstimatorSVM() );
  }
  if( trackerStateEstimatorType.find( "BOOSTING" ) == 0 )
  {
    return Ptr<TrackerStateEstimatorMILBoosting>( new TrackerStateEstimatorMILBoosting() );
  }
  CV_Error( -1, "Tracker state estimator type not supported" );
  // Unreachable (CV_Error throws); kept to silence missing-return warnings.
  return Ptr<TrackerStateEstimator>();
}
// Returns the identifier assigned by the concrete subclass constructor
// (e.g. "BOOSTING", "ADABOOSTING", "SVM" in this file).
String TrackerStateEstimator::getClassName() const
{
  return className;
}
/**
* TrackerStateEstimatorMILBoosting::TrackerMILTargetState
*/
// Build a MIL candidate target state: position/size of the patch, whether it
// is labeled foreground, and the feature vector extracted from it. All values
// are stored via the individual setters.
TrackerStateEstimatorMILBoosting::TrackerMILTargetState::TrackerMILTargetState( const Point2f& position, int width, int height, bool foreground,
                                                                                const Mat& features )
{
  setTargetPosition( position );
  setTargetWidth( width );
  setTargetHeight( height );
  setTargetFg( foreground );
  setFeatures( features );
}
// Label this state as foreground (true) or background (false).
void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setTargetFg( bool foreground )
{
  isTarget = foreground;
}
// Store the feature vector for this state. Note: Mat assignment shares data
// (shallow copy), so the caller's matrix must stay valid/unmodified.
void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setFeatures( const Mat& features )
{
  targetFeatures = features;
}
// True when this state was labeled foreground via setTargetFg().
bool TrackerStateEstimatorMILBoosting::TrackerMILTargetState::isTargetFg() const
{
  return isTarget;
}
// Returns the stored feature matrix (shallow Mat header; shares data).
Mat TrackerStateEstimatorMILBoosting::TrackerMILTargetState::getFeatures() const
{
  return targetFeatures;
}
// Construct the MIL-boosting state estimator.
// nFeatures is the per-sample feature count used when laying out the
// training matrices in prepareData().
TrackerStateEstimatorMILBoosting::TrackerStateEstimatorMILBoosting( int nFeatures )
{
  // Name used by TrackerStateEstimator::create() dispatch.
  className = "BOOSTING";
  numFeatures = nFeatures;
  // The MIL model is initialized lazily, on the first updateImpl() call.
  trained = false;
}
// Trivial destructor; members clean themselves up.
TrackerStateEstimatorMILBoosting::~TrackerStateEstimatorMILBoosting()
{
}
// Cache the confidence map that estimateImpl() will operate on.
// Copy-assignment already discards the previous contents, so the original
// clear()-then-assign sequence was redundant; a single assignment suffices.
void TrackerStateEstimatorMILBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap )
{
  currentConfidenceMap = confidenceMap;
}
// Index of the maximum element of v.
// Precondition: v must be non-empty — with an empty vector max_element()
// returns end() and the result is meaningless (the original code even
// dereferenced it, which was undefined behavior).
// The pointer-arithmetic trick (&*max_element - &*begin) is replaced by the
// idiomatic and equivalent std::distance over iterators.
uint TrackerStateEstimatorMILBoosting::max_idx( const std::vector<float> &v )
{
  return (uint) std::distance( v.begin(), std::max_element( v.begin(), v.end() ) );
}
// Pick the next target location by scoring every cached candidate state with
// the MIL classifier and returning the best-scoring one.
// Note: operates on currentConfidenceMap (set via setCurrentConfidenceMap),
// not on the argument, which is deliberately ignored.
Ptr<TrackerTargetState> TrackerStateEstimatorMILBoosting::estimateImpl( const std::vector<ConfidenceMap>& /*confidenceMaps*/)
{
  if( currentConfidenceMap.empty() )
    return Ptr<TrackerTargetState>();

  // Lay out the candidate features as training-style matrices, then classify.
  Mat posStates;
  Mat negStates;
  prepareData( currentConfidenceMap, posStates, negStates );

  std::vector<float> scores = boostMILModel.classify( posStates );
  int best = max_idx( scores );
  return currentConfidenceMap.at( best ).first;
}
void TrackerStateEstimatorMILBoosting::prepareData( const ConfidenceMap& confidenceMap, Mat& positive, Mat& negative )
{
int posCounter = 0;
int negCounter = 0;
for ( size_t i = 0; i < confidenceMap.size(); i++ )
{
Ptr<TrackerMILTargetState> currentTargetState = confidenceMap.at( i ).first.staticCast<TrackerMILTargetState>();
if( currentTargetState->isTargetFg() )
posCounter++;
else
negCounter++;
}
positive.create( posCounter, numFeatures, CV_32FC1 );
negative.create( negCounter, numFeatures, CV_32FC1 );
//TODO change with mat fast access
//initialize trainData (positive and negative)
int pc = 0;
int nc = 0;
for ( size_t i = 0; i < confidenceMap.size(); i++ )
{
Ptr<TrackerMILTargetState> currentTargetState = confidenceMap.at( i ).first.staticCast<TrackerMILTargetState>();
Mat stateFeatures = currentTargetState->getFeatures();
if( currentTargetState->isTargetFg() )
{
for ( int j = 0; j < stateFeatures.rows; j++ )
{
//fill the positive trainData with the value of the feature j for sample i
positive.at<float>( pc, j ) = stateFeatures.at<float>( j, 0 );
}
pc++;
}
else
{
for ( int j = 0; j < stateFeatures.rows; j++ )
{
//fill the negative trainData with the value of the feature j for sample i
negative.at<float>( nc, j ) = stateFeatures.at<float>( j, 0 );
}
nc++;
}
}
}
void TrackerStateEstimatorMILBoosting::updateImpl( std::vector<ConfidenceMap>& confidenceMaps )
{
if( !trained )
{
//this is the first time that the classifier is built
//init MIL
boostMILModel.init();
trained = true;
}
ConfidenceMap lastConfidenceMap = confidenceMaps.back();
Mat positiveStates;
Mat negativeStates;
prepareData( lastConfidenceMap, positiveStates, negativeStates );
//update MIL
boostMILModel.update( positiveStates, negativeStates );
}
/**
* TrackerStateEstimatorAdaBoosting
*/
// Construct the online-AdaBoost state estimator.
// numClassifer  : number of base classifiers of the strong classifier
// initIterations: training iterations used at initialization time
// nFeatures     : per-sample feature count
// patchSize     : size of the initial target patch
// ROI           : region of interest in which samples are drawn
TrackerStateEstimatorAdaBoosting::TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, Size patchSize, const Rect& ROI )
{
  // Name used by TrackerStateEstimator::create() dispatch.
  className = "ADABOOSTING";
  // The strong classifier is built lazily on the first updateImpl() call.
  trained = false;
  numBaseClassifier = numClassifer;
  numFeatures = nFeatures;
  iterationInit = initIterations;
  initPatchSize = patchSize;
  sampleROI = ROI;
}
// Current sampling region of interest (image coordinates).
Rect TrackerStateEstimatorAdaBoosting::getSampleROI() const
{
  return sampleROI;
}
// Replace the sampling region of interest used by estimateImpl()'s
// classifySmooth() call.
void TrackerStateEstimatorAdaBoosting::setSampleROI( const Rect& ROI )
{
  sampleROI = ROI;
}
/**
* TrackerAdaBoostingTargetState::TrackerAdaBoostingTargetState
*/
// Build an AdaBoosting candidate target state: patch position/size, the
// foreground/background label, and the classifier responses computed for the
// patch. All values are stored via the individual setters.
TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::TrackerAdaBoostingTargetState( const Point2f& position, int width, int height,
                                                                                                bool foreground, const Mat& responses )
{
  setTargetPosition( position );
  setTargetWidth( width );
  setTargetHeight( height );
  setTargetFg( foreground );
  setTargetResponses( responses );
}
// Label this state as foreground (true) or background (false).
void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetFg( bool foreground )
{
  isTarget = foreground;
}
// True when this state was labeled foreground via setTargetFg().
bool TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::isTargetFg() const
{
  return isTarget;
}
// Store the classifier responses for this state. Note: Mat assignment shares
// data (shallow copy), so the caller's matrix must stay valid/unmodified.
void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetResponses( const Mat& responses )
{
  targetResponses = responses;
}
// Returns the stored response matrix (shallow Mat header; shares data).
Mat TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::getTargetResponses() const
{
  return targetResponses;
}
// Trivial destructor; boostClassifier is a Ptr and releases itself.
TrackerStateEstimatorAdaBoosting::~TrackerStateEstimatorAdaBoosting()
{
}
// Cache the confidence map that estimateImpl() will operate on.
// Copy-assignment already discards the previous contents, so the original
// clear()-then-assign sequence was redundant; a single assignment suffices.
void TrackerStateEstimatorAdaBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap )
{
  currentConfidenceMap = confidenceMap;
}
// Despite the "compute" name, nothing is computed here: the vector is filled
// during updateImpl() (index of the replaced weak classifier per sample, or
// -1); this accessor just returns a copy of it.
std::vector<int> TrackerStateEstimatorAdaBoosting::computeReplacedClassifier()
{
  return replacedClassifier;
}
// Despite the "compute" name, nothing is computed here: the vector is filled
// during updateImpl() (index of the swapped weak classifier per sample, or
// -1); this accessor just returns a copy of it.
std::vector<int> TrackerStateEstimatorAdaBoosting::computeSwappedClassifier()
{
  return swappedClassifier;
}
// Forwards to the strong classifier's selected-weak-classifier list.
// NOTE(review): boostClassifier is only created on the first updateImpl()
// call; calling this before any update would dereference a null Ptr —
// callers must update first.
std::vector<int> TrackerStateEstimatorAdaBoosting::computeSelectedWeakClassifier()
{
  return boostClassifier->getSelectedWeakClassifier();
}
// Pick the next target location: gather the response matrices of all cached
// candidate states and let the strong classifier choose the best one.
// Note: operates on currentConfidenceMap (set via setCurrentConfidenceMap),
// not on the argument, which is deliberately ignored.
Ptr<TrackerTargetState> TrackerStateEstimatorAdaBoosting::estimateImpl( const std::vector<ConfidenceMap>& /*confidenceMaps*/ )
{
  if( currentConfidenceMap.empty() )
    return Ptr<TrackerTargetState>();

  std::vector<Mat> images;
  images.reserve( currentConfidenceMap.size() );  // one response Mat per candidate
  for ( size_t i = 0; i < currentConfidenceMap.size(); i++ )
  {
    Ptr<TrackerAdaBoostingTargetState> currentTargetState = currentConfidenceMap.at( i ).first.staticCast<TrackerAdaBoostingTargetState>();
    images.push_back( currentTargetState->getTargetResponses() );
  }

  // classifySmooth() is expected to fill bestIndex. Initialize it to an
  // invalid value so that, should it ever be left unset, the range-checked
  // at() below throws instead of reading an uninitialized int (which was
  // undefined behavior in the original code).
  int bestIndex = -1;
  boostClassifier->classifySmooth( images, sampleROI, bestIndex );
  return currentConfidenceMap.at( bestIndex ).first;
}
// Online update of the strong classifier from the most recent confidence map.
// The map is processed as two halves: sample i from the first half and sample
// i + size/2 from the second half are fed to the classifier in alternation.
// NOTE(review): this presumably interleaves two sample populations built by
// the sampler — confirm against the code that fills the confidence map.
// The update order is significant for the online boosting algorithm, so the
// statement sequence below must not be reordered.
void TrackerStateEstimatorAdaBoosting::updateImpl( std::vector<ConfidenceMap>& confidenceMaps )
{
  if( !trained )
  {
    //this is the first time that the classifier is built
    // Ten weak classifiers per base classifier (fixed heuristic).
    int numWeakClassifier = numBaseClassifier * 10;
    bool useFeatureExchange = true;
    boostClassifier = Ptr<StrongClassifierDirectSelection>(
        new StrongClassifierDirectSelection( numBaseClassifier, numWeakClassifier, initPatchSize, sampleROI, useFeatureExchange, iterationInit ) );
    //init base classifiers
    boostClassifier->initBaseClassifier();
    trained = true;
  }
  ConfidenceMap lastConfidenceMap = confidenceMaps.back();
  bool featureEx = boostClassifier->getUseFeatureExchange();
  // One slot per sample; -1 means "no weak classifier replaced/swapped".
  replacedClassifier.clear();
  replacedClassifier.resize( lastConfidenceMap.size(), -1 );
  swappedClassifier.clear();
  swappedClassifier.resize( lastConfidenceMap.size(), -1 );
  for ( size_t i = 0; i < lastConfidenceMap.size() / 2; i++ )
  {
    // --- sample i from the first half of the map ---
    Ptr<TrackerAdaBoostingTargetState> currentTargetState = lastConfidenceMap.at( i ).first.staticCast<TrackerAdaBoostingTargetState>();
    // Label encoding expected by StrongClassifierDirectSelection::update():
    // +1 foreground, -1 background.
    int currentFg = 1;
    if( !currentTargetState->isTargetFg() )
      currentFg = -1;
    Mat res = currentTargetState->getTargetResponses();
    boostClassifier->update( res, currentFg );
    if( featureEx )
    {
      // Record which weak classifier the update replaced/swapped, and commit
      // the replacement only when both indices are valid.
      replacedClassifier[i] = boostClassifier->getReplacedClassifier();
      swappedClassifier[i] = boostClassifier->getSwappedClassifier();
      if( replacedClassifier[i] >= 0 && swappedClassifier[i] >= 0 )
        boostClassifier->replaceWeakClassifier( replacedClassifier[i] );
    }
    else
    {
      replacedClassifier[i] = -1;
      swappedClassifier[i] = -1;
    }
    // --- the matching sample from the second half of the map ---
    int mapPosition = i + lastConfidenceMap.size() / 2;
    Ptr<TrackerAdaBoostingTargetState> currentTargetState2 = lastConfidenceMap.at( mapPosition ).first.staticCast<TrackerAdaBoostingTargetState>();
    currentFg = 1;
    if( !currentTargetState2->isTargetFg() )
      currentFg = -1;
    const Mat res2 = currentTargetState2->getTargetResponses();
    boostClassifier->update( res2, currentFg );
    if( featureEx )
    {
      replacedClassifier[mapPosition] = boostClassifier->getReplacedClassifier();
      swappedClassifier[mapPosition] = boostClassifier->getSwappedClassifier();
      if( replacedClassifier[mapPosition] >= 0 && swappedClassifier[mapPosition] >= 0 )
        boostClassifier->replaceWeakClassifier( replacedClassifier[mapPosition] );
    }
    else
    {
      replacedClassifier[mapPosition] = -1;
      swappedClassifier[mapPosition] = -1;
    }
  }
}
/**
* TrackerStateEstimatorSVM
*/
// SVM state estimator; currently a stub (see estimateImpl/updateImpl below).
TrackerStateEstimatorSVM::TrackerStateEstimatorSVM()
{
  // Name used by TrackerStateEstimator::create() dispatch.
  className = "SVM";
}
TrackerStateEstimatorSVM::~TrackerStateEstimatorSVM()
{
}
// Placeholder estimation: simply returns the last state of the last
// confidence map. The base class guarantees confidenceMaps itself is
// non-empty, but the last map may still be empty — guard that case and
// return a null state instead of calling back() on an empty vector
// (undefined behavior in the original).
Ptr<TrackerTargetState> TrackerStateEstimatorSVM::estimateImpl( const std::vector<ConfidenceMap>& confidenceMaps )
{
  if( confidenceMaps.back().empty() )
    return Ptr<TrackerTargetState>();
  return confidenceMaps.back().back().first;
}
// Intentionally empty: the SVM estimator is a stub with no model to update.
void TrackerStateEstimatorSVM::updateImpl( std::vector<ConfidenceMap>& /*confidenceMaps*/)
{
}
} /* namespace cv */

@ -0,0 +1,61 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "opencv2/tracking.hpp"
namespace cv
{
CV_INIT_ALGORITHM(TrackerMIL, "TRACKER.MIL",);
CV_INIT_ALGORITHM(TrackerBoosting, "TRACKER.BOOSTING",);
// Force registration of the tracking algorithms declared above with the
// cv::Algorithm machinery. Returns true only when every tracker registered
// successfully (i.e. exposes a non-empty name).
bool initModule_tracking(void)
{
  const bool milRegistered = !TrackerMIL_info_auto.name().empty();
  const bool boostingRegistered = !TrackerBoosting_info_auto.name().empty();
  return milRegistered && boostingRegistered;
}
}

@ -0,0 +1,3 @@
#include "test_precomp.hpp"
CV_TEST_MAIN("cv")

@ -0,0 +1,18 @@
// Silence warnings that the test framework's generated code would otherwise
// trigger. Deliberately placed outside the include guard so the pragmas
// apply to every translation unit that includes this header.
#ifdef __GNUC__
#  pragma GCC diagnostic ignored "-Wmissing-declarations"
#  if defined __clang__ || defined __APPLE__
#    pragma GCC diagnostic ignored "-Wmissing-prototypes"
#    pragma GCC diagnostic ignored "-Wextra"
#  endif
#endif

#ifndef __OPENCV_TEST_PRECOMP_HPP__
#define __OPENCV_TEST_PRECOMP_HPP__

// Precompiled header for the tracking module's tests.
#include <iostream>
#include "opencv2/ts.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/tracking.hpp"
#include "opencv2/highgui.hpp"

#endif

@ -0,0 +1,130 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "test_precomp.hpp"
#include "opencv2/tracking.hpp"
using namespace cv;
using namespace std;
// Common base for the tracker regression tests in this file; exists so that
// shared fixtures/helpers can be added in one place later.
class CV_TrackerBaseTest : public cvtest::BaseTest
{
 public:
  CV_TrackerBaseTest();
  virtual ~CV_TrackerBaseTest();
};
// Trivial out-of-line definitions for the base test class.
CV_TrackerBaseTest::CV_TrackerBaseTest()
{
}
CV_TrackerBaseTest::~CV_TrackerBaseTest()
{
}
/************************************ TrackerMIL ************************************/
// Accuracy test for TrackerMIL. Currently a placeholder (see run() below).
class CV_TrackerMILTest : public CV_TrackerBaseTest
{
 public:
  CV_TrackerMILTest();
  ~CV_TrackerMILTest();

 protected:
  // Test body invoked by the cvtest framework via safe_run().
  void run( int );
};
// Trivial out-of-line definitions for the MIL test class.
CV_TrackerMILTest::CV_TrackerMILTest()
{
}
CV_TrackerMILTest::~CV_TrackerMILTest()
{
}
// Placeholder body: deliberately reports a generic failure until a real
// accuracy check is written (the corresponding TEST is DISABLED_).
void CV_TrackerMILTest::run( int )
{
  ts->set_failed_test_info( cvtest::TS::FAIL_GENERIC );
  ts->printf( cvtest::TS::LOG, "CV_TrackerMILTest to be implemented" );
}
// gtest entry point; the DISABLED_ prefix keeps it out of default test runs
// while the tracker test remains unimplemented.
TEST(DISABLED_Tracking_TrackerMIL, accuracy)
{
  CV_TrackerMILTest test;
  test.safe_run();
}
/************************************ TrackerBoosting ************************************/
// Accuracy test for TrackerBoosting. Currently a placeholder (see run()).
class CV_TrackerBoostingTest : public CV_TrackerBaseTest
{
 public:
  CV_TrackerBoostingTest();
  ~CV_TrackerBoostingTest();

 protected:
  // Test body invoked by the cvtest framework via safe_run().
  void run( int );
};
// Trivial out-of-line definitions for the Boosting test class.
CV_TrackerBoostingTest::CV_TrackerBoostingTest()
{
}
CV_TrackerBoostingTest::~CV_TrackerBoostingTest()
{
}
// Placeholder body: deliberately reports a generic failure until a real
// accuracy check is written (the corresponding TEST is DISABLED_).
void CV_TrackerBoostingTest::run( int )
{
  ts->set_failed_test_info( cvtest::TS::FAIL_GENERIC );
  ts->printf( cvtest::TS::LOG, "CV_TrackerBoostingTest to be implemented" );
}
// gtest entry point; the DISABLED_ prefix keeps it out of default test runs
// while the tracker test remains unimplemented.
TEST(DISABLED_Tracking_TrackerBoosting, accuracy)
{
  CV_TrackerBoostingTest test;
  test.safe_run();
}
/* End of file. */
Loading…
Cancel
Save