diff --git a/modules/tracking/CMakeLists.txt b/modules/tracking/CMakeLists.txt new file mode 100644 index 000000000..39983045b --- /dev/null +++ b/modules/tracking/CMakeLists.txt @@ -0,0 +1,2 @@ +set(the_description "Tracking API") +ocv_define_module(tracking opencv_imgproc) diff --git a/modules/tracking/doc/common_interfaces_tracker.rst b/modules/tracking/doc/common_interfaces_tracker.rst new file mode 100644 index 000000000..049366098 --- /dev/null +++ b/modules/tracking/doc/common_interfaces_tracker.rst @@ -0,0 +1,264 @@ +Common Interfaces of Tracker +============================ + +.. highlight:: cpp + + +Tracker : Algorithm +------------------- + +.. ocv:class:: Tracker + +Base abstract class for the long-term tracker:: + + class CV_EXPORTS_W Tracker : public virtual Algorithm + { + virtual ~Tracker(); + + bool init( const Mat& image, const Rect& boundingBox ); + + bool update( const Mat& image, Rect& boundingBox ); + + static Ptr create( const String& trackerType ); + + }; + +Tracker::init +------------- + +Initialize the tracker with a known bounding box surrounding the target + +.. ocv:function:: bool Tracker::init( const Mat& image, const Rect& boundingBox ) + + :param image: The initial frame + + :param boundingBox: The initial bounding box + + +Tracker::update +--------------- + +Update the tracker, find the new most likely bounding box for the target + +.. ocv:function:: bool Tracker::update( const Mat& image, Rect& boundingBox ) + + :param image: The current frame + + :param boundingBox: The bounding box that represents the new target location + + +Tracker::create +--------------- + +Creates a tracker by its name. + +.. 
ocv:function:: static Ptr Tracker::create( const String& trackerType ) + + :param trackerType: Tracker type + +The following detector types are supported: + +* ``"MIL"`` -- :ocv:class:`TrackerMIL` + +* ``"BOOSTING"`` -- :ocv:class:`TrackerBoosting` + +Creating Own Tracker +-------------------- + +If you want to create a new tracker, you should follow some simple rules. + +First, your tracker should inherit from :ocv:class:`Tracker`, so you must implement two methods: + +* Tracker::initImpl, it should be called once in the first frame, here you should initialize all structures. The second argument is the initial bounding box of the target. + +* Tracker::updateImpl, it should be called at the beginning of each loop through the video frames. Here you should overwrite the bounding box with the new location. + +Example of creating specialized Tracker ``TrackerMIL`` : :: + + class CV_EXPORTS_W TrackerMIL : public Tracker + { + public: + TrackerMIL( const TrackerMIL::Params &parameters = TrackerMIL::Params() ); + virtual ~TrackerMIL(); + ... + + protected: + bool initImpl( const Mat& image, const Rect& boundingBox ); + bool updateImpl( const Mat& image, Rect& boundingBox ); + ... + }; + + +Every tracker has three components :ocv:class:`TrackerSampler`, :ocv:class:`TrackerFeatureSet` and :ocv:class:`TrackerModel`. +The first two are instantiated from the Tracker base class, while the last component is abstract, so you must implement your TrackerModel. + +Finally, add your tracker in the file tracking_init.cpp + +TrackerSampler +.............. + +TrackerSampler is already instantiated, but you should define the sampling algorithm and add the classes (or single class) to TrackerSampler. +You can choose one of the ready implementations such as TrackerSamplerCSC or you can implement your sampling method, in this case +the class must inherit :ocv:class:`TrackerSamplerAlgorithm`. Fill the samplingImpl method that writes the result in the "sample" output argument. 
+ +Example of creating specialized TrackerSamplerAlgorithm ``TrackerSamplerCSC`` : :: + + class CV_EXPORTS_W TrackerSamplerCSC : public TrackerSamplerAlgorithm + { + public: + TrackerSamplerCSC( const TrackerSamplerCSC::Params ¶meters = TrackerSamplerCSC::Params() ); + ~TrackerSamplerCSC(); + ... + + protected: + bool samplingImpl( const Mat& image, Rect boundingBox, std::vector& sample ); + ... + + }; + +Example of adding TrackerSamplerAlgorithm to TrackerSampler : :: + + //sampler is the TrackerSampler + Ptr CSCSampler = new TrackerSamplerCSC( CSCparameters ); + if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) ) + return false; + + //or add CSC sampler with default parameters + //sampler->addTrackerSamplerAlgorithm( "CSC" ); + +.. seealso:: + + :ocv:class:`TrackerSamplerCSC`, :ocv:class:`TrackerSamplerAlgorithm` + + +TrackerFeatureSet +................. + +TrackerFeatureSet is already instantiated (as first) , but you should define what kinds of features you'll use in your tracker. +You can use multiple feature types, so you can add a ready implementation as :ocv:class:`TrackerFeatureHAAR` in your TrackerFeatureSet or develop your own implementation. +In this case, in the computeImpl method put the code that extract the features and +in the selection method optionally put the code for the refinement and selection of the features. + +Example of creating specialized TrackerFeature ``TrackerFeatureHAAR`` : :: + + class CV_EXPORTS_W TrackerFeatureHAAR : public TrackerFeature + { + public: + TrackerFeatureHAAR( const TrackerFeatureHAAR::Params ¶meters = TrackerFeatureHAAR::Params() ); + ~TrackerFeatureHAAR(); + void selection( Mat& response, int npoints ); + ... + + protected: + bool computeImpl( const std::vector& images, Mat& response ); + ... 
+ + }; + +Example of adding TrackerFeature to TrackerFeatureSet : :: + + //featureSet is the TrackerFeatureSet + Ptr trackerFeature = new TrackerFeatureHAAR( HAARparameters ); + featureSet->addTrackerFeature( trackerFeature ); + +.. seealso:: + + :ocv:class:`TrackerFeatureHAAR`, :ocv:class:`TrackerFeatureSet` + +TrackerModel +............ + +TrackerModel is abstract, so in your implementation you must develop your TrackerModel that inherits from :ocv:class:`TrackerModel`. +Fill the method for the estimation of the state "modelEstimationImpl", that estimates the most likely target location, +see [AAM]_ table I (ME) for further information. Fill "modelUpdateImpl" in order to update the model, see [AAM]_ table I (MU). +In this class you can use the :c:type:`ConfidenceMap` and :c:type:`Trajectory` to store the model. The first represents the model over all +possible candidate states and the second represents the list of all estimated states. + +Example of creating specialized TrackerModel ``TrackerMILModel`` : :: + + class TrackerMILModel : public TrackerModel + { + public: + TrackerMILModel( const Rect& boundingBox ); + ~TrackerMILModel(); + ... + + protected: + void modelEstimationImpl( const std::vector& responses ); + void modelUpdateImpl(); + ... + + }; + +And add it in your Tracker : :: + + bool TrackerMIL::initImpl( const Mat& image, const Rect& boundingBox ) + { + ... + //model is the general TrackerModel field of the general Tracker + model = new TrackerMILModel( boundingBox ); + ... + } + + +In the last step you should define the TrackerStateEstimator based on your implementation or you can use one of the ready classes such as :ocv:class:`TrackerStateEstimatorMILBoosting`. +It represents the statistical part of the model that estimates the most likely target state. 
+ +Example of creating specialized TrackerStateEstimator ``TrackerStateEstimatorMILBoosting`` : :: + + class CV_EXPORTS_W TrackerStateEstimatorMILBoosting : public TrackerStateEstimator + { + class TrackerMILTargetState : public TrackerTargetState + { + ... + }; + + public: + TrackerStateEstimatorMILBoosting( int nFeatures = 250 ); + ~TrackerStateEstimatorMILBoosting(); + ... + + protected: + Ptr estimateImpl( const std::vector& confidenceMaps ); + void updateImpl( std::vector& confidenceMaps ); + ... + + }; + +And add it in your TrackerModel : :: + + //model is the TrackerModel of your Tracker + Ptr stateEstimator = new TrackerStateEstimatorMILBoosting( params.featureSetNumFeatures ); + model->setTrackerStateEstimator( stateEstimator ); + +.. seealso:: + + :ocv:class:`TrackerModel`, :ocv:class:`TrackerStateEstimatorMILBoosting`, :ocv:class:`TrackerTargetState` + + +During this step, you should define your TrackerTargetState based on your implementation. :ocv:class:`TrackerTargetState` base class has only the bounding box (upper-left position, width and height), you can +enrich it adding scale factor, target rotation, etc. + +Example of creating specialized TrackerTargetState ``TrackerMILTargetState`` : :: + + class TrackerMILTargetState : public TrackerTargetState + { + public: + TrackerMILTargetState( const Point2f& position, int targetWidth, int targetHeight, bool foreground, const Mat& features ); + ~TrackerMILTargetState(); + ... + + private: + bool isTarget; + Mat targetFeatures; + ... + + }; + + +Try it +...... + +To try your tracker you can use the demo at https://github.com/lenlen/opencv/blob/tracking_api/samples/cpp/tracker.cpp. + +The first argument is the name of the tracker and the second is a video source. 
diff --git a/modules/tracking/doc/common_interfaces_tracker_feature_set.rst b/modules/tracking/doc/common_interfaces_tracker_feature_set.rst new file mode 100644 index 000000000..2dbfea564 --- /dev/null +++ b/modules/tracking/doc/common_interfaces_tracker_feature_set.rst @@ -0,0 +1,343 @@ +Common Interfaces of TrackerFeatureSet +====================================== + +.. highlight:: cpp + + +TrackerFeatureSet +----------------- + +Class that manages the extraction and selection of features + +[AAM]_ Feature Extraction and Feature Set Refinement (Feature Processing and Feature Selection). See table I and section III C +[AMVOT]_ Appearance modelling -> Visual representation (Table II, section 3.1 - 3.2) + +.. ocv:class:: TrackerFeatureSet + +TrackerFeatureSet class:: + + class CV_EXPORTS_W TrackerFeatureSet + { + public: + + TrackerFeatureSet(); + ~TrackerFeatureSet(); + + void extraction( const std::vector& images ); + void selection(); + void removeOutliers(); + + bool addTrackerFeature( String trackerFeatureType ); + bool addTrackerFeature( Ptr& feature ); + + const std::vector > >& getTrackerFeature() const; + const std::vector& getResponses() const; + + }; + + +TrackerFeatureSet is an aggregation of :ocv:class:`TrackerFeature` + +.. seealso:: + + :ocv:class:`TrackerFeature` + +TrackerFeatureSet::extraction +----------------------------- + +Extract features from the images collection + +.. ocv:function:: void TrackerFeatureSet::extraction( const std::vector& images ) + + :param images: The input images + +TrackerFeatureSet::selection +---------------------------- + +Identify most effective features for all feature types (optional) + +.. ocv:function:: void TrackerFeatureSet::selection() + +TrackerFeatureSet::removeOutliers +--------------------------------- + +Remove outliers for all feature types (optional) + +.. 
ocv:function:: void TrackerFeatureSet::removeOutliers() + +TrackerFeatureSet::addTrackerFeature +------------------------------------ + +Add TrackerFeature in the collection. Return true if TrackerFeature is added, false otherwise + +.. ocv:function:: bool TrackerFeatureSet::addTrackerFeature( String trackerFeatureType ) + + :param trackerFeatureType: The TrackerFeature name + +.. ocv:function:: bool TrackerFeatureSet::addTrackerFeature( Ptr& feature ) + + :param feature: The TrackerFeature class + + +The modes available now: + +* ``"HAAR"`` -- Haar Feature-based + +The modes available soon: + +* ``"HOG"`` -- Histogram of Oriented Gradients features + +* ``"LBP"`` -- Local Binary Pattern features + +* ``"FEATURE2D"`` -- All types of Feature2D + +Example ``TrackerFeatureSet::addTrackerFeature`` : :: + + //sample usage: + + Ptr trackerFeature = new TrackerFeatureHAAR( HAARparameters ); + featureSet->addTrackerFeature( trackerFeature ); + + //or add CSC sampler with default parameters + //featureSet->addTrackerFeature( "HAAR" ); + + +.. note:: If you use the second method, you must initialize the TrackerFeature + +TrackerFeatureSet::getTrackerFeature +------------------------------------ + +Get the TrackerFeature collection (TrackerFeature name, TrackerFeature pointer) + +.. ocv:function:: const std::vector > >& TrackerFeatureSet::getTrackerFeature() const + +TrackerFeatureSet::getResponses +------------------------------- + +Get the responses + +.. ocv:function:: const std::vector& TrackerFeatureSet::getResponses() const + +.. 
note:: Be sure to call extraction before getResponses + +Example ``TrackerFeatureSet::getResponses`` : :: + + //get the patches from sampler + std::vector detectSamples = sampler->getSamples(); + + if( detectSamples.empty() ) + return false; + + //features extraction + featureSet->extraction( detectSamples ); + + //get responses + std::vector response = featureSet->getResponses(); + +TrackerFeature +-------------- + +Abstract base class for TrackerFeature that represents the feature. + +.. ocv:class:: TrackerFeature + +TrackerFeature class:: + + class CV_EXPORTS_W TrackerFeature + { + public: + virtual ~TrackerFeature(); + + static Ptr create( const String& trackerFeatureType ); + + void compute( const std::vector& images, Mat& response ); + + virtual void selection( Mat& response, int npoints ) = 0; + + String getClassName() const; + }; + +TrackerFeature::create +---------------------- + +Create TrackerFeature by tracker feature type + +.. ocv:function:: static Ptr TrackerFeature::create( const String& trackerFeatureType ) + + :param trackerFeatureType: The TrackerFeature name + +The modes available now: + +* ``"HAAR"`` -- Haar Feature-based + +The modes available soon: + +* ``"HOG"`` -- Histogram of Oriented Gradients features + +* ``"LBP"`` -- Local Binary Pattern features + +* ``"FEATURE2D"`` -- All types of Feature2D + +TrackerFeature::compute +----------------------- + +Compute the features in the images collection + +.. ocv:function:: void TrackerFeature::compute( const std::vector& images, Mat& response ) + + :param images: The images + + :param response: The output response + +TrackerFeature::selection +------------------------- + +Identify most effective features + +.. ocv:function:: void TrackerFeature::selection( Mat& response, int npoints ) + + :param response: Collection of response for the specific TrackerFeature + + :param npoints: Max number of features + +.. 
note:: This method modifies the response parameter + +TrackerFeature::getClassName +---------------------------- + +Get the name of the specific TrackerFeature + +.. ocv:function:: String TrackerFeature::getClassName() const + +Specialized TrackerFeature +========================== + +In [AAM]_ table I and section III C are described the most known features type. At moment only :ocv:class:`TrackerFeatureHAAR` is implemented. + +TrackerFeatureHAAR : TrackerFeature +----------------------------------- + +TrackerFeature based on HAAR features, used by TrackerMIL and many others algorithms + +.. ocv:class:: TrackerFeatureHAAR + +TrackerFeatureHAAR class:: + + class CV_EXPORTS_W TrackerFeatureHAAR : TrackerFeature + { + public: + + TrackerFeatureHAAR( const TrackerFeatureHAAR::Params ¶meters = TrackerFeatureHAAR::Params() ); + ~TrackerFeatureHAAR(); + + void selection( Mat& response, int npoints ); + bool extractSelected( const std::vector selFeatures, const std::vector& images, Mat& response ); + std::vector >& getMeanSigmaPairs(); + bool swapFeature( int source, int target ); + bool swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature ); + CvHaarEvaluator::FeatureHaar& getFeatureAt( int id ); + }; + +.. note:: HAAR features implementation is copied from apps/traincascade and modified according to MIL implementation + +TrackerFeatureHAAR::Params +-------------------------- + +.. ocv:struct:: TrackerFeatureHAAR::Params + +List of TrackerFeatureHAAR parameters:: + + struct CV_EXPORTS Params + { + Params(); + int numFeatures; // # of rects + Size rectSize; // rect size + bool isIntegral; // true if input images are integral, false otherwise + }; + +TrackerFeatureHAAR::TrackerFeatureHAAR +-------------------------------------- + +Constructor + +.. 
ocv:function:: TrackerFeatureHAAR::TrackerFeatureHAAR( const TrackerFeatureHAAR::Params ¶meters = TrackerFeatureHAAR::Params() ) + + :param parameters: TrackerFeatureHAAR parameters :ocv:struct:`TrackerFeatureHAAR::Params` + + +TrackerFeatureHAAR::selection +----------------------------- + +Identify most effective features + +.. ocv:function:: void TrackerFeatureHAAR::selection( Mat& response, int npoints ) + + :param response: Collection of response for the specific TrackerFeature + + :param npoints: Max number of features + +.. note:: This method modifies the response parameter + +TrackerFeatureHAAR::extractSelected +----------------------------------- + +Compute the features only for the selected indices in the images collection + +.. ocv:function:: bool TrackerFeatureHAAR::extractSelected( const std::vector selFeatures, const std::vector& images, Mat& response ) + + :param selFeatures: indices of selected features + + :param images: The images + + :param response: Collection of response for the specific TrackerFeature + +TrackerFeatureHAAR::getMeanSigmaPairs +------------------------------------- + +Get the list of mean/sigma. Return the list of mean/sigma + +.. ocv:function:: std::vector >& TrackerFeatureHAAR::getMeanSigmaPairs() + +TrackerFeatureHAAR::swapFeature +------------------------------- + +Swap the feature in position source with the feature in position target + +.. ocv:function:: bool TrackerFeatureHAAR::swapFeature( int source, int target ) + + :param source: The source position + + :param target: The target position + +Swap the feature in position id with the feature input + +.. ocv:function:: bool TrackerFeatureHAAR::swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature ) + + :param id: The position + + :param feature: The feature + +TrackerFeatureHAAR::getFeatureAt +-------------------------------- + +Get the feature in position id + +.. 
ocv:function:: CvHaarEvaluator::FeatureHaar& TrackerFeatureHAAR::getFeatureAt( int id ) + + :param id: The position + + +TrackerFeatureHOG +----------------- + +TODO To be implemented + +TrackerFeatureLBP +----------------- + +TODO To be implemented + +TrackerFeatureFeature2d +----------------------- + +TODO To be implemented diff --git a/modules/tracking/doc/common_interfaces_tracker_model.rst b/modules/tracking/doc/common_interfaces_tracker_model.rst new file mode 100644 index 000000000..ef0696f0d --- /dev/null +++ b/modules/tracking/doc/common_interfaces_tracker_model.rst @@ -0,0 +1,506 @@ +Common Interfaces of TrackerModel +================================= + +.. highlight:: cpp + +ConfidenceMap +------------- + +Represents the model of the target at frame :math:`k` (all states and scores) + +[AAM]_ The set of the pair :math:`\langle \hat{x}^{i}_{k}, C^{i}_{k} \rangle` + +.. c:type:: ConfidenceMap + +ConfidenceMap:: + + typedef std::vector, float> > ConfidenceMap; + +.. seealso:: + + :ocv:class:`TrackerTargetState` + +Trajectory +---------- + +Represents the estimate states for all frames + +[AAM]_ :math:`x_{k}` is the trajectory of the target up to time :math:`k` + +.. c:type:: Trajectory + +Trajectory:: + + typedef std::vector > Trajectory; + +.. seealso:: + + :ocv:class:`TrackerTargetState` + +TrackerTargetState +------------------ + +Abstract base class for TrackerTargetState that represents a possible state of the target. + +[AAM]_ :math:`\hat{x}^{i}_{k}` all the states candidates. + +Inherits this class with your Target state + +.. 
ocv:class:: TrackerTargetState + +TrackerTargetState class:: + + class CV_EXPORTS_W TrackerTargetState + { + public: + virtual ~TrackerTargetState(){}; + + Point2f getTargetPosition() const; + void setTargetPosition( const Point2f& position ); + + int getTargetWidth() const; + void setTargetWidth( int width ); + + int getTargetHeight() const; + void setTargetHeight( int height ); + + }; + +In own implementation you can add scale variation, width, height, orientation, etc. + + +TrackerStateEstimator +--------------------- + +Abstract base class for TrackerStateEstimator that estimates the most likely target state. + +[AAM]_ State estimator + +[AMVOT]_ Statistical modeling (Fig. 3), Table III (generative) - IV (discriminative) - V (hybrid) + +.. ocv:class:: TrackerStateEstimator + +TrackerStateEstimator class:: + + class CV_EXPORTS_W TrackerStateEstimator + { + public: + virtual ~TrackerStateEstimator(); + + static Ptr create( const String& trackeStateEstimatorType ); + + Ptr estimate( const std::vector& confidenceMaps ); + void update( std::vector& confidenceMaps ); + + String getClassName() const; + + }; + +TrackerStateEstimator::create +----------------------------- + +Create TrackerStateEstimator by tracker state estimator type + +.. ocv:function:: static Ptr TrackerStateEstimator::create( const String& trackeStateEstimatorType ) + + :param trackeStateEstimatorType: The TrackerStateEstimator name + +The modes available now: + +* ``"BOOSTING"`` -- Boosting-based discriminative appearance models. See [AMVOT]_ section 4.4 + +The modes available soon: + +* ``"SVM"`` -- SVM-based discriminative appearance models. See [AMVOT]_ section 4.5 + +TrackerStateEstimator::estimate +------------------------------- + +Estimate the most likely target state, return the estimated state + +.. 
ocv:function:: Ptr TrackerStateEstimator::estimate( const std::vector& confidenceMaps ) + + :param confidenceMaps: The overall appearance model as a list of :c:type:`ConfidenceMap` + +TrackerStateEstimator::update +----------------------------- + +Update the ConfidenceMap with the scores + +.. ocv:function:: void TrackerStateEstimator::update( std::vector& confidenceMaps ) + + :param confidenceMaps: The overall appearance model as a list of :c:type:`ConfidenceMap` + +TrackerStateEstimator::getClassName +----------------------------------- + +Get the name of the specific TrackerStateEstimator + +.. ocv:function:: String TrackerStateEstimator::getClassName() const + +TrackerModel +------------ + +Abstract class that represents the model of the target. It must be instantiated by specialized tracker + +[AAM]_ Ak + +Inherits this with your TrackerModel + +.. ocv:class:: TrackerModel + +TrackerModel class:: + + class CV_EXPORTS_W TrackerModel + { + public: + + TrackerModel(); + virtual ~TrackerModel(); + + void modelEstimation( const std::vector& responses ); + void modelUpdate(); + bool runStateEstimator(); + + bool setTrackerStateEstimator( Ptr trackerStateEstimator ); + void setLastTargetState( const Ptr& lastTargetState ); + + Ptr getLastTargetState() const; + const std::vector& getConfidenceMaps() const; + const ConfidenceMap& getLastConfidenceMap() const; + Ptr getTrackerStateEstimator() const; + }; + +TrackerModel::modelEstimation +----------------------------- + +Estimate the most likely target location + +[AAM]_ ME, Model Estimation table I + +.. ocv:function:: void TrackerModel::modelEstimation( const std::vector& responses ) + + :param responses: Features extracted from :ocv:class:`TrackerFeatureSet` + + +TrackerModel::modelUpdate +------------------------- + +Update the model + +[AAM]_ MU, Model Update table I + +.. 
ocv:function:: void TrackerModel::modelUpdate() + + +TrackerModel::runStateEstimator +------------------------------- + +Run the TrackerStateEstimator, return true if is possible to estimate a new state, false otherwise + +.. ocv:function:: bool TrackerModel::runStateEstimator() + +TrackerModel::setTrackerStateEstimator +-------------------------------------- + +Set TrackerEstimator, return true if the tracker state estimator is added, false otherwise + +.. ocv:function:: bool TrackerModel::setTrackerStateEstimator( Ptr trackerStateEstimator ) + + :param trackerStateEstimator: The :ocv:class:`TrackerStateEstimator` + +.. note:: You can add only one :ocv:class:`TrackerStateEstimator` + +TrackerModel::setLastTargetState +-------------------------------- + +Set the current :ocv:class:`TrackerTargetState` in the :c:type:`Trajectory` + +.. ocv:function:: void TrackerModel::setLastTargetState( const Ptr& lastTargetState ) + + :param lastTargetState: The current :ocv:class:`TrackerTargetState` + + +TrackerModel::getLastTargetState +-------------------------------- + +Get the last :ocv:class:`TrackerTargetState` from :c:type:`Trajectory` + +.. ocv:function:: Ptr TrackerModel::getLastTargetState() const + + +TrackerModel::getConfidenceMaps +------------------------------- + +Get the list of the :c:type:`ConfidenceMap` + +.. ocv:function:: const std::vector& TrackerModel::getConfidenceMaps() const + +TrackerModel::getLastConfidenceMap +---------------------------------- + +Get the last :c:type:`ConfidenceMap` for the current frame + +.. ocv:function:: const ConfidenceMap& TrackerModel::getLastConfidenceMap() const + +TrackerModel::getTrackerStateEstimator +-------------------------------------- + +Get the :ocv:class:`TrackerStateEstimator` + +.. ocv:function:: Ptr TrackerModel::getTrackerStateEstimator() const + +Specialized TrackerStateEstimator +================================= + +In [AMVOT]_ Statistical modeling (Fig. 
3), Table III (generative) - IV (discriminative) - V (hybrid) are described the most known statistical model. + +At moment :ocv:class:`TrackerStateEstimatorMILBoosting` and :ocv:class:`TrackerStateEstimatorAdaBoosting` are implemented. + +TrackerStateEstimatorMILBoosting : TrackerStateEstimator +-------------------------------------------------------- + +TrackerStateEstimator based on Boosting + +.. ocv:class:: TrackerStateEstimatorMILBoosting + +TrackerStateEstimatorMILBoosting class:: + + class CV_EXPORTS_W TrackerStateEstimatorMILBoosting : public TrackerStateEstimator + { + public: + class TrackerMILTargetState : public TrackerTargetState + { + ... + }; + TrackerStateEstimatorMILBoosting( int nFeatures = 250 ); + ~TrackerStateEstimatorMILBoosting(); + + void setCurrentConfidenceMap( ConfidenceMap& confidenceMap ); + }; + +TrackerMILTargetState : TrackerTargetState +------------------------------------------ + +Implementation of the target state for TrackerMILTargetState + +.. ocv:class:: TrackerMILTargetState + +TrackerMILTargetState class:: + + class TrackerMILTargetState : public TrackerTargetState + { + public: + TrackerMILTargetState( const Point2f& position, int targetWidth, int targetHeight, bool foreground, const Mat& features ); + ~TrackerMILTargetState(){}; + + void setTargetFg( bool foreground ); + void setFeatures( const Mat& features ); + bool isTargetFg() const; + Mat getFeatures() const; + }; + +TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setTargetFg +-------------------------------------------------------------------- + +Set label: true for target foreground, false for background + +.. 
ocv:function:: void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setTargetFg( bool foreground ) + + :param foreground: Label for background/foreground + +TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setFeatures +-------------------------------------------------------------------- + +Set the features extracted from :ocv:class:`TrackerFeatureSet` + +.. ocv:function:: void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setFeatures( const Mat& features ) + + :param features: The features extracted + +TrackerStateEstimatorMILBoosting::TrackerMILTargetState::isTargetFg +------------------------------------------------------------------- + +Get the label. Return true for target foreground, false for background + +.. ocv:function:: bool TrackerStateEstimatorMILBoosting::TrackerMILTargetState::isTargetFg() const + +TrackerStateEstimatorMILBoosting::TrackerMILTargetState::getFeatures +-------------------------------------------------------------------- + +Get the features extracted + +.. ocv:function:: void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setFeatures( const Mat& features ) + +TrackerStateEstimatorMILBoosting::TrackerStateEstimatorMILBoosting +------------------------------------------------------------------ + +Constructor + +.. ocv:function:: TrackerStateEstimatorMILBoosting::TrackerStateEstimatorMILBoosting( int nFeatures=250 ) + + :param nFeatures: Number of features for each sample + +TrackerStateEstimatorMILBoosting::setCurrentConfidenceMap +--------------------------------------------------------- + +Set the current confidenceMap + +.. ocv:function:: void TrackerStateEstimatorMILBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap ) + + :param confidenceMap: The current :c:type:`ConfidenceMap` + +TrackerStateEstimatorAdaBoosting : TrackerStateEstimator +-------------------------------------------------------- + +TrackerStateEstimatorAdaBoosting based on ADA-Boosting + +.. 
ocv:class:: TrackerStateEstimatorAdaBoosting + +TrackerStateEstimatorAdaBoosting class:: + + class CV_EXPORTS_W TrackerStateEstimatorAdaBoosting : public TrackerStateEstimator + { + public: + class TrackerAdaBoostingTargetState : public TrackerTargetState + { + ... + }; + TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, Size patchSize, const Rect& ROI, const std::vector >& meanSigma ); + ~TrackerStateEstimatorAdaBoosting(); + + Rect getSampleROI() const; + void setSampleROI( const Rect& ROI ); + void setCurrentConfidenceMap( ConfidenceMap& confidenceMap ); + std::vector computeSelectedWeakClassifier(); + std::vector computeReplacedClassifier(); + std::vector computeSwappedClassifier(); + void setMeanSigmaPair( const std::vector >& meanSigmaPair ); + }; + +TrackerAdaBoostingTargetState : TrackerTargetState +-------------------------------------------------- + +Implementation of the target state for TrackerAdaBoostingTargetState + +.. ocv:class:: TrackerAdaBoostingTargetState + +TrackerAdaBoostingTargetState class:: + + class TrackerAdaBoostingTargetState : public TrackerTargetState + { + public: + TrackerAdaBoostingTargetState( const Point2f& position, int width, int height, bool foreground, const Mat& responses ); + ~TrackerAdaBoostingTargetState(){}; + + void setTargetResponses( const Mat& responses ); + void setTargetFg( bool foreground ); + Mat getTargetResponses() const; + bool isTargetFg() const; + }; + +TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetFg +---------------------------------------------------------------------------- + +Set label: true for target foreground, false for background + +.. 
ocv:function:: void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetFg( bool foreground ) + + :param foreground: Label for background/foreground + +TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetResponses +----------------------------------------------------------------------------------- + +Set the features extracted from :ocv:class:`TrackerFeatureSet` + +.. ocv:function:: void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetResponses( const Mat& responses ) + + :param responses: The features extracted + +TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::isTargetFg +--------------------------------------------------------------------------- + +Get the label. Return true for target foreground, false for background + +.. ocv:function:: bool TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::isTargetFg() const + +TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::getTargetResponses +----------------------------------------------------------------------------------- + +Get the features extracted + +.. ocv:function:: Mat TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::getTargetResponses() + +TrackerStateEstimatorAdaBoosting::TrackerStateEstimatorAdaBoosting +------------------------------------------------------------------ + +Constructor + +.. 
ocv:function:: TrackerStateEstimatorAdaBoosting::TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, Size patchSize, const Rect& ROI, const std::vector >& meanSigma ) + + :param numClassifer: Number of base classifiers + + :param initIterations: Number of iterations in the initialization + + :param nFeatures: Number of features/weak classifiers + + :param patchSize: tracking rect + + :param ROI: initial ROI + + :param meanSigma: pairs of mean/sigma + +TrackerStateEstimatorAdaBoosting::setCurrentConfidenceMap +--------------------------------------------------------- + +Set the current confidenceMap + +.. ocv:function:: void TrackerStateEstimatorAdaBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap ) + + :param confidenceMap: The current :c:type:`ConfidenceMap` + +TrackerStateEstimatorAdaBoosting::getSampleROI +---------------------------------------------- + +Get the sampling ROI + +.. ocv:function:: Rect TrackerStateEstimatorAdaBoosting::getSampleROI() const + +TrackerStateEstimatorAdaBoosting::setSampleROI +---------------------------------------------- + +Set the sampling ROI + +.. ocv:function:: void TrackerStateEstimatorAdaBoosting::setSampleROI( const Rect& ROI ) + + :param ROI: the sampling ROI + +TrackerStateEstimatorAdaBoosting::computeSelectedWeakClassifier +--------------------------------------------------------------- + +Get the list of the selected weak classifiers for the classification step + +.. ocv:function:: std::vector TrackerStateEstimatorAdaBoosting::computeSelectedWeakClassifier() + +TrackerStateEstimatorAdaBoosting::computeReplacedClassifier +----------------------------------------------------------- + +Get the list of the weak classifiers that should be replaced + +.. 
ocv:function:: std::vector TrackerStateEstimatorAdaBoosting::computeReplacedClassifier() + +TrackerStateEstimatorAdaBoosting::computeSwappedClassifier +---------------------------------------------------------- + +Get the list of the weak classifiers that replace those to be replaced + +.. ocv:function:: std::vector TrackerStateEstimatorAdaBoosting::computeSwappedClassifier() + +TrackerStateEstimatorAdaBoosting::setMeanSigmaPair +-------------------------------------------------- + +Set the mean/sigma to instantiate possibly new classifiers + +.. ocv:function:: void TrackerStateEstimatorAdaBoosting::setMeanSigmaPair( const std::vector >& meanSigmaPair ) + + :param meanSigmaPair: the mean/sigma pairs diff --git a/modules/tracking/doc/common_interfaces_tracker_sampler.rst b/modules/tracking/doc/common_interfaces_tracker_sampler.rst new file mode 100644 index 000000000..5355508cb --- /dev/null +++ b/modules/tracking/doc/common_interfaces_tracker_sampler.rst @@ -0,0 +1,293 @@ +Common Interfaces of TrackerSampler +=================================== + +.. highlight:: cpp + + +TrackerSampler +-------------- + +Class that manages the sampler in order to select regions for the update the model of the tracker + +[AAM]_ Sampling e Labeling. See table I and section III B + +.. ocv:class:: TrackerSampler + +TrackerSampler class:: + + class CV_EXPORTS_W TrackerSampler + { + public: + + TrackerSampler(); + ~TrackerSampler(); + + void sampling( const Mat& image, Rect boundingBox ); + + const std::vector > >& getSamplers() const; + const std::vector& getSamples() const; + + bool addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType ); + bool addTrackerSamplerAlgorithm( Ptr& sampler ); + + }; + + +TrackerSampler is an aggregation of :ocv:class:`TrackerSamplerAlgorithm` + +.. seealso:: + + :ocv:class:`TrackerSamplerAlgorithm` + +TrackerSampler::sampling +------------------------ + +Computes the regions starting from a position in an image + +.. 
ocv:function:: void TrackerSampler::sampling( const Mat& image, Rect boundingBox ) + + :param image: The current frame + + :param boundingBox: The bounding box from which regions can be calculated + + +TrackerSampler::getSamplers +--------------------------- + +Return the collection of the :ocv:class:`TrackerSamplerAlgorithm` + +.. ocv:function:: const std::vector > >& TrackerSampler::getSamplers() const + + +TrackerSampler::getSamples +-------------------------- + +Return the samples from all :ocv:class:`TrackerSamplerAlgorithm`, [AAM]_ Fig. 1 variable Sk + +.. ocv:function:: const std::vector& TrackerSampler::getSamples() const + +TrackerSampler::addTrackerSamplerAlgorithm +------------------------------------------ + +Add TrackerSamplerAlgorithm in the collection. +Return true if sampler is added, false otherwise + +.. ocv:function:: bool TrackerSampler::addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType ) + + :param trackerSamplerAlgorithmType: The TrackerSamplerAlgorithm name + +.. ocv:function:: bool TrackerSampler::addTrackerSamplerAlgorithm( Ptr& sampler ) + + :param sampler: The TrackerSamplerAlgorithm class + + +The modes available now: + +* ``"CSC"`` -- Current State Center + +* ``"CS"`` -- Current State + +Example ``TrackerSamplerAlgorithm::addTrackerSamplerAlgorithm`` : :: + + //sample usage: + + TrackerSamplerCSC::Params CSCparameters; + Ptr CSCSampler = new TrackerSamplerCSC( CSCparameters ); + + if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) ) + return false; + + //or add CSC sampler with default parameters + //sampler->addTrackerSamplerAlgorithm( "CSC" ); + + +.. note:: If you use the second method, you must initialize the TrackerSamplerAlgorithm + + +TrackerSamplerAlgorithm +----------------------- + +Abstract base class for TrackerSamplerAlgorithm that represents the algorithm for the specific sampler. + +.. 
ocv:class:: TrackerSamplerAlgorithm + +TrackerSamplerAlgorithm class:: + + class CV_EXPORTS_W TrackerSamplerAlgorithm + { + public: + + virtual ~TrackerSamplerAlgorithm(); + + static Ptr create( const String& trackerSamplerType ); + + bool sampling( const Mat& image, Rect boundingBox, std::vector& sample ); + String getClassName() const; + }; + +TrackerSamplerAlgorithm::create +------------------------------- + +Create TrackerSamplerAlgorithm by tracker sampler type. + +.. ocv:function:: static Ptr TrackerSamplerAlgorithm::create( const String& trackerSamplerType ) + + :param trackerSamplerType: The trackerSamplerType name + +The modes available now: + +* ``"CSC"`` -- Current State Center + +* ``"CS"`` -- Current State + + +TrackerSamplerAlgorithm::sampling +--------------------------------- + +Computes the regions starting from a position in an image. Return true if samples are computed, false otherwise + +.. ocv:function:: bool TrackerSamplerAlgorithm::sampling( const Mat& image, Rect boundingBox, std::vector& sample ) + + :param image: The current frame + + :param boundingBox: The bounding box from which regions can be calculated + + :param sample: The computed samples [AAM]_ Fig. 1 variable Sk + +TrackerSamplerAlgorithm::getClassName +------------------------------------- + +Get the name of the specific TrackerSamplerAlgorithm + +.. ocv:function:: String TrackerSamplerAlgorithm::getClassName() const + +Specialized TrackerSamplerAlgorithm +=================================== + +Table I of [AAM]_ describes the best-known sampling strategies. At the moment :ocv:class:`TrackerSamplerCSC` and :ocv:class:`TrackerSamplerCS` are implemented. + +TrackerSamplerCSC : TrackerSamplerAlgorithm +------------------------------------------- + +TrackerSampler based on CSC (current state centered), used by the MIL algorithm TrackerMIL + +.. 
ocv:class:: TrackerSamplerCSC + +TrackerSamplerCSC class:: + + + class CV_EXPORTS_W TrackerSamplerCSC + { + public: + + TrackerSamplerCSC( const TrackerSamplerCSC::Params ¶meters = TrackerSamplerCSC::Params() ); + void setMode( int samplingMode ); + + ~TrackerSamplerCSC(); + }; + + +TrackerSamplerCSC::Params +------------------------- + +.. ocv:struct:: TrackerSamplerCSC::Params + +List of TrackerSamplerCSC parameters:: + + struct CV_EXPORTS Params + { + Params(); + float initInRad; // radius for gathering positive instances during init + float trackInPosRad; // radius for gathering positive instances during tracking + float searchWinSize; // size of search window + int initMaxNegNum; // # negative samples to use during init + int trackMaxPosNum; // # positive samples to use during training + int trackMaxNegNum; // # negative samples to use during training + }; + + +TrackerSamplerCSC::TrackerSamplerCSC +------------------------------------ + +Constructor + +.. ocv:function:: TrackerSamplerCSC::TrackerSamplerCSC( const TrackerSamplerCSC::Params ¶meters = TrackerSamplerCSC::Params() ) + + :param parameters: TrackerSamplerCSC parameters :ocv:struct:`TrackerSamplerCSC::Params` + +TrackerSamplerCSC::setMode +-------------------------- + +Set the sampling mode of TrackerSamplerCSC + +.. ocv:function:: void TrackerSamplerCSC::setMode( int samplingMode ) + + :param samplingMode: The sampling mode + +The modes are: + +* ``"MODE_INIT_POS = 1"`` -- for the positive sampling in initialization step +* ``"MODE_INIT_NEG = 2"`` -- for the negative sampling in initialization step +* ``"MODE_TRACK_POS = 3"`` -- for the positive sampling in update step +* ``"MODE_TRACK_NEG = 4"`` -- for the negative sampling in update step +* ``"MODE_DETECT = 5"`` -- for the sampling in detection step + +TrackerSamplerCS : TrackerSamplerAlgorithm +------------------------------------------- + +TrackerSampler based on CS (current state), used by algorithm TrackerBoosting + +.. 
ocv:class:: TrackerSamplerCS + +TrackerSamplerCS class:: + + + class CV_EXPORTS_W TrackerSamplerCS + { + public: + + TrackerSamplerCS( const TrackerSamplerCS::Params ¶meters = TrackerSamplerCS::Params() ); + void setMode( int samplingMode ); + + ~TrackerSamplerCS(); + }; + + +TrackerSamplerCS::Params +------------------------- + +.. ocv:struct:: TrackerSamplerCS::Params + +List of TrackerSamplerCS parameters:: + + struct CV_EXPORTS Params + { + Params(); + float overlap; //overlapping for the search windows + float searchFactor; //search region parameter + }; + + +TrackerSamplerCS::TrackerSamplerCS +------------------------------------ + +Constructor + +.. ocv:function:: TrackerSamplerCS::TrackerSamplerCS( const TrackerSamplerCS::Params ¶meters = TrackerSamplerCS::Params() ) + + :param parameters: TrackerSamplerCS parameters :ocv:struct:`TrackerSamplerCS::Params` + +TrackerSamplerCS::setMode +-------------------------- + +Set the sampling mode of TrackerSamplerCS + +.. ocv:function:: void TrackerSamplerCS::setMode( int samplingMode ) + + :param samplingMode: The sampling mode + +The modes are: + +* ``"MODE_POSITIVE = 1"`` -- for the positive sampling +* ``"MODE_NEGATIVE = 2"`` -- for the negative sampling +* ``"MODE_CLASSIFY = 3"`` -- for the sampling in classification step diff --git a/modules/tracking/doc/misc/Tracker.txt b/modules/tracking/doc/misc/Tracker.txt new file mode 100644 index 000000000..9bacdfeda --- /dev/null +++ b/modules/tracking/doc/misc/Tracker.txt @@ -0,0 +1,43 @@ +@startuml +package "Tracker package" #DDDDDD { + + + + +class Algorithm { +} +class Tracker{ + Ptr featureSet; + Ptr sampler; + Ptr model; + --- + +static Ptr create(const string& trackerType); + +bool init(const Mat& image, const Rect& boundingBox); + +bool update(const Mat& image, Rect& boundingBox); +} +class Tracker +note right: Tracker is the general interface for each specialized trackers +class TrackerMIL{ + +Params + --- + TrackerMIL(TrackerMIL::Params parameters); + +bool 
init(const Mat& image, const Rect& boundingBox); + +bool update(const Mat& image, Rect& boundingBox); +} +class TrackerBoosting{ + +Params + --- + TrackerBoosting(TrackerBoosting::Params parameters); + +bool init(const Mat& image, const Rect& boundingBox); + +bool update(const Mat& image, Rect& boundingBox); +} +Algorithm <|-- Tracker : virtual inheritance +Tracker <|-- TrackerMIL +Tracker <|-- TrackerBoosting + +note "Single instance of the Tracker" as N1 +TrackerBoosting .. N1 +TrackerMIL .. N1 +} + +@enduml diff --git a/modules/tracking/doc/misc/TrackerFeature.txt b/modules/tracking/doc/misc/TrackerFeature.txt new file mode 100644 index 000000000..b09818a7b --- /dev/null +++ b/modules/tracking/doc/misc/TrackerFeature.txt @@ -0,0 +1,55 @@ +@startuml +package "TrackerFeature package" #DDDDDD { + +class TrackerFeatureSet{ + -vector > > features + -vector responses + ... + TrackerFeatureSet(); + ~TrackerFeatureSet(); + -- + +extraction(const std::vector& images); + +selection(); + +removeOutliers(); + +vector response getResponses(); + +vector > > getTrackerFeatures(); + +bool addTrackerFeature(string trackerFeatureType); + +bool addTrackerFeature(Ptr& feature); + -clearResponses(); +} + +class TrackerFeature <>{ + static Ptr = create(const string& trackerFeatureType); + compute(const std::vector& images, Mat& response); + selection(Mat& response, int npoints); +} +note bottom: Can be specialized as in table II\nA tracker can use more types of features + +class TrackerFeatureFeature2D{ + -vector keypoints + --- + TrackerFeatureFeature2D(string detectorType, string descriptorType); + ~TrackerFeatureFeature2D(); + --- + compute(const std::vector& images, Mat& response); + selection( Mat& response, int npoints); +} +class TrackerFeatureHOG{ + TrackerFeatureHOG(); + ~TrackerFeatureHOG(); + --- + compute(const std::vector& images, Mat& response); + selection(Mat& response, int npoints); +} + +TrackerFeatureSet *-- TrackerFeature +TrackerFeature <|-- TrackerFeatureHOG 
+TrackerFeature <|-- TrackerFeatureFeature2D + + +note "Per readability and simplicity in this diagram\n there are only two TrackerFeature but you\n can considering the implementation of the other TrackerFeature" as N1 +TrackerFeatureHOG .. N1 +TrackerFeatureFeature2D .. N1 +} + +@enduml diff --git a/modules/tracking/doc/misc/TrackerModel.txt b/modules/tracking/doc/misc/TrackerModel.txt new file mode 100644 index 000000000..e25281a07 --- /dev/null +++ b/modules/tracking/doc/misc/TrackerModel.txt @@ -0,0 +1,62 @@ +@startuml +package "TrackerModel package" #DDDDDD { + +class Typedef << (T,#FF7700) >>{ + ConfidenceMap + Trajectory +} + +class TrackerModel{ + -vector confidenceMaps; + -Trajectory trajectory; + -Ptr stateEstimator; + ... + TrackerModel(); + ~TrackerModel(); + + +bool setTrackerStateEstimator(Ptr trackerStateEstimator); + +Ptr getTrackerStateEstimator(); + + +void modelEstimation(const vector& responses); + +void modelUpdate(); + +void setLastTargetState(const Ptr lastTargetState); + +void runStateEstimator(); + + +const vector& getConfidenceMaps(); + +const ConfidenceMap& getLastConfidenceMap(); +} +class TrackerTargetState <>{ + Point2f targetPosition; + --- + Point2f getTargetPosition(); + void setTargetPosition(Point2f position); +} +class TrackerTargetState +note bottom: Each TrackerStateEstimator can create own state + +class TrackerStateEstimator <>{ + ~TrackerStateEstimator(); + static Ptr create(const String& trackeStateEstimatorType); + Ptr estimate(const vector& confidenceMaps) + void update(vector& confidenceMaps) +} + +class TrackerStateEstimatorSVM{ + TrackerStateEstimatorSVM() + ~TrackerStateEstimatorSVM() + Ptr estimate(const vector& confidenceMaps) + void update(vector& confidenceMaps) +} +class TrackerStateEstimatorMILBoosting{ + TrackerStateEstimatorMILBoosting() + ~TrackerStateEstimatorMILBoosting() + Ptr estimate(const vector& confidenceMaps) + void update(vector& confidenceMaps) +} + +TrackerModel -> TrackerStateEstimator: create 
+TrackerModel *-- TrackerTargetState +TrackerStateEstimator <|-- TrackerStateEstimatorMILBoosting +TrackerStateEstimator <|-- TrackerStateEstimatorSVM +} +@enduml diff --git a/modules/tracking/doc/misc/TrackerSampler.txt b/modules/tracking/doc/misc/TrackerSampler.txt new file mode 100644 index 000000000..61b174ecb --- /dev/null +++ b/modules/tracking/doc/misc/TrackerSampler.txt @@ -0,0 +1,44 @@ +@startuml +package "TrackerSampler package" #DDDDDD { + +class TrackerSampler{ + -vector > > samplers + -vector samples; + ... + TrackerSampler(); + ~TrackerSampler(); + +sampling(const Mat& image, Rect boundingBox); + +const vector > >& getSamplers(); + +const vector& getSamples(); + +bool addTrackerSamplerAlgorithm(String trackerSamplerAlgorithmType); + +bool addTrackerSamplerAlgorithm(Ptr& sampler); + --- + -void clearSamples(); +} + +class TrackerSamplerAlgorithm{ + ~TrackerSamplerAlgorithm(); + +static Ptr create(const String& trackerSamplerType); + +bool sampling(const Mat& image, Rect boundingBox, vector& sample); +} +note bottom: A tracker could sample the target\nor it could sample the target and the background + + +class TrackerSamplerCS{ + TrackerSamplerCS(); + ~TrackerSamplerCS(); + +bool sampling(const Mat& image, Rect boundingBox, vector& sample); +} +class TrackerSamplerCSC{ + TrackerSamplerCSC(); + ~TrackerSamplerCSC(); + +bool sampling(const Mat& image, Rect boundingBox, vector& sample); +} + + + +TrackerSampler *-- TrackerSamplerAlgorithm +TrackerSamplerAlgorithm <|-- TrackerSamplerCS +TrackerSamplerAlgorithm <|-- TrackerSamplerCSC +} +@enduml diff --git a/modules/tracking/doc/misc/packages.txt b/modules/tracking/doc/misc/packages.txt new file mode 100644 index 000000000..085b43553 --- /dev/null +++ b/modules/tracking/doc/misc/packages.txt @@ -0,0 +1,19 @@ +@startuml +package "Tracker" #DDDDDD { + +} +package "TrackerFeature" #DDDDDD { + +} +package "TrackerSampler" #DDDDDD { + +} +package "TrackerModel" #DDDDDD { + +} +Tracker -> TrackerModel: create 
+Tracker -> TrackerSampler: create +Tracker -> TrackerFeature: create + + +@enduml \ No newline at end of file diff --git a/modules/tracking/doc/pics/Tracker.png b/modules/tracking/doc/pics/Tracker.png new file mode 100644 index 000000000..f5aa2bbd1 Binary files /dev/null and b/modules/tracking/doc/pics/Tracker.png differ diff --git a/modules/tracking/doc/pics/TrackerFeature.png b/modules/tracking/doc/pics/TrackerFeature.png new file mode 100644 index 000000000..8533be0c7 Binary files /dev/null and b/modules/tracking/doc/pics/TrackerFeature.png differ diff --git a/modules/tracking/doc/pics/TrackerModel.png b/modules/tracking/doc/pics/TrackerModel.png new file mode 100644 index 000000000..7eb446dde Binary files /dev/null and b/modules/tracking/doc/pics/TrackerModel.png differ diff --git a/modules/tracking/doc/pics/TrackerSampler.png b/modules/tracking/doc/pics/TrackerSampler.png new file mode 100644 index 000000000..ad00a32f3 Binary files /dev/null and b/modules/tracking/doc/pics/TrackerSampler.png differ diff --git a/modules/tracking/doc/pics/package.png b/modules/tracking/doc/pics/package.png new file mode 100644 index 000000000..adff6a8b7 Binary files /dev/null and b/modules/tracking/doc/pics/package.png differ diff --git a/modules/tracking/doc/tracker_algorithms.rst b/modules/tracking/doc/tracker_algorithms.rst new file mode 100644 index 000000000..519fb4219 --- /dev/null +++ b/modules/tracking/doc/tracker_algorithms.rst @@ -0,0 +1,121 @@ +Tracker Algorithms +================== + +.. highlight:: cpp + +Two algorithms will be implemented soon, the first is MIL (Multiple Instance Learning) [MIL]_ and second is Online Boosting [OLB]_. + +.. [MIL] B Babenko, M-H Yang, and S Belongie, Visual Tracking with Online Multiple Instance Learning, In CVPR, 2009 + +.. [OLB] H Grabner, M Grabner, and H Bischof, Real-time tracking via on-line boosting, In Proc. 
BMVC, volume 1, pages 47– 56, 2006 + +TrackerBoosting +--------------- + +This is a real-time object tracking based on a novel on-line version of the AdaBoost algorithm. +The classifier uses the surrounding background as negative examples in update step to avoid the drifting problem. + +.. ocv:class:: TrackerBoosting + +Implementation of TrackerBoosting from :ocv:class:`Tracker`:: + + class CV_EXPORTS_W TrackerBoosting : public Tracker + { + public: + + TrackerBoosting( const TrackerBoosting::Params ¶meters = TrackerBoosting::Params() ); + + virtual ~TrackerBoosting(); + + void read( const FileNode& fn ); + void write( FileStorage& fs ) const; + + + }; + +TrackerMIL +---------- + +The MIL algorithm trains a classifier in an online manner to separate the object from the background. Multiple Instance Learning avoids the drift problem for a robust tracking. + +Original code can be found here http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml + +.. ocv:class:: TrackerMIL + +Implementation of TrackerMIL from :ocv:class:`Tracker`:: + + class CV_EXPORTS_W TrackerMIL : public Tracker + { + public: + + TrackerMIL( const TrackerMIL::Params ¶meters = TrackerMIL::Params() ); + + virtual ~TrackerMIL(); + + void read( const FileNode& fn ); + void write( FileStorage& fs ) const; + + }; + +TrackerMIL::Params +------------------ + +.. 
ocv:struct:: TrackerMIL::Params + +List of MIL parameters:: + + struct CV_EXPORTS Params + { + Params(); + //parameters for sampler + float samplerInitInRadius; // radius for gathering positive instances during init + int samplerInitMaxNegNum; // # negative samples to use during init + float samplerSearchWinSize; // size of search window + float samplerTrackInRadius; // radius for gathering positive instances during tracking + int samplerTrackMaxPosNum; // # positive samples to use during tracking + int samplerTrackMaxNegNum; // # negative samples to use during tracking + + int featureSetNumFeatures; // # features + + void read( const FileNode& fn ); + void write( FileStorage& fs ) const; + }; + +TrackerMIL::TrackerMIL +---------------------- + +Constructor + +.. ocv:function:: TrackerMIL::TrackerMIL( const TrackerMIL::Params &parameters = TrackerMIL::Params() ) + + :param parameters: MIL parameters :ocv:struct:`TrackerMIL::Params` + + +TrackerBoosting::Params +----------------------- + +.. ocv:struct:: TrackerBoosting::Params + +List of BOOSTING parameters:: + + struct CV_EXPORTS Params + { + Params(); + int numClassifiers; //the number of classifiers to use in an OnlineBoosting algorithm + float samplerOverlap; //search region parameters to use in an OnlineBoosting algorithm + float samplerSearchFactor; // search region parameters to use in an OnlineBoosting algorithm + int iterationInit; //the initial iterations + int featureSetNumFeatures; // #features + + void read( const FileNode& fn ); + void write( FileStorage& fs ) const; + }; + +TrackerBoosting::TrackerBoosting +-------------------------------- + +Constructor + +.. 
ocv:function:: TrackerBoosting::TrackerBoosting( const TrackerBoosting::Params &parameters = TrackerBoosting::Params() ) + + :param parameters: BOOSTING parameters :ocv:struct:`TrackerBoosting::Params` diff --git a/modules/tracking/doc/tracking.rst b/modules/tracking/doc/tracking.rst new file mode 100644 index 000000000..8ad9fd56a --- /dev/null +++ b/modules/tracking/doc/tracking.rst @@ -0,0 +1,86 @@ +Tracking API +============ + +.. highlight:: cpp + + +Long-term optical tracking API +------------------------------ +Long-term optical tracking is one of the most important issues for many computer vision applications in real world scenarios. +The development in this area is very fragmented and this API is a unique interface useful for plugging in several algorithms and comparing them. +This work is partially based on [AAM]_ and [AMVOT]_. + +These algorithms start from a bounding box of the target and with their internal representation they avoid the drift during the tracking. +These long-term trackers are able to evaluate online the quality of the location of the target in the new frame, without ground truth. + +There are three main components: the TrackerSampler, the TrackerFeatureSet and the TrackerModel. The first component is the object that computes the patches over the frame based on the last target location. +The TrackerFeatureSet is the class that manages the Features; it is possible to plug in many kinds of these (HAAR, HOG, LBP, Feature2D, etc). +The last component is the internal representation of the target, it is the appearance model. It stores all state candidates and computes the trajectory (the most likely target states). The class TrackerTargetState represents a possible state of the target. +The TrackerSampler and the TrackerFeatureSet are the visual representation of the target, while the TrackerModel is the statistical model. + +A recent benchmark between these algorithms can be found in [OOT]_. + +UML design: +----------- + +**General diagram** + +.. 
image:: pics/package.png + :width: 50% + :alt: General diagram + :align: center + +**Tracker diagram** + +.. image:: pics/Tracker.png + :width: 80% + :alt: Tracker diagram + :align: center + +**TrackerSampler diagram** + +.. image:: pics/TrackerSampler.png + :width: 100% + :alt: TrackerSampler diagram + :align: center + +**TrackerFeatureSet diagram** + +.. image:: pics/TrackerFeature.png + :width: 100% + :alt: TrackerFeatureSet diagram + :align: center + +**TrackerModel diagram** + +.. image:: pics/TrackerModel.png + :width: 100% + :alt: TrackerModel diagram + :align: center + +To see how API works, try tracker demo: +https://github.com/lenlen/opencv/blob/tracking_api/samples/cpp/tracker.cpp + + +.. note:: This Tracking API has been designed with PlantUML. If you modify this API please change UML files under modules/tracking/misc/ + +The following reference was used in the API + +.. [AAM] S Salti, A Cavallaro, L Di Stefano, Adaptive Appearance Modeling for Video Tracking: Survey and Evaluation, IEEE Transactions on Image Processing, Vol. 21, Issue 10, October 2012, pp. 4334-4348 + +.. [AMVOT] X Li, W Hu, C Shen, Z Zhang, A Dick, A van den Hengel, A Survey of Appearance Models in Visual Object Tracking, ACM Transactions on Intelligent Systems and Technology (TIST), 2013 + +.. [OOT] Yi Wu and Jongwoo Lim and Ming-Hsuan Yang, Online Object Tracking: A Benchmark, The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), 2013 + + +Tracker classes: +---------------- + +.. 
toctree:: + :maxdepth: 2 + + tracker_algorithms + common_interfaces_tracker + common_interfaces_tracker_sampler + common_interfaces_tracker_feature_set + common_interfaces_tracker_model diff --git a/modules/tracking/include/opencv2/tracking.hpp b/modules/tracking/include/opencv2/tracking.hpp new file mode 100644 index 000000000..55fb05d40 --- /dev/null +++ b/modules/tracking/include/opencv2/tracking.hpp @@ -0,0 +1,52 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_TRACKING_HPP__ +#define __OPENCV_TRACKING_HPP__ + +#include "opencv2/tracking/tracker.hpp" + +namespace cv +{ +CV_EXPORTS bool initModule_tracking(void); +} + +#endif //__OPENCV_TRACKING_HPP__ diff --git a/modules/tracking/include/opencv2/tracking/feature.hpp b/modules/tracking/include/opencv2/tracking/feature.hpp new file mode 100644 index 000000000..34c0f15af --- /dev/null +++ b/modules/tracking/include/opencv2/tracking/feature.hpp @@ -0,0 +1,410 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. 
+ // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_FEATURE_HPP__ +#define __OPENCV_FEATURE_HPP__ + +#include "opencv2/core.hpp" +#include "opencv2/imgproc.hpp" +#include +#include +#include + +/* + * TODO This implementation is based on apps/traincascade/ + * TODO Changed CvHaarEvaluator based on ADABOOSTING implementation (Grabner et al.) 
+ */ + +namespace cv +{ + +#define FEATURES "features" + +#define CC_FEATURES FEATURES +#define CC_FEATURE_PARAMS "featureParams" +#define CC_MAX_CAT_COUNT "maxCatCount" +#define CC_FEATURE_SIZE "featSize" +#define CC_NUM_FEATURES "numFeat" +#define CC_ISINTEGRAL "isIntegral" +#define CC_RECTS "rects" +#define CC_TILTED "tilted" +#define CC_RECT "rect" + +#define LBPF_NAME "lbpFeatureParams" +#define HOGF_NAME "HOGFeatureParams" +#define HFP_NAME "haarFeatureParams" + +#define CV_HAAR_FEATURE_MAX 3 +#define N_BINS 9 +#define N_CELLS 4 + +#define CV_SUM_OFFSETS( p0, p1, p2, p3, rect, step ) \ + /* (x, y) */ \ + (p0) = (rect).x + (step) * (rect).y; \ + /* (x + w, y) */ \ + (p1) = (rect).x + (rect).width + (step) * (rect).y; \ + /* (x + w, y) */ \ + (p2) = (rect).x + (step) * ((rect).y + (rect).height); \ + /* (x + w, y + h) */ \ + (p3) = (rect).x + (rect).width + (step) * ((rect).y + (rect).height); + +#define CV_TILTED_OFFSETS( p0, p1, p2, p3, rect, step ) \ + /* (x, y) */ \ + (p0) = (rect).x + (step) * (rect).y; \ + /* (x - h, y + h) */ \ + (p1) = (rect).x - (rect).height + (step) * ((rect).y + (rect).height);\ + /* (x + w, y + w) */ \ + (p2) = (rect).x + (rect).width + (step) * ((rect).y + (rect).width); \ + /* (x + w - h, y + w + h) */ \ + (p3) = (rect).x + (rect).width - (rect).height \ + + (step) * ((rect).y + (rect).width + (rect).height); + +float calcNormFactor( const Mat& sum, const Mat& sqSum ); + +template +void _writeFeatures( const std::vector features, FileStorage &fs, const Mat& featureMap ) +{ + fs << FEATURES << "["; + const Mat_& featureMap_ = (const Mat_&) featureMap; + for ( int fi = 0; fi < featureMap.cols; fi++ ) + if( featureMap_( 0, fi ) >= 0 ) + { + fs << "{"; + features[fi].write( fs ); + fs << "}"; + } + fs << "]"; +} + +class CvParams +{ + public: + CvParams(); + virtual ~CvParams() + { + } + // from|to file + virtual void write( FileStorage &fs ) const = 0; + virtual bool read( const FileNode &node ) = 0; + // from|to screen + virtual 
void printDefaults() const; + virtual void printAttrs() const; + virtual bool scanAttr( const std::string prmName, const std::string val ); + std::string name; +}; + +class CvFeatureParams : public CvParams +{ + public: + enum + { + HAAR = 0, + LBP = 1, + HOG = 2 + }; + CvFeatureParams(); + virtual void init( const CvFeatureParams& fp ); + virtual void write( FileStorage &fs ) const; + virtual bool read( const FileNode &node ); + static Ptr create( int featureType ); + int maxCatCount; // 0 in case of numerical features + int featSize; // 1 in case of simple features (HAAR, LBP) and N_BINS(9)*N_CELLS(4) in case of Dalal's HOG features + int numFeatures; +}; + +class CvFeatureEvaluator +{ + public: + virtual ~CvFeatureEvaluator() + { + } + virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize ); + virtual void setImage( const Mat& img, uchar clsLabel, int idx ); + virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const = 0; + virtual float operator()( int featureIdx, int sampleIdx ) = 0; + static Ptr create( int type ); + + int getNumFeatures() const + { + return numFeatures; + } + int getMaxCatCount() const + { + return featureParams->maxCatCount; + } + int getFeatureSize() const + { + return featureParams->featSize; + } + const Mat& getCls() const + { + return cls; + } + float getCls( int si ) const + { + return cls.at( si, 0 ); + } + protected: + virtual void generateFeatures() = 0; + + int npos, nneg; + int numFeatures; + Size winSize; + CvFeatureParams *featureParams; + Mat cls; +}; + +class CvHaarFeatureParams : public CvFeatureParams +{ + public: + + CvHaarFeatureParams(); + + virtual void init( const CvFeatureParams& fp ); + virtual void write( FileStorage &fs ) const; + virtual bool read( const FileNode &node ); + + virtual void printDefaults() const; + virtual void printAttrs() const; + virtual bool scanAttr( const std::string prm, const std::string val ); + + bool isIntegral; +}; + +class 
CvHaarEvaluator : public CvFeatureEvaluator +{ + public: + + class FeatureHaar + { + + public: + + FeatureHaar( Size patchSize ); + bool eval( const Mat& image, Rect ROI, float* result ) const; + int getNumAreas(); + const std::vector& getWeights() const; + const std::vector& getAreas() const; + void write( FileStorage ) const + { + } + ; + float getInitMean() const; + float getInitSigma() const; + + private: + int m_type; + int m_numAreas; + std::vector m_weights; + float m_initMean; + float m_initSigma; + void generateRandomFeature( Size imageSize ); + float getSum( const Mat& image, Rect imgROI ) const; + std::vector m_areas; // areas within the patch over which to compute the feature + cv::Size m_initSize; // size of the patch used during training + cv::Size m_curSize; // size of the patches currently under investigation + float m_scaleFactorHeight; // scaling factor in vertical direction + float m_scaleFactorWidth; // scaling factor in horizontal direction + std::vector m_scaleAreas; // areas after scaling + std::vector m_scaleWeights; // weights after scaling + + }; + + virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize ); + virtual void setImage( const Mat& img, uchar clsLabel = 0, int idx = 1 ); + virtual float operator()( int featureIdx, int sampleIdx ); + virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const; + void writeFeature( FileStorage &fs ) const; // for old file format + const std::vector& getFeatures() const; + inline CvHaarEvaluator::FeatureHaar& getFeatures( int idx ) + { + return features[idx]; + } + void setWinSize( Size patchSize ); + Size setWinSize() const; + virtual void generateFeatures(); + + /** + * TODO new method + * \brief Overload the original generateFeatures in order to limit the number of the features + * @param numFeatures Number of the features + */ + + virtual void generateFeatures( int numFeatures ); + + protected: + bool isIntegral; + + /* TODO Added from MIL 
implementation */ + Mat _ii_img; + void compute_integral( const cv::Mat & img, std::vector > & ii_imgs ) + { + Mat ii_img; + integral( img, ii_img, CV_32F ); + split( ii_img, ii_imgs ); + } + + std::vector features; + Mat sum; /* sum images (each row represents image) */ +}; + +struct CvHOGFeatureParams : public CvFeatureParams +{ + CvHOGFeatureParams(); +}; + +class CvHOGEvaluator : public CvFeatureEvaluator +{ + public: + virtual ~CvHOGEvaluator() + { + } + virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize ); + virtual void setImage( const Mat& img, uchar clsLabel, int idx ); + virtual float operator()( int varIdx, int sampleIdx ); + virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const; + protected: + virtual void generateFeatures(); + virtual void integralHistogram( const Mat &img, std::vector &histogram, Mat &norm, int nbins ) const; + class Feature + { + public: + Feature(); + Feature( int offset, int x, int y, int cellW, int cellH ); + float calc( const std::vector &_hists, const Mat &_normSum, size_t y, int featComponent ) const; + void write( FileStorage &fs ) const; + void write( FileStorage &fs, int varIdx ) const; + + Rect rect[N_CELLS]; //cells + + struct + { + int p0, p1, p2, p3; + } fastRect[N_CELLS]; + }; + std::vector features; + + Mat normSum; //for nomalization calculation (L1 or L2) + std::vector hist; +}; + +inline float CvHOGEvaluator::operator()( int varIdx, int sampleIdx ) +{ + int featureIdx = varIdx / ( N_BINS * N_CELLS ); + int componentIdx = varIdx % ( N_BINS * N_CELLS ); + //return features[featureIdx].calc( hist, sampleIdx, componentIdx); + return features[featureIdx].calc( hist, normSum, sampleIdx, componentIdx ); +} + +inline float CvHOGEvaluator::Feature::calc( const std::vector& _hists, const Mat& _normSum, size_t y, int featComponent ) const +{ + float normFactor; + float res; + + int binIdx = featComponent % N_BINS; + int cellIdx = featComponent / N_BINS; + + const 
float *phist = _hists[binIdx].ptr( (int) y ); + res = phist[fastRect[cellIdx].p0] - phist[fastRect[cellIdx].p1] - phist[fastRect[cellIdx].p2] + phist[fastRect[cellIdx].p3]; + + const float *pnormSum = _normSum.ptr( (int) y ); + normFactor = (float) ( pnormSum[fastRect[0].p0] - pnormSum[fastRect[1].p1] - pnormSum[fastRect[2].p2] + pnormSum[fastRect[3].p3] ); + res = ( res > 0.001f ) ? ( res / ( normFactor + 0.001f ) ) : 0.f; //for cutting negative values, which apper due to floating precision + + return res; +} + +struct CvLBPFeatureParams : CvFeatureParams +{ + CvLBPFeatureParams(); + +}; + +class CvLBPEvaluator : public CvFeatureEvaluator +{ + public: + virtual ~CvLBPEvaluator() + { + } + virtual void init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize ); + virtual void setImage( const Mat& img, uchar clsLabel, int idx ); + virtual float operator()( int featureIdx, int sampleIdx ) + { + return (float) features[featureIdx].calc( sum, sampleIdx ); + } + virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const; + protected: + virtual void generateFeatures(); + + class Feature + { + public: + Feature(); + Feature( int offset, int x, int y, int _block_w, int _block_h ); + uchar calc( const Mat& _sum, size_t y ) const; + void write( FileStorage &fs ) const; + + Rect rect; + int p[16]; + }; + std::vector features; + + Mat sum; +}; + +inline uchar CvLBPEvaluator::Feature::calc( const Mat &_sum, size_t y ) const +{ + const int* psum = _sum.ptr( (int) y ); + int cval = psum[p[5]] - psum[p[6]] - psum[p[9]] + psum[p[10]]; + + return (uchar) ( ( psum[p[0]] - psum[p[1]] - psum[p[4]] + psum[p[5]] >= cval ? 128 : 0 ) | // 0 + ( psum[p[1]] - psum[p[2]] - psum[p[5]] + psum[p[6]] >= cval ? 64 : 0 ) | // 1 + ( psum[p[2]] - psum[p[3]] - psum[p[6]] + psum[p[7]] >= cval ? 32 : 0 ) | // 2 + ( psum[p[6]] - psum[p[7]] - psum[p[10]] + psum[p[11]] >= cval ? 16 : 0 ) | // 5 + ( psum[p[10]] - psum[p[11]] - psum[p[14]] + psum[p[15]] >= cval ? 
8 : 0 ) | // 8 + ( psum[p[9]] - psum[p[10]] - psum[p[13]] + psum[p[14]] >= cval ? 4 : 0 ) | // 7 + ( psum[p[8]] - psum[p[9]] - psum[p[12]] + psum[p[13]] >= cval ? 2 : 0 ) | // 6 + ( psum[p[4]] - psum[p[5]] - psum[p[8]] + psum[p[9]] >= cval ? 1 : 0 ) ); // 3 +} + +} /* namespace cv */ + +#endif diff --git a/modules/tracking/include/opencv2/tracking/onlineBoosting.hpp b/modules/tracking/include/opencv2/tracking/onlineBoosting.hpp new file mode 100644 index 000000000..d0eb3a10b --- /dev/null +++ b/modules/tracking/include/opencv2/tracking/onlineBoosting.hpp @@ -0,0 +1,282 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. 
+ // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_ONLINEBOOSTING_HPP__ +#define __OPENCV_ONLINEBOOSTING_HPP__ + +#include "opencv2/core.hpp" + +namespace cv +{ +//TODO based on the original implementation +//http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml + +class BaseClassifier; +class WeakClassifierHaarFeature; +class EstimatedGaussDistribution; +class ClassifierThreshold; +class Detector; + +class StrongClassifierDirectSelection +{ + public: + + StrongClassifierDirectSelection( int numBaseClf, int numWeakClf, Size patchSz, const Rect& sampleROI, bool useFeatureEx = false, int iterationInit = + 0 ); + virtual ~StrongClassifierDirectSelection(); + + void initBaseClassifier(); + + bool update( const Mat& image, int target, float importance = 1.0 ); + float eval( const Mat& response ); + std::vector getSelectedWeakClassifier(); + float classifySmooth( const std::vector& images, const Rect& sampleROI, int& idx ); + int getNumBaseClassifier(); + Size getPatchSize() const; + Rect getROI() const; + bool getUseFeatureExchange() const; + int getReplacedClassifier() const; + + void replaceWeakClassifier( int idx ); + int getSwappedClassifier() const; + private: + + //StrongClassifier + 
int numBaseClassifier; + int numAllWeakClassifier; + int numWeakClassifier; + int iterInit; + BaseClassifier** baseClassifier; + std::vector alpha; + cv::Size patchSize; + + bool useFeatureExchange; + + //StrongClassifierDirectSelection + std::vector m_errorMask; + std::vector m_errors; + std::vector m_sumErrors; + + Detector* detector; + Rect ROI; + + int replacedClassifier; + int swappedClassifier; +}; + +class BaseClassifier +{ + public: + + BaseClassifier( int numWeakClassifier, int iterationInit ); + BaseClassifier( int numWeakClassifier, int iterationInit, WeakClassifierHaarFeature** weakCls ); + + WeakClassifierHaarFeature** getReferenceWeakClassifier() + { + return weakClassifier; + } + ; + void trainClassifier( const Mat& image, int target, float importance, std::vector& errorMask ); + int selectBestClassifier( std::vector& errorMask, float importance, std::vector & errors ); + int computeReplaceWeakestClassifier( const std::vector & errors ); + void replaceClassifierStatistic( int sourceIndex, int targetIndex ); + int getIdxOfNewWeakClassifier() + { + return m_idxOfNewWeakClassifier; + } + ; + int eval( const Mat& image ); + virtual ~BaseClassifier(); + float getError( int curWeakClassifier ); + void getErrors( float* errors ); + int getSelectedClassifier() const; + void replaceWeakClassifier( int index ); + + protected: + + void generateRandomClassifier(); + WeakClassifierHaarFeature** weakClassifier; + bool m_referenceWeakClassifier; + int m_numWeakClassifier; + int m_selectedClassifier; + int m_idxOfNewWeakClassifier; + std::vector m_wCorrect; + std::vector m_wWrong; + int m_iterationInit; + +}; + +class EstimatedGaussDistribution +{ + public: + + EstimatedGaussDistribution(); + EstimatedGaussDistribution( float P_mean, float R_mean, float P_sigma, float R_sigma ); + virtual ~EstimatedGaussDistribution(); + void update( float value ); //, float timeConstant = -1.0); + float getMean(); + float getSigma(); + void setValues( float mean, float sigma ); + + 
private: + + float m_mean; + float m_sigma; + float m_P_mean; + float m_P_sigma; + float m_R_mean; + float m_R_sigma; +}; + +class WeakClassifierHaarFeature +{ + + public: + + WeakClassifierHaarFeature(); + virtual ~WeakClassifierHaarFeature(); + + bool update( float value, int target ); + int eval( float value ); + + private: + + float sigma; + float mean; + ClassifierThreshold* m_classifier; + + void getInitialDistribution( EstimatedGaussDistribution *distribution ); + void generateRandomClassifier( EstimatedGaussDistribution* m_posSamples, EstimatedGaussDistribution* m_negSamples ); + +}; + +class Detector +{ + public: + + Detector( StrongClassifierDirectSelection* classifier ); + virtual + ~Detector( void ); + + void + classifySmooth( const std::vector& image, float minMargin = 0 ); + + int + getNumDetections(); + float + getConfidence( int patchIdx ); + float + getConfidenceOfDetection( int detectionIdx ); + + float getConfidenceOfBestDetection() + { + return m_maxConfidence; + } + ; + int + getPatchIdxOfBestDetection(); + + int + getPatchIdxOfDetection( int detectionIdx ); + + const std::vector & + getIdxDetections() const + { + return m_idxDetections; + } + ; + const std::vector & + getConfidences() const + { + return m_confidences; + } + ; + + const cv::Mat & + getConfImageDisplay() const + { + return m_confImageDisplay; + } + + private: + + void + prepareConfidencesMemory( int numPatches ); + void + prepareDetectionsMemory( int numDetections ); + + StrongClassifierDirectSelection* m_classifier; + std::vector m_confidences; + int m_sizeConfidences; + int m_numDetections; + std::vector m_idxDetections; + int m_sizeDetections; + int m_idxBestDetection; + float m_maxConfidence; + cv::Mat_ m_confMatrix; + cv::Mat_ m_confMatrixSmooth; + cv::Mat_ m_confImageDisplay; +}; + +class ClassifierThreshold +{ + public: + + ClassifierThreshold( EstimatedGaussDistribution* posSamples, EstimatedGaussDistribution* negSamples ); + virtual ~ClassifierThreshold(); + + void 
update( float value, int target ); + int eval( float value ); + + void* getDistribution( int target ); + + private: + + EstimatedGaussDistribution* m_posSamples; + EstimatedGaussDistribution* m_negSamples; + + float m_threshold; + int m_parity; +}; + +} /* namespace cv */ + +#endif diff --git a/modules/tracking/include/opencv2/tracking/onlineMIL.hpp b/modules/tracking/include/opencv2/tracking/onlineMIL.hpp new file mode 100644 index 000000000..2bb006334 --- /dev/null +++ b/modules/tracking/include/opencv2/tracking/onlineMIL.hpp @@ -0,0 +1,114 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. 
+ // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_ONLINEMIL_HPP__ +#define __OPENCV_ONLINEMIL_HPP__ + +#include "opencv2/core.hpp" +#include + +namespace cv +{ +//TODO based on the original implementation +//http://vision.ucsd.edu/~bbabenko/project_miltrack.shtml + +#define sign(s) ((s > 0 ) ? 1 : ((s<0) ? 
-1 : 0)) + +class ClfOnlineStump; + +class ClfMilBoost +{ + public: + struct CV_EXPORTS Params + { + Params(); + int _numSel; + int _numFeat; + float _lRate; + }; + + ClfMilBoost(); + ~ClfMilBoost(); + void init( const ClfMilBoost::Params ¶meters = ClfMilBoost::Params() ); + void update( const Mat& posx, const Mat& negx ); + std::vector classify( const Mat& x, bool logR = true ); + + inline float sigmoid( float x ) + { + return 1.0f / ( 1.0f + exp( -x ) ); + } + + private: + uint _numsamples; + ClfMilBoost::Params _myParams; + std::vector _selectors; + std::vector _weakclf; + uint _counter; + +}; + +class ClfOnlineStump +{ + public: + float _mu0, _mu1, _sig0, _sig1; + float _q; + int _s; + float _log_n1, _log_n0; + float _e1, _e0; + float _lRate; + + ClfOnlineStump(); + ClfOnlineStump( int ind ); + void init(); + void update( const Mat& posx, const Mat& negx, const cv::Mat_ & posw = cv::Mat_(), const cv::Mat_ & negw = cv::Mat_() ); + bool classify( const Mat& x, int i ); + float classifyF( const Mat& x, int i ); + std::vector classifySetF( const Mat& x ); + + private: + bool _trained; + int _ind; + +}; + +} /* namespace cv */ + +#endif diff --git a/modules/tracking/include/opencv2/tracking/tracker.hpp b/modules/tracking/include/opencv2/tracking/tracker.hpp new file mode 100644 index 000000000..bea3e8558 --- /dev/null +++ b/modules/tracking/include/opencv2/tracking/tracker.hpp @@ -0,0 +1,1021 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. 
+ // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_TRACKER_HPP__ +#define __OPENCV_TRACKER_HPP__ + +#include "opencv2/core.hpp" +#include "opencv2/imgproc/types_c.h" +#include "feature.hpp" +#include "onlineMIL.hpp" +#include "onlineBoosting.hpp" +#include + +/* + * Partially based on: + * ==================================================================================================================== + * - [AAM] S. Salti, A. Cavallaro, L. 
Di Stefano, Adaptive Appearance Modeling for Video Tracking: Survey and Evaluation + * - [AMVOT] X. Li, W. Hu, C. Shen, Z. Zhang, A. Dick, A. van den Hengel, A Survey of Appearance Models in Visual Object Tracking + * + * This Tracking API has been designed with PlantUML. If you modify this API please change UML files under modules/tracking/misc/ + * + */ + +namespace cv +{ + +/************************************ TrackerFeature Base Classes ************************************/ + +/** + * \brief Abstract base class for TrackerFeature that represents the feature. + */ +class CV_EXPORTS_W TrackerFeature +{ + public: + virtual ~TrackerFeature(); + + /** + * \brief Compute the features in the images collection + * \param images The images. + * \param response Computed features. + */ + void compute( const std::vector& images, Mat& response ); + + /** + * \brief Create TrackerFeature by tracker feature type. + */ + static Ptr create( const String& trackerFeatureType ); + + /** + * \brief Identify most effective features + * \param response Collection of response for the specific TrackerFeature + * \param npoints Max number of features + */ + virtual void selection( Mat& response, int npoints ) = 0; + + /** + * \brief Get the name of the specific tracker feature + * \return The name of the tracker feature + */ + String getClassName() const; + + protected: + + virtual bool computeImpl( const std::vector& images, Mat& response ) = 0; + + String className; +}; + +/** + * \brief Class that manages the extraction and selection of features + * [AAM] Feature Extraction and Feature Set Refinement (Feature Processing and Feature Selection). 
See table I and section III C + * [AMVOT] Appearance modelling -> Visual representation (Table II, section 3.1 - 3.2) + */ +class CV_EXPORTS_W TrackerFeatureSet +{ + public: + + TrackerFeatureSet(); + + ~TrackerFeatureSet(); + + /** + * \brief Extract features from the images collection + * \param images The images + */ + void extraction( const std::vector& images ); + + /** + * \brief Identify most effective features for all feature types + */ + void selection(); + + /** + * \brief Remove outliers for all feature types + */ + void removeOutliers(); + + /** + * \brief Add TrackerFeature in the collection from tracker feature type + * \param trackerFeatureType the tracker feature type FEATURE2D.DETECTOR.DESCRIPTOR - HOG - HAAR - LBP + * \return true if feature is added, false otherwise + */ + bool addTrackerFeature( String trackerFeatureType ); + + /** + * \brief Add TrackerFeature in collection directly + * \param feature The TrackerFeature + * \return true if feature is added, false otherwise + */ + bool addTrackerFeature( Ptr& feature ); + + /** + * \brief Get the TrackerFeature collection + * \return The TrackerFeature collection + */ + const std::vector > >& getTrackerFeature() const; + + /** + * \brief Get the responses + * \return the responses + */ + const std::vector& getResponses() const; + + private: + + void clearResponses(); + bool blockAddTrackerFeature; + + std::vector > > features; //list of features + std::vector responses; //list of response after compute + +}; + +/************************************ TrackerSampler Base Classes ************************************/ + +/** + * \brief Abstract base class for TrackerSamplerAlgorithm that represents the algorithm for the specific sampler. + */ +class CV_EXPORTS_W TrackerSamplerAlgorithm +{ + public: + /** + * \brief Destructor + */ + virtual ~TrackerSamplerAlgorithm(); + + /** + * \brief Create TrackerSamplerAlgorithm by tracker sampler type. 
+ */ + static Ptr create( const String& trackerSamplerType ); + + /** + * \brief Computes the regions starting from a position in an image + * \param image The image + * \param boundingBox The bounding box from which regions can be calculated + * \param sample The computed samples [AAM] Fig. 1 variable Sk + * \return true if samples are computed, false otherwise + */ + bool sampling( const Mat& image, Rect boundingBox, std::vector& sample ); + + /** + * \brief Get the name of the specific sampler algorithm + * \return The name of the tracker sampler algorithm + */ + String getClassName() const; + + protected: + String className; + + virtual bool samplingImpl( const Mat& image, Rect boundingBox, std::vector& sample ) = 0; +}; + +/** + * \brief Class that manages the sampler in order to select regions for the update the model of the tracker + * [AAM] Sampling e Labeling. See table I and section III B + */ +class CV_EXPORTS_W TrackerSampler +{ + public: + + /** + * \brief Constructor + */ + TrackerSampler(); + + /** + * \brief Destructor + */ + ~TrackerSampler(); + + /** + * \brief Computes the regions starting from a position in an image + * \param image The image + * \param boundingBox The bounding box from which regions can be calculated + */ + void sampling( const Mat& image, Rect boundingBox ); + + /** + * Get the all samplers + * \return The samplers + */ + const std::vector > >& getSamplers() const; + + /** + * Get the samples from all TrackerSamplerAlgorithm + * \return The samples [AAM] Fig. 
1 variable Sk + */ + const std::vector& getSamples() const; + + /** + * \brief Add TrackerSamplerAlgorithm in the collection from tracker sampler type + * \param trackerSamplerAlgorithmType the tracker sampler type CSC - CS + * \return true if sampler is added, false otherwise + */ + bool addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType ); + + /** + * \brief Add TrackerSamplerAlgorithm in collection directly + * \param sampler The TrackerSamplerAlgorithm + * \return true if sampler is added, false otherwise + */ + bool addTrackerSamplerAlgorithm( Ptr& sampler ); + + private: + std::vector > > samplers; + std::vector samples; + bool blockAddTrackerSampler; + + void clearSamples(); +}; + +/************************************ TrackerModel Base Classes ************************************/ + +/** + * \brief Abstract base class for TrackerTargetState that represents a possible state of the target + * [AAM] x̄_i all the states candidates + * Inherits this with your Target state + */ +class CV_EXPORTS_W TrackerTargetState +{ + public: + virtual ~TrackerTargetState() + { + } + ; + /** + * \brief Get the position + * \return The position + */ + Point2f getTargetPosition() const; + + /** + * \brief Set the position + * \param position The position + */ + void setTargetPosition( const Point2f& position ); + /** + * \brief Get the width of the target + * \return The width of the target + */ + int getTargetWidth() const; + + /** + * \brief Set the width of the target + * \param width The width of the target + */ + void setTargetWidth( int width ); + /** + * \brief Get the height of the target + * \return The height of the target + */ + int getTargetHeight() const; + + /** + * \brief Set the height of the target + * \param height The height of the target + */ + void setTargetHeight( int height ); + + protected: + Point2f targetPosition; + int targetWidth; + int targetHeight; + +}; + +/** + * \brief Represents the model of the target at frame k (all states and 
scores) + * [AAM] The set of the pair (x̄_k(i), C_k(i)) + */ +typedef std::vector, float> > ConfidenceMap; + +/** + * \brief Represents the estimate states for all frames + * [AAM] Xk is the trajectory of the target up to time k + */ +typedef std::vector > Trajectory; + +/** + * \brief Abstract base class for TrackerStateEstimator that estimates the most likely target state. + * [AAM] State estimator + * [AMVOT] Statistical modeling (Fig. 3), Table III (generative) - IV (discriminative) - V (hybrid) + */ +class CV_EXPORTS_W TrackerStateEstimator +{ + public: + virtual ~TrackerStateEstimator(); + + /** + * \brief Estimate the most likely target state + * \param confidenceMaps The overall appearance model + * \return The estimated state + */ + Ptr estimate( const std::vector& confidenceMaps ); + + /** + * \brief Update the ConfidenceMap with the scores + * \param confidenceMaps The overall appearance model + */ + void update( std::vector& confidenceMaps ); + + /** + * \brief Create TrackerStateEstimator by tracker state estimator type SVM - BOOSTING. + */ + static Ptr create( const String& trackeStateEstimatorType ); + + /** + * \brief Get the name of the specific state estimator + * \return The name of the state estimator + */ + String getClassName() const; + + protected: + + virtual Ptr estimateImpl( const std::vector& confidenceMaps ) = 0; + virtual void updateImpl( std::vector& confidenceMaps ) = 0; + String className; +}; + +/** + * \brief Abstract class that represents the model of the target. 
It must be instantiated by specialized tracker + * [AAM] Ak + */ +class CV_EXPORTS_W TrackerModel +{ + public: + + /** + * \brief Constructor + */ + TrackerModel(); + + /** + * \brief Destructor + */ + virtual ~TrackerModel(); + + /** + * \brief Set TrackerEstimator + * \return true if the tracker state estimator is added, false otherwise + */ + bool setTrackerStateEstimator( Ptr trackerStateEstimator ); + + /** + * \brief Estimate the most likely target location + * [AAM] ME, Model Estimation table I + * \param responses Features extracted + */ + void modelEstimation( const std::vector& responses ); + + /** + * \brief Update the model + * [AAM] MU, Model Update table I + */ + void modelUpdate(); + + /** + * \brief Run the TrackerStateEstimator + * \return true if is possible to estimate a new state, false otherwise + */ + bool runStateEstimator(); + + /** + * \brief Set the current estimated state + * \param lastTargetState the current estimated state + */ + void setLastTargetState( const Ptr& lastTargetState ); + + /** + * \brief Get the last target state + * \return The last target state + */ + Ptr getLastTargetState() const; + + /** + * \brief Get the list of the confidence map + * \return The list of the confidence map + */ + const std::vector& getConfidenceMaps() const; + + /** + * \brief Get the last confidence map + * \return The the last confidence map + */ + const ConfidenceMap& getLastConfidenceMap() const; + + /** + * \brief Get the tracker state estimator + * \return The tracker state estimator + */ + Ptr getTrackerStateEstimator() const; + + private: + + void clearCurrentConfidenceMap(); + + protected: + std::vector confidenceMaps; + Ptr stateEstimator; + ConfidenceMap currentConfidenceMap; + Trajectory trajectory; + int maxCMLength; + + virtual void modelEstimationImpl( const std::vector& responses ) = 0; + virtual void modelUpdateImpl() = 0; + +}; + +/************************************ Tracker Base Class ************************************/ + 
+/** + * \brief Abstract base class for Tracker algorithm. + */ +class CV_EXPORTS_W Tracker : public virtual Algorithm +{ + public: + + virtual ~Tracker(); + + /** + * \brief Initialize the tracker at the first frame. + * \param image The image. + * \param boundingBox The bounding box. + * \return true the tracker is initialized, false otherwise + */ + bool init( const Mat& image, const Rect& boundingBox ); + + /** + * \brief Update the tracker at the next frames. + * \param image The image. + * \param boundingBox The bounding box. + * \return true the tracker is updated, false otherwise + */ + bool update( const Mat& image, Rect& boundingBox ); + + /** + * \brief Create tracker by tracker type MIL - BOOSTING. + */ + static Ptr create( const String& trackerType ); + + protected: + + virtual bool initImpl( const Mat& image, const Rect& boundingBox ) = 0; + virtual bool updateImpl( const Mat& image, Rect& boundingBox ) = 0; + + bool isInit; + + Ptr featureSet; + Ptr sampler; + Ptr model; + +}; + +/************************************ Specific TrackerStateEstimator Classes ************************************/ + +/** + * \brief TrackerStateEstimator based on MILBoosting + */ +class CV_EXPORTS_W TrackerStateEstimatorMILBoosting : public TrackerStateEstimator +{ + public: + + /** + * Implementation of the target state for TrackerStateEstimatorMILBoosting + */ + class TrackerMILTargetState : public TrackerTargetState + { + + public: + /** + * \brief Constructor + * \param position Top left corner of the bounding box + * \param width Width of the bounding box + * \param height Height of the bounding box + * \param foreground label for target or background + * \param features features extracted + */ + TrackerMILTargetState( const Point2f& position, int width, int height, bool foreground, const Mat& features ); + + /** + * \brief Destructor + */ + ~TrackerMILTargetState() + { + } + ; + + /** + * setters and getters + */ + void setTargetFg( bool foreground ); + void 
setFeatures( const Mat& features ); + bool isTargetFg() const; + Mat getFeatures() const; + + private: + bool isTarget; + Mat targetFeatures; + }; + + TrackerStateEstimatorMILBoosting( int nFeatures = 250 ); + ~TrackerStateEstimatorMILBoosting(); + + void setCurrentConfidenceMap( ConfidenceMap& confidenceMap ); + + protected: + Ptr estimateImpl( const std::vector& confidenceMaps ); + void updateImpl( std::vector& confidenceMaps ); + + private: + uint max_idx( const std::vector &v ); + void prepareData( const ConfidenceMap& confidenceMap, Mat& positive, Mat& negative ); + + ClfMilBoost boostMILModel; + bool trained; + int numFeatures; + + ConfidenceMap currentConfidenceMap; +}; + +/** + * \brief TrackerStateEstimator based on AdaBoosting + */ +class CV_EXPORTS_W TrackerStateEstimatorAdaBoosting : public TrackerStateEstimator +{ + public: + class TrackerAdaBoostingTargetState : public TrackerTargetState + { + + public: + /** + * \brief Constructor + * \param position Top left corner of the bounding box + * \param width Width of the bounding box + * \param height Height of the bounding box + * \param foreground label for target or background + * \param responses list of features + */ + TrackerAdaBoostingTargetState( const Point2f& position, int width, int height, bool foreground, const Mat& responses ); + + /** + * \brief Destructor + */ + ~TrackerAdaBoostingTargetState() + { + } + ; + + /** + * setters and getters + */ + void setTargetResponses( const Mat& responses ); + void setTargetFg( bool foreground ); + Mat getTargetResponses() const; + bool isTargetFg() const; + + private: + bool isTarget; + Mat targetResponses; + + }; + + /** + * \brief Constructor + * \param numClassifer Number of base classifiers + * \param initIterations Number of iterations in the initialization + * \param nFeatures Number of features/weak classifiers + * \param patchSize tracking rect + * \param ROI initial ROI + */ + TrackerStateEstimatorAdaBoosting( int numClassifer, int 
initIterations, int nFeatures, Size patchSize, const Rect& ROI ); + + /** + * \brief Destructor + */ + ~TrackerStateEstimatorAdaBoosting(); + + /** + * \brief Get the sampling ROI + * \return the sampling ROI + */ + Rect getSampleROI() const; + + /** + * \brief Set the sampling ROI + * \param ROI the sampling ROI + */ + void setSampleROI( const Rect& ROI ); + + /** + * \brief Set the current confidence map + * \param confidenceMap the current confidence map + */ + void setCurrentConfidenceMap( ConfidenceMap& confidenceMap ); + + /** + * \brief Get the list of the selected weak classifiers for the classification step + * \return the list of the selected weak classifiers + */ + std::vector computeSelectedWeakClassifier(); + + /** + * \brief Get the list of the weak classifiers that should be replaced + * \return the list of the weak classifiers + */ + std::vector computeReplacedClassifier(); + + /** + * \brief Get the list of the weak classifiers that replace those to be replaced + * \return the list of the weak classifiers + */ + std::vector computeSwappedClassifier(); + + protected: + Ptr estimateImpl( const std::vector& confidenceMaps ); + void updateImpl( std::vector& confidenceMaps ); + + Ptr boostClassifier; + + private: + int numBaseClassifier; + int iterationInit; + int numFeatures; + bool trained; + Size initPatchSize; + Rect sampleROI; + std::vector replacedClassifier; + std::vector swappedClassifier; + + ConfidenceMap currentConfidenceMap; +}; + +/** + * \brief TrackerStateEstimator based on SVM + */ +class CV_EXPORTS_W TrackerStateEstimatorSVM : public TrackerStateEstimator +{ + public: + TrackerStateEstimatorSVM(); + ~TrackerStateEstimatorSVM(); + + protected: + Ptr estimateImpl( const std::vector& confidenceMaps ); + void updateImpl( std::vector& confidenceMaps ); +}; + +/************************************ Specific TrackerSamplerAlgorithm Classes ************************************/ + +/** + * \brief TrackerSampler based on CSC (current state 
centered) + */ +class CV_EXPORTS_W TrackerSamplerCSC : public TrackerSamplerAlgorithm +{ + public: + enum + { + MODE_INIT_POS = 1, // mode for init positive samples + MODE_INIT_NEG = 2, // mode for init negative samples + MODE_TRACK_POS = 3, // mode for update positive samples + MODE_TRACK_NEG = 4, // mode for update negative samples + MODE_DETECT = 5 // mode for detect samples + }; + + struct CV_EXPORTS Params + { + Params(); + float initInRad; // radius for gathering positive instances during init + float trackInPosRad; // radius for gathering positive instances during tracking + float searchWinSize; // size of search window + int initMaxNegNum; // # negative samples to use during init + int trackMaxPosNum; // # positive samples to use during training + int trackMaxNegNum; // # negative samples to use during training + }; + + TrackerSamplerCSC( const TrackerSamplerCSC::Params ¶meters = TrackerSamplerCSC::Params() ); + + /** + * \brief set the sampling mode + */ + void setMode( int samplingMode ); + + ~TrackerSamplerCSC(); + + protected: + + bool samplingImpl( const Mat& image, Rect boundingBox, std::vector& sample ); + + private: + + Params params; + int mode; + RNG rng; + + std::vector sampleImage( const Mat& img, int x, int y, int w, int h, float inrad, float outrad = 0, int maxnum = 1000000 ); +}; + +/** + * \brief TrackerSampler based on CS (current state) + */ +class CV_EXPORTS_W TrackerSamplerCS : public TrackerSamplerAlgorithm +{ + public: + enum + { + MODE_POSITIVE = 1, // mode for positive samples + MODE_NEGATIVE = 2, // mode for negative samples + MODE_CLASSIFY = 3 // mode for classify samples + }; + + struct CV_EXPORTS Params + { + Params(); + float overlap; //overlapping for the search windows + float searchFactor; //search region parameter + }; + TrackerSamplerCS( const TrackerSamplerCS::Params ¶meters = TrackerSamplerCS::Params() ); + + /** + * \brief set the sampling mode + */ + void setMode( int samplingMode ); + + ~TrackerSamplerCS(); + + bool 
samplingImpl( const Mat& image, Rect boundingBox, std::vector& sample ); + Rect getROI() const; + private: + Rect getTrackingROI( float searchFactor ); + Rect RectMultiply( const Rect & rect, float f ); + std::vector patchesRegularScan( const Mat& image, Rect trackingROI, Size patchSize ); + void setCheckedROI( Rect imageROI ); + + Params params; + int mode; + Rect trackedPatch; + Rect validROI; + Rect ROI; + +}; + +/************************************ Specific TrackerFeature Classes ************************************/ + +/** + * \brief TrackerFeature based on Feature2D + */ +class CV_EXPORTS_W TrackerFeatureFeature2d : public TrackerFeature +{ + public: + + /** + * \brief Constructor + * \param detectorType string of FeatureDetector + * \param descriptorType string of DescriptorExtractor + */ + TrackerFeatureFeature2d( String detectorType, String descriptorType ); + + ~TrackerFeatureFeature2d(); + + void selection( Mat& response, int npoints ); + + protected: + + bool computeImpl( const std::vector& images, Mat& response ); + + private: + + std::vector keypoints; +}; + +/** + * \brief TrackerFeature based on HOG + */ +class CV_EXPORTS_W TrackerFeatureHOG : public TrackerFeature +{ + public: + + TrackerFeatureHOG(); + + ~TrackerFeatureHOG(); + + void selection( Mat& response, int npoints ); + + protected: + + bool computeImpl( const std::vector& images, Mat& response ); + +}; + +/** + * \brief TrackerFeature based on HAAR + */ +class CV_EXPORTS_W TrackerFeatureHAAR : public TrackerFeature +{ + public: + struct CV_EXPORTS Params + { + Params(); + int numFeatures; // # of rects + Size rectSize; // rect size + bool isIntegral; // true if input images are integral, false otherwise + }; + + TrackerFeatureHAAR( const TrackerFeatureHAAR::Params ¶meters = TrackerFeatureHAAR::Params() ); + + ~TrackerFeatureHAAR(); + + /** + * \brief Compute the features only for the selected indices in the images collection + * \param selFeatures indices of selected features + * \param 
images The images. + * \param response Computed features. + */ + bool extractSelected( const std::vector selFeatures, const std::vector& images, Mat& response ); + + void selection( Mat& response, int npoints ); + + /** + * \brief Swap the feature in position source with the feature in position target + * \param source The source position + * \param target The target position + */ + bool swapFeature( int source, int target ); + + /** + * \brief Swap the feature in position id with the feature input + * \param id The position + * \param feature The feature + */ + bool swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature ); + + /** + * \brief Get the feature + * \param id The position + * \return the feature in position id + */ + CvHaarEvaluator::FeatureHaar& getFeatureAt( int id ); + + protected: + bool computeImpl( const std::vector& images, Mat& response ); + + private: + + Params params; + Ptr featureEvaluator; +}; + +/** + * \brief TrackerFeature based on LBP + */ +class CV_EXPORTS_W TrackerFeatureLBP : public TrackerFeature +{ + public: + + TrackerFeatureLBP(); + + ~TrackerFeatureLBP(); + + void selection( Mat& response, int npoints ); + + protected: + + bool computeImpl( const std::vector& images, Mat& response ); + +}; + +/************************************ Specific Tracker Classes ************************************/ + +/** + \brief TrackerMIL implementation. 
+ For more details see B Babenko, MH Yang, S Belongie, Visual Tracking with Online Multiple Instance Learning + */ + +class CV_EXPORTS_W TrackerMIL : public Tracker +{ + public: + struct CV_EXPORTS Params + { + Params(); + //parameters for sampler + float samplerInitInRadius; // radius for gathering positive instances during init + int samplerInitMaxNegNum; // # negative samples to use during init + float samplerSearchWinSize; // size of search window + float samplerTrackInRadius; // radius for gathering positive instances during tracking + int samplerTrackMaxPosNum; // # positive samples to use during tracking + int samplerTrackMaxNegNum; // # negative samples to use during tracking + + int featureSetNumFeatures; // #features + + void read( const FileNode& fn ); + void write( FileStorage& fs ) const; + }; + + /** + * \brief TrackerMIL Constructor + * \param parameters TrackerMIL parameters + */ + TrackerMIL( const TrackerMIL::Params ¶meters = TrackerMIL::Params() ); + + virtual ~TrackerMIL(); + + void read( const FileNode& fn ); + void write( FileStorage& fs ) const; + + protected: + + bool initImpl( const Mat& image, const Rect& boundingBox ); + bool updateImpl( const Mat& image, Rect& boundingBox ); + void compute_integral( const Mat & img, Mat & ii_img ); + + Params params; + AlgorithmInfo* info() const; + +}; + +/** + \brief TrackerBoosting implementation. 
+ For more details see H Grabner, M Grabner, H Bischof, Real-time tracking via on-line boosting + */ +class CV_EXPORTS_W TrackerBoosting : public Tracker +{ + public: + struct CV_EXPORTS Params + { + Params(); + int numClassifiers; //the number of classifiers to use in a OnlineBoosting algorithm + float samplerOverlap; //search region parameters to use in a OnlineBoosting algorithm + float samplerSearchFactor; // search region parameters to use in a OnlineBoosting algorithm + int iterationInit; //the initial iterations + int featureSetNumFeatures; // #features + /** + * \brief Read parameters from file + */ + void read( const FileNode& fn ); + + /** + * \brief Write parameters in a file + */ + void write( FileStorage& fs ) const; + }; + + /** + * \brief TrackerBoosting Constructor + * \param parameters TrackerBoosting parameters + */ + TrackerBoosting( const TrackerBoosting::Params ¶meters = TrackerBoosting::Params() ); + + virtual ~TrackerBoosting(); + + void read( const FileNode& fn ); + void write( FileStorage& fs ) const; + + protected: + + bool initImpl( const Mat& image, const Rect& boundingBox ); + bool updateImpl( const Mat& image, Rect& boundingBox ); + + Params params; + AlgorithmInfo* info() const; +}; + +} /* namespace cv */ + +#endif diff --git a/modules/tracking/include/opencv2/tracking/tracking.hpp b/modules/tracking/include/opencv2/tracking/tracking.hpp new file mode 100644 index 000000000..eb098e27b --- /dev/null +++ b/modules/tracking/include/opencv2/tracking/tracking.hpp @@ -0,0 +1,46 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. 
+ // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#ifdef __OPENCV_BUILD +#error this is a compatibility header which should not be used inside the OpenCV library +#endif + +#include "opencv2/tracking.hpp" diff --git a/modules/tracking/perf/perf_main.cpp b/modules/tracking/perf/perf_main.cpp new file mode 100644 index 000000000..de3cc39b7 --- /dev/null +++ b/modules/tracking/perf/perf_main.cpp @@ -0,0 +1,3 @@ +#include "perf_precomp.hpp" + +CV_PERF_TEST_MAIN(tracking) diff --git a/modules/tracking/perf/perf_precomp.hpp b/modules/tracking/perf/perf_precomp.hpp new file mode 100644 index 000000000..8c4987a5d --- /dev/null +++ b/modules/tracking/perf/perf_precomp.hpp @@ -0,0 +1,21 @@ +#ifdef __GNUC__ +# pragma GCC diagnostic ignored "-Wmissing-declarations" +# if defined __clang__ || defined __APPLE__ +# pragma GCC diagnostic ignored "-Wmissing-prototypes" +# pragma GCC diagnostic ignored "-Wextra" +# endif +#endif + +#ifndef __OPENCV_TRACKING_PRECOMP_HPP__ +#define __OPENCV_TRACKING_PRECOMP_HPP__ + +#include "opencv2/ts.hpp" +#include +#include +#include + +#ifdef GTEST_CREATE_SHARED_LIBRARY +#error no modules except ts should have GTEST_CREATE_SHARED_LIBRARY defined +#endif + +#endif diff --git a/modules/tracking/perf/perf_tracking.cpp b/modules/tracking/perf/perf_tracking.cpp new file mode 100644 index 000000000..c143a62ce --- /dev/null +++ b/modules/tracking/perf/perf_tracking.cpp @@ -0,0 +1,46 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. 
+ // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "perf_precomp.hpp" + +using namespace std; +using namespace cv; +using namespace perf; diff --git a/modules/tracking/samples/tracker.cpp b/modules/tracking/samples/tracker.cpp new file mode 100644 index 000000000..80ef98f90 --- /dev/null +++ b/modules/tracking/samples/tracker.cpp @@ -0,0 +1,148 @@ +#include +#include +#include +#include + +using namespace std; +using namespace cv; + +static Mat image; +static Rect boundingBox; +static bool paused; +static bool selectObject = false; +static bool startSelection = false; + +static const char* keys = +{ "{@tracker_algorithm | | tracker algorithm }" + "{@video_name | | video name }" }; + +static void help() +{ + cout << "\nThis example shows the functionality of \"Long-term optical tracking API\"" + "-- pause video [p] and draw a bounding box around the target to start the tracker\n" + "Call:\n" + "./tracker \n" + << endl; + + cout << "\n\nHot keys: \n" + "\tq - quit the program\n" + "\tp - pause video\n"; +} + +static void onMouse( int event, int x, int y, int, void* ) +{ + if( !selectObject ) + { + switch ( event ) + { + case EVENT_LBUTTONDOWN: + //set origin of the bounding box + startSelection = true; + boundingBox.x = x; + boundingBox.y = y; + break; + case EVENT_LBUTTONUP: + //sei with and height of the bounding box + boundingBox.width = std::abs( x - boundingBox.x ); + boundingBox.height = std::abs( y - boundingBox.y ); + paused = false; + selectObject = true; + break; + case EVENT_MOUSEMOVE: + + if( startSelection && !selectObject ) + { + //draw the bounding box + Mat currentFrame; + image.copyTo( currentFrame ); + rectangle( currentFrame, Point( boundingBox.x, boundingBox.y ), Point( x, y ), Scalar( 255, 0, 0 ), 2, 1 ); + imshow( "Tracking API", currentFrame ); + } + break; + } + } +} + +int main( int argc, char** argv ) +{ + CommandLineParser parser( argc, argv, keys ); + + String tracker_algorithm = parser.get( 0 ); + String video_name = parser.get( 1 ); + + if( 
tracker_algorithm.empty() || video_name.empty() ) + { + help(); + return -1; + } + + //open the capture + VideoCapture cap; + cap.open( video_name ); + + if( !cap.isOpened() ) + { + help(); + cout << "***Could not initialize capturing...***\n"; + cout << "Current parameter's value: \n"; + parser.printMessage(); + return -1; + } + + Mat frame; + paused = true; + namedWindow( "Tracking API", 1 ); + setMouseCallback( "Tracking API", onMouse, 0 ); + + //instantiates the specific Tracker + Ptr tracker = Tracker::create( tracker_algorithm ); + if( tracker == NULL ) + { + cout << "***Error in the instantiation of the tracker...***\n"; + return -1; + } + + //get the first frame + cap >> frame; + frame.copyTo( image ); + imshow( "Tracking API", image ); + + bool initialized = false; + for ( ;; ) + { + if( !paused ) + { + cap >> frame; + frame.copyTo( image ); + + if( !initialized && selectObject ) + { + //initializes the tracker + if( !tracker->init( frame, boundingBox ) ) + { + cout << "***Could not initialize tracker...***\n"; + return -1; + } + initialized = true; + } + else if( initialized ) + { + //updates the tracker + if( tracker->update( frame, boundingBox ) ) + { + rectangle( image, boundingBox, Scalar( 255, 0, 0 ), 2, 1 ); + } + } + imshow( "Tracking API", image ); + } + + char c = (char) waitKey( 2 ); + if( c == 'q' ) + break; + if( c == 'p' ) + paused = !paused; + + } + + return 0; +} diff --git a/modules/tracking/src/feature.cpp b/modules/tracking/src/feature.cpp new file mode 100644 index 000000000..64995a640 --- /dev/null +++ b/modules/tracking/src/feature.cpp @@ -0,0 +1,1072 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. 
+ // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" +#include "opencv2/tracking/feature.hpp" + +namespace cv +{ + +/* + * TODO This implementation is based on apps/traincascade/ + * TODO Changed CvHaarEvaluator based on ADABOOSTING implementation (Grabner et al.) 
+ */ + +CvParams::CvParams() : + name( "params" ) +{ +} +void CvParams::printDefaults() const +{ + std::cout << "--" << name << "--" << std::endl; +} +void CvParams::printAttrs() const +{ +} +bool CvParams::scanAttr( const std::string, const std::string ) +{ + return false; +} + +//---------------------------- FeatureParams -------------------------------------- + +CvFeatureParams::CvFeatureParams() : + maxCatCount( 0 ), + featSize( 1 ), + numFeatures( 1 ) +{ + name = CC_FEATURE_PARAMS; +} + +void CvFeatureParams::init( const CvFeatureParams& fp ) +{ + maxCatCount = fp.maxCatCount; + featSize = fp.featSize; + numFeatures = fp.numFeatures; +} + +void CvFeatureParams::write( FileStorage &fs ) const +{ + fs << CC_MAX_CAT_COUNT << maxCatCount; + fs << CC_FEATURE_SIZE << featSize; + fs << CC_NUM_FEATURES << numFeatures; +} + +bool CvFeatureParams::read( const FileNode &node ) +{ + if( node.empty() ) + return false; + maxCatCount = node[CC_MAX_CAT_COUNT]; + featSize = node[CC_FEATURE_SIZE]; + numFeatures = node[CC_NUM_FEATURES]; + return ( maxCatCount >= 0 && featSize >= 1 ); +} + +Ptr CvFeatureParams::create( int featureType ) +{ + return featureType == HAAR ? Ptr( new CvHaarFeatureParams ) : featureType == LBP ? Ptr( new CvLBPFeatureParams ) : + featureType == HOG ? 
Ptr( new CvHOGFeatureParams ) : Ptr(); +} + +//------------------------------------- FeatureEvaluator --------------------------------------- + +void CvFeatureEvaluator::init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize ) +{ + CV_Assert( _maxSampleCount > 0 ); + featureParams = (CvFeatureParams *) _featureParams; + winSize = _winSize; + numFeatures = _featureParams->numFeatures; + cls.create( (int) _maxSampleCount, 1, CV_32FC1 ); + generateFeatures(); +} + +void CvFeatureEvaluator::setImage( const Mat &img, uchar clsLabel, int idx ) +{ + winSize.width = img.cols; + winSize.height = img.rows; + //CV_Assert( img.cols == winSize.width ); + //CV_Assert( img.rows == winSize.height ); + CV_Assert( idx < cls.rows ); + cls.ptr( idx )[0] = clsLabel; +} + +Ptr CvFeatureEvaluator::create( int type ) +{ + return type == CvFeatureParams::HAAR ? Ptr( new CvHaarEvaluator ) : + type == CvFeatureParams::LBP ? Ptr( new CvLBPEvaluator ) : + type == CvFeatureParams::HOG ? Ptr( new CvHOGEvaluator ) : Ptr(); +} + +CvHaarFeatureParams::CvHaarFeatureParams() +{ + name = HFP_NAME; + isIntegral = false; +} + +void CvHaarFeatureParams::init( const CvFeatureParams& fp ) +{ + CvFeatureParams::init( fp ); + isIntegral = ( (const CvHaarFeatureParams&) fp ).isIntegral; +} + +void CvHaarFeatureParams::write( FileStorage &fs ) const +{ + CvFeatureParams::write( fs ); + fs << CC_ISINTEGRAL << isIntegral; +} + +bool CvHaarFeatureParams::read( const FileNode &node ) +{ + if( !CvFeatureParams::read( node ) ) + return false; + + FileNode rnode = node[CC_ISINTEGRAL]; + if( !rnode.isString() ) + return false; + String intStr; + rnode >> intStr; + isIntegral = !intStr.compare( "0" ) ? 
false : !true; + return true; +} + +void CvHaarFeatureParams::printDefaults() const +{ + CvFeatureParams::printDefaults(); + std::cout << "isIntegral: false" << std::endl; +} + +void CvHaarFeatureParams::printAttrs() const +{ + CvFeatureParams::printAttrs(); + std::string int_str = isIntegral == true ? "true" : "false"; + std::cout << "isIntegral: " << int_str << std::endl; +} + +bool CvHaarFeatureParams::scanAttr( const std::string /*prmName*/, const std::string /*val*/) +{ + + return true; +} + +//--------------------- HaarFeatureEvaluator ---------------- + +void CvHaarEvaluator::init( const CvFeatureParams *_featureParams, int /*_maxSampleCount*/, Size _winSize ) +{ + int cols = ( _winSize.width + 1 ) * ( _winSize.height + 1 ); + sum.create( (int) 1, cols, CV_32SC1 ); + isIntegral = ( (CvHaarFeatureParams*) _featureParams )->isIntegral; + CvFeatureEvaluator::init( _featureParams, 1, _winSize ); +} + +void CvHaarEvaluator::setImage( const Mat& img, uchar /*clsLabel*/, int /*idx*/) +{ + CV_DbgAssert( !sum.empty() ); + + winSize.width = img.cols; + winSize.height = img.rows; + + CvFeatureEvaluator::setImage( img, 1, 0 ); + if( !isIntegral ) + { + std::vector > ii_imgs; + compute_integral( img, ii_imgs ); + _ii_img = ii_imgs[0]; + } + else + { + _ii_img = img; + } +} + +void CvHaarEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) const +{ + _writeFeatures( features, fs, featureMap ); +} + +void CvHaarEvaluator::writeFeature( FileStorage &fs ) const +{ + String modeStr = isIntegral == true ? 
"1" : "0"; + CV_Assert( !modeStr.empty() ); + fs << "isIntegral" << modeStr; +} + +void CvHaarEvaluator::generateFeatures() +{ + generateFeatures( featureParams->numFeatures ); +} + +void CvHaarEvaluator::generateFeatures( int nFeatures ) +{ + for ( int i = 0; i < nFeatures; i++ ) + { + CvHaarEvaluator::FeatureHaar feature( Size( winSize.width, winSize.height ) ); + features.push_back( feature ); + } + +} + +const std::vector& CvHaarEvaluator::getFeatures() const +{ + return features; +} + +float CvHaarEvaluator::operator()( int featureIdx, int /*sampleIdx*/) +{ + /* TODO Added from MIL implementation */ + //return features[featureIdx].calc( _ii_img, Mat(), 0 ); + float res; + features.at( featureIdx ).eval( _ii_img, Rect( 0, 0, winSize.width, winSize.height ), &res ); + return res; +} + +void CvHaarEvaluator::setWinSize( Size patchSize ) +{ + winSize.width = patchSize.width; + winSize.height = patchSize.height; +} + +Size CvHaarEvaluator::setWinSize() const +{ + return Size( winSize.width, winSize.height ); +} + +#define INITSIGMA( numAreas ) ( static_cast( sqrt( 256.0f*256.0f / 12.0f * (numAreas) ) ) ); + +CvHaarEvaluator::FeatureHaar::FeatureHaar( Size patchSize ) +{ + try + { + generateRandomFeature( patchSize ); + } + catch ( ... 
) + { + throw; + } +} + +float CvHaarEvaluator::FeatureHaar::getInitMean() const +{ + return m_initMean; +} + +float CvHaarEvaluator::FeatureHaar::getInitSigma() const +{ + return m_initSigma; +} + +void CvHaarEvaluator::FeatureHaar::generateRandomFeature( Size patchSize ) +{ + cv::Point2i position; + Size baseDim; + Size sizeFactor; + int area; + + //Size minSize = Size( 3, 3 ); + int minArea = 9; + + bool valid = false; + while ( !valid ) + { + //choose position and scale + position.y = rand() % ( patchSize.height ); + position.x = rand() % ( patchSize.width ); + + baseDim.width = (int) ( ( 1 - sqrt( 1 - (float) rand() / RAND_MAX ) ) * patchSize.width ); + baseDim.height = (int) ( ( 1 - sqrt( 1 - (float) rand() / RAND_MAX ) ) * patchSize.height ); + + //select types + //float probType[11] = {0.0909f, 0.0909f, 0.0909f, 0.0909f, 0.0909f, 0.0909f, 0.0909f, 0.0909f, 0.0909f, 0.0909f, 0.0950f}; + float probType[11] = + { 0.2f, 0.2f, 0.2f, 0.2f, 0.2f, 0.2f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f }; + float prob = (float) rand() / RAND_MAX; + + if( prob < probType[0] ) + { + //check if feature is valid + sizeFactor.height = 2; + sizeFactor.width = 1; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 1; + m_numAreas = 2; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x; + m_areas[1].y = position.y + baseDim.height; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + + valid = true; + + } + else if( prob < probType[0] + probType[1] ) + { + //check if 
feature is valid + sizeFactor.height = 1; + sizeFactor.width = 2; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 2; + m_numAreas = 2; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x + baseDim.width; + m_areas[1].y = position.y; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + + } + else if( prob < probType[0] + probType[1] + probType[2] ) + { + //check if feature is valid + sizeFactor.height = 4; + sizeFactor.width = 1; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 3; + m_numAreas = 3; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -2; + m_weights[2] = 1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x; + m_areas[1].y = position.y + baseDim.height; + m_areas[1].height = 2 * baseDim.height; + m_areas[1].width = baseDim.width; + m_areas[2].y = position.y + 3 * baseDim.height; + m_areas[2].x = position.x; + m_areas[2].height = baseDim.height; + m_areas[2].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob < probType[0] + probType[1] + probType[2] + 
probType[3] ) + { + //check if feature is valid + sizeFactor.height = 1; + sizeFactor.width = 4; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 3; + m_numAreas = 3; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -2; + m_weights[2] = 1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x + baseDim.width; + m_areas[1].y = position.y; + m_areas[1].height = baseDim.height; + m_areas[1].width = 2 * baseDim.width; + m_areas[2].y = position.y; + m_areas[2].x = position.x + 3 * baseDim.width; + m_areas[2].height = baseDim.height; + m_areas[2].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob < probType[0] + probType[1] + probType[2] + probType[3] + probType[4] ) + { + //check if feature is valid + sizeFactor.height = 2; + sizeFactor.width = 2; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 5; + m_numAreas = 4; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -1; + m_weights[2] = -1; + m_weights[3] = 1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x + baseDim.width; + m_areas[1].y = position.y; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_areas[2].y = position.y + baseDim.height; + 
m_areas[2].x = position.x; + m_areas[2].height = baseDim.height; + m_areas[2].width = baseDim.width; + m_areas[3].y = position.y + baseDim.height; + m_areas[3].x = position.x + baseDim.width; + m_areas[3].height = baseDim.height; + m_areas[3].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob < probType[0] + probType[1] + probType[2] + probType[3] + probType[4] + probType[5] ) + { + //check if feature is valid + sizeFactor.height = 3; + sizeFactor.width = 3; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 6; + m_numAreas = 2; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -9; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = 3 * baseDim.height; + m_areas[0].width = 3 * baseDim.width; + m_areas[1].x = position.x + baseDim.width; + m_areas[1].y = position.y + baseDim.height; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_initMean = -8 * 128; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob < probType[0] + probType[1] + probType[2] + probType[3] + probType[4] + probType[5] + probType[6] ) + { + //check if feature is valid + sizeFactor.height = 3; + sizeFactor.width = 1; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 7; + m_numAreas = 3; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -2; + m_weights[2] = 1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + 
m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x; + m_areas[1].y = position.y + baseDim.height; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_areas[2].y = position.y + baseDim.height * 2; + m_areas[2].x = position.x; + m_areas[2].height = baseDim.height; + m_areas[2].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob < probType[0] + probType[1] + probType[2] + probType[3] + probType[4] + probType[5] + probType[6] + probType[7] ) + { + //check if feature is valid + sizeFactor.height = 1; + sizeFactor.width = 3; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + + if( area < minArea ) + continue; + + m_type = 8; + m_numAreas = 3; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -2; + m_weights[2] = 1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x + baseDim.width; + m_areas[1].y = position.y; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_areas[2].y = position.y; + m_areas[2].x = position.x + 2 * baseDim.width; + m_areas[2].height = baseDim.height; + m_areas[2].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob < probType[0] + probType[1] + probType[2] + probType[3] + probType[4] + probType[5] + probType[6] + probType[7] + probType[8] ) + { + //check if feature is valid + sizeFactor.height = 3; + sizeFactor.width = 3; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= 
patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 9; + m_numAreas = 2; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -2; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = 3 * baseDim.height; + m_areas[0].width = 3 * baseDim.width; + m_areas[1].x = position.x + baseDim.width; + m_areas[1].y = position.y + baseDim.height; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_initMean = 0; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob + < probType[0] + probType[1] + probType[2] + probType[3] + probType[4] + probType[5] + probType[6] + probType[7] + probType[8] + probType[9] ) + { + //check if feature is valid + sizeFactor.height = 3; + sizeFactor.width = 1; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 10; + m_numAreas = 3; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -1; + m_weights[2] = 1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x; + m_areas[1].y = position.y + baseDim.height; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_areas[2].y = position.y + baseDim.height * 2; + m_areas[2].x = position.x; + m_areas[2].height = baseDim.height; + m_areas[2].width = baseDim.width; + m_initMean = 128; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else if( prob + < probType[0] + probType[1] + probType[2] + probType[3] + probType[4] + probType[5] + probType[6] + probType[7] + 
probType[8] + probType[9] + + probType[10] ) + { + //check if feature is valid + sizeFactor.height = 1; + sizeFactor.width = 3; + if( position.y + baseDim.height * sizeFactor.height >= patchSize.height || position.x + baseDim.width * sizeFactor.width >= patchSize.width ) + continue; + area = baseDim.height * sizeFactor.height * baseDim.width * sizeFactor.width; + if( area < minArea ) + continue; + + m_type = 11; + m_numAreas = 3; + m_weights.resize( m_numAreas ); + m_weights[0] = 1; + m_weights[1] = -1; + m_weights[2] = 1; + m_areas.resize( m_numAreas ); + m_areas[0].x = position.x; + m_areas[0].y = position.y; + m_areas[0].height = baseDim.height; + m_areas[0].width = baseDim.width; + m_areas[1].x = position.x + baseDim.width; + m_areas[1].y = position.y; + m_areas[1].height = baseDim.height; + m_areas[1].width = baseDim.width; + m_areas[2].y = position.y; + m_areas[2].x = position.x + 2 * baseDim.width; + m_areas[2].height = baseDim.height; + m_areas[2].width = baseDim.width; + m_initMean = 128; + m_initSigma = INITSIGMA( m_numAreas ); + valid = true; + } + else + CV_Assert( false ); + } + + m_initSize = patchSize; + m_curSize = m_initSize; + m_scaleFactorWidth = m_scaleFactorHeight = 1.0f; + m_scaleAreas.resize( m_numAreas ); + m_scaleWeights.resize( m_numAreas ); + for ( int curArea = 0; curArea < m_numAreas; curArea++ ) + { + m_scaleAreas[curArea] = m_areas[curArea]; + m_scaleWeights[curArea] = (float) m_weights[curArea] / (float) ( m_areas[curArea].width * m_areas[curArea].height ); + } +} + +bool CvHaarEvaluator::FeatureHaar::eval( const Mat& image, Rect /*ROI*/, float* result ) const +{ + + *result = 0.0f; + + for ( int curArea = 0; curArea < m_numAreas; curArea++ ) + { + *result += (float) getSum( image, Rect( m_areas[curArea].x, m_areas[curArea].y, m_areas[curArea].width, m_areas[curArea].height ) ) + * m_scaleWeights[curArea]; + } + + /* + if( image->getUseVariance() ) + { + float variance = (float) image->getVariance( ROI ); + *result /= variance; + } + 
*/ + + return true; +} + +float CvHaarEvaluator::FeatureHaar::getSum( const Mat& image, Rect imageROI ) const +{ +// left upper Origin + int OriginX = imageROI.x; + int OriginY = imageROI.y; + +// Check and fix width and height + int Width = imageROI.width; + int Height = imageROI.height; + + if( OriginX + Width >= image.cols - 1 ) + Width = ( image.cols - 1 ) - OriginX; + if( OriginY + Height >= image.rows - 1 ) + Height = ( image.rows - 1 ) - OriginY; + + float value = 0; + int depth = image.depth(); + + if( depth == CV_8U || depth == CV_32S ) + value = image.at( OriginY + Height, OriginX + Width ) + image.at( OriginY, OriginX ) - image.at( OriginY, OriginX + Width ) + - image.at( OriginY + Height, OriginX ); + else if( depth == CV_64F ) + value = image.at( OriginY + Height, OriginX + Width ) + image.at( OriginY, OriginX ) + - image.at( OriginY, OriginX + Width ) - image.at( OriginY + Height, OriginX ); + else if( depth == CV_32F ) + value = image.at( OriginY + Height, OriginX + Width ) + image.at( OriginY, OriginX ) - image.at( OriginY, OriginX + Width ) + - image.at( OriginY + Height, OriginX ); + + return value; +} + +int CvHaarEvaluator::FeatureHaar::getNumAreas() +{ + return m_numAreas; +} + +const std::vector& CvHaarEvaluator::FeatureHaar::getWeights() const +{ + return m_weights; +} + +const std::vector& CvHaarEvaluator::FeatureHaar::getAreas() const +{ + return m_areas; +} + +CvHOGFeatureParams::CvHOGFeatureParams() +{ + maxCatCount = 0; + name = HOGF_NAME; + featSize = N_BINS * N_CELLS; +} + +void CvHOGEvaluator::init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize ) +{ + CV_Assert( _maxSampleCount > 0 ); + int cols = ( _winSize.width + 1 ) * ( _winSize.height + 1 ); + for ( int bin = 0; bin < N_BINS; bin++ ) + { + hist.push_back( Mat( _maxSampleCount, cols, CV_32FC1 ) ); + } + normSum.create( (int) _maxSampleCount, cols, CV_32FC1 ); + CvFeatureEvaluator::init( _featureParams, _maxSampleCount, _winSize ); +} + +void 
CvHOGEvaluator::setImage( const Mat &img, uchar clsLabel, int idx ) +{ + CV_DbgAssert( !hist.empty()); + CvFeatureEvaluator::setImage( img, clsLabel, idx ); + std::vector integralHist; + for ( int bin = 0; bin < N_BINS; bin++ ) + { + integralHist.push_back( Mat( winSize.height + 1, winSize.width + 1, hist[bin].type(), hist[bin].ptr( (int) idx ) ) ); + } + Mat integralNorm( winSize.height + 1, winSize.width + 1, normSum.type(), normSum.ptr( (int) idx ) ); + integralHistogram( img, integralHist, integralNorm, (int) N_BINS ); +} + +//void CvHOGEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) const +//{ +// _writeFeatures( features, fs, featureMap ); +//} + +void CvHOGEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) const +{ + int featIdx; + int componentIdx; + const Mat_& featureMap_ = (const Mat_&) featureMap; + fs << FEATURES << "["; + for ( int fi = 0; fi < featureMap.cols; fi++ ) + if( featureMap_( 0, fi ) >= 0 ) + { + fs << "{"; + featIdx = fi / getFeatureSize(); + componentIdx = fi % getFeatureSize(); + features[featIdx].write( fs, componentIdx ); + fs << "}"; + } + fs << "]"; +} + +void CvHOGEvaluator::generateFeatures() +{ + int offset = winSize.width + 1; + Size blockStep; + int x, y, t, w, h; + + for ( t = 8; t <= winSize.width / 2; t += 8 ) //t = size of a cell. 
blocksize = 4*cellSize + { + blockStep = Size( 4, 4 ); + w = 2 * t; //width of a block + h = 2 * t; //height of a block + for ( x = 0; x <= winSize.width - w; x += blockStep.width ) + { + for ( y = 0; y <= winSize.height - h; y += blockStep.height ) + { + features.push_back( Feature( offset, x, y, t, t ) ); + } + } + w = 2 * t; + h = 4 * t; + for ( x = 0; x <= winSize.width - w; x += blockStep.width ) + { + for ( y = 0; y <= winSize.height - h; y += blockStep.height ) + { + features.push_back( Feature( offset, x, y, t, 2 * t ) ); + } + } + w = 4 * t; + h = 2 * t; + for ( x = 0; x <= winSize.width - w; x += blockStep.width ) + { + for ( y = 0; y <= winSize.height - h; y += blockStep.height ) + { + features.push_back( Feature( offset, x, y, 2 * t, t ) ); + } + } + } + + numFeatures = (int) features.size(); +} + +CvHOGEvaluator::Feature::Feature() +{ + for ( int i = 0; i < N_CELLS; i++ ) + { + rect[i] = Rect( 0, 0, 0, 0 ); + } +} + +CvHOGEvaluator::Feature::Feature( int offset, int x, int y, int cellW, int cellH ) +{ + rect[0] = Rect( x, y, cellW, cellH ); //cell0 + rect[1] = Rect( x + cellW, y, cellW, cellH ); //cell1 + rect[2] = Rect( x, y + cellH, cellW, cellH ); //cell2 + rect[3] = Rect( x + cellW, y + cellH, cellW, cellH ); //cell3 + + for ( int i = 0; i < N_CELLS; i++ ) + { + CV_SUM_OFFSETS( fastRect[i].p0, fastRect[i].p1, fastRect[i].p2, fastRect[i].p3, rect[i], offset ); + } +} + +void CvHOGEvaluator::Feature::write( FileStorage &fs ) const +{ + fs << CC_RECTS << "["; + for ( int i = 0; i < N_CELLS; i++ ) + { + fs << "[:" << rect[i].x << rect[i].y << rect[i].width << rect[i].height << "]"; + } + fs << "]"; +} + +//cell and bin idx writing +//void CvHOGEvaluator::Feature::write(FileStorage &fs, int varIdx) const +//{ +// int featComponent = varIdx % (N_CELLS * N_BINS); +// int cellIdx = featComponent / N_BINS; +// int binIdx = featComponent % N_BINS; +// +// fs << CC_RECTS << "[:" << rect[cellIdx].x << rect[cellIdx].y << +// rect[cellIdx].width << 
rect[cellIdx].height << binIdx << "]"; +//} + +//cell[0] and featComponent idx writing. By cell[0] it's possible to recover all block +//All block is nessesary for block normalization +void CvHOGEvaluator::Feature::write( FileStorage &fs, int featComponentIdx ) const +{ + fs << CC_RECT << "[:" << rect[0].x << rect[0].y << rect[0].width << rect[0].height << featComponentIdx << "]"; +} + +void CvHOGEvaluator::integralHistogram( const Mat &img, std::vector &histogram, Mat &norm, int nbins ) const +{ + CV_Assert( img.type() == CV_8U || img.type() == CV_8UC3 ); + int x, y, binIdx; + + Size gradSize( img.size() ); + Size histSize( histogram[0].size() ); + Mat grad( gradSize, CV_32F ); + Mat qangle( gradSize, CV_8U ); + + AutoBuffer mapbuf( gradSize.width + gradSize.height + 4 ); + int* xmap = (int*) mapbuf + 1; + int* ymap = xmap + gradSize.width + 2; + + const int borderType = (int) BORDER_REPLICATE; + + for ( x = -1; x < gradSize.width + 1; x++ ) + xmap[x] = borderInterpolate( x, gradSize.width, borderType ); + for ( y = -1; y < gradSize.height + 1; y++ ) + ymap[y] = borderInterpolate( y, gradSize.height, borderType ); + + int width = gradSize.width; + AutoBuffer _dbuf( width * 4 ); + float* dbuf = _dbuf; + Mat Dx( 1, width, CV_32F, dbuf ); + Mat Dy( 1, width, CV_32F, dbuf + width ); + Mat Mag( 1, width, CV_32F, dbuf + width * 2 ); + Mat Angle( 1, width, CV_32F, dbuf + width * 3 ); + + float angleScale = (float) ( nbins / CV_PI ); + + for ( y = 0; y < gradSize.height; y++ ) + { + const uchar* currPtr = img.data + img.step * ymap[y]; + const uchar* prevPtr = img.data + img.step * ymap[y - 1]; + const uchar* nextPtr = img.data + img.step * ymap[y + 1]; + float* gradPtr = (float*) grad.ptr( y ); + uchar* qanglePtr = (uchar*) qangle.ptr( y ); + + for ( x = 0; x < width; x++ ) + { + dbuf[x] = (float) ( currPtr[xmap[x + 1]] - currPtr[xmap[x - 1]] ); + dbuf[width + x] = (float) ( nextPtr[xmap[x]] - prevPtr[xmap[x]] ); + } + cartToPolar( Dx, Dy, Mag, Angle, false ); + for ( x 
= 0; x < width; x++ ) + { + float mag = dbuf[x + width * 2]; + float angle = dbuf[x + width * 3]; + angle = angle * angleScale - 0.5f; + int bidx = cvFloor( angle ); + angle -= bidx; + if( bidx < 0 ) + bidx += nbins; + else if( bidx >= nbins ) + bidx -= nbins; + + qanglePtr[x] = (uchar) bidx; + gradPtr[x] = mag; + } + } + integral( grad, norm, grad.depth() ); + + float* histBuf; + const float* magBuf; + const uchar* binsBuf; + + int binsStep = (int) ( qangle.step / sizeof(uchar) ); + int histStep = (int) ( histogram[0].step / sizeof(float) ); + int magStep = (int) ( grad.step / sizeof(float) ); + for ( binIdx = 0; binIdx < nbins; binIdx++ ) + { + histBuf = (float*) histogram[binIdx].data; + magBuf = (const float*) grad.data; + binsBuf = (const uchar*) qangle.data; + + memset( histBuf, 0, histSize.width * sizeof ( histBuf[0] ) ); + histBuf += histStep + 1; + for ( y = 0; y < qangle.rows; y++ ) + { + histBuf[-1] = 0.f; + float strSum = 0.f; + for ( x = 0; x < qangle.cols; x++ ) + { + if( binsBuf[x] == binIdx ) + strSum += magBuf[x]; + histBuf[x] = histBuf[-histStep + x] + strSum; + } + histBuf += histStep; + binsBuf += binsStep; + magBuf += magStep; + } + } +} + +CvLBPFeatureParams::CvLBPFeatureParams() +{ + maxCatCount = 256; + name = LBPF_NAME; +} + +void CvLBPEvaluator::init( const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize ) +{ + CV_Assert( _maxSampleCount > 0 ); + sum.create( (int) _maxSampleCount, ( _winSize.width + 1 ) * ( _winSize.height + 1 ), CV_32SC1 ); + CvFeatureEvaluator::init( _featureParams, _maxSampleCount, _winSize ); +} + +void CvLBPEvaluator::setImage( const Mat &img, uchar clsLabel, int idx ) +{ + CV_DbgAssert( !sum.empty() ); + CvFeatureEvaluator::setImage( img, clsLabel, idx ); + Mat innSum( winSize.height + 1, winSize.width + 1, sum.type(), sum.ptr( (int) idx ) ); + integral( img, innSum ); +} + +void CvLBPEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) const +{ + _writeFeatures( features, fs, 
featureMap ); +} + +void CvLBPEvaluator::generateFeatures() +{ + int offset = winSize.width + 1; + for ( int x = 0; x < winSize.width; x++ ) + for ( int y = 0; y < winSize.height; y++ ) + for ( int w = 1; w <= winSize.width / 3; w++ ) + for ( int h = 1; h <= winSize.height / 3; h++ ) + if( ( x + 3 * w <= winSize.width ) && ( y + 3 * h <= winSize.height ) ) + features.push_back( Feature( offset, x, y, w, h ) ); + numFeatures = (int) features.size(); +} + +CvLBPEvaluator::Feature::Feature() +{ + rect = Rect( 0, 0, 0, 0 ); +} + +CvLBPEvaluator::Feature::Feature( int offset, int x, int y, int _blockWidth, int _blockHeight ) +{ + Rect tr = rect = Rect( x, y, _blockWidth, _blockHeight ); + CV_SUM_OFFSETS( p[0], p[1], p[4], p[5], tr, offset ) + tr.x += 2 * rect.width; + CV_SUM_OFFSETS( p[2], p[3], p[6], p[7], tr, offset ) + tr.y += 2 * rect.height; + CV_SUM_OFFSETS( p[10], p[11], p[14], p[15], tr, offset ) + tr.x -= 2 * rect.width; + CV_SUM_OFFSETS( p[8], p[9], p[12], p[13], tr, offset ) +} + +void CvLBPEvaluator::Feature::write( FileStorage &fs ) const +{ + fs << CC_RECT << "[:" << rect.x << rect.y << rect.width << rect.height << "]"; +} + +} /* namespace cv */ diff --git a/modules/tracking/src/onlineBoosting.cpp b/modules/tracking/src/onlineBoosting.cpp new file mode 100644 index 000000000..b1b185168 --- /dev/null +++ b/modules/tracking/src/onlineBoosting.cpp @@ -0,0 +1,735 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. 
+ // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "precomp.hpp" +#include "opencv2/tracking/onlineBoosting.hpp" + +namespace cv +{ + +StrongClassifierDirectSelection::StrongClassifierDirectSelection( int numBaseClf, int numWeakClf, Size patchSz, const Rect& sampleROI, + bool useFeatureEx, int iterationInit ) +{ + //StrongClassifier + numBaseClassifier = numBaseClf; + numAllWeakClassifier = numWeakClf + iterationInit; + iterInit = iterationInit; + numWeakClassifier = numWeakClf; + + alpha.assign( numBaseClf, 0 ); + + patchSize = patchSz; + useFeatureExchange = useFeatureEx; + + m_errorMask.resize( numAllWeakClassifier ); + m_errors.resize( numAllWeakClassifier ); + m_sumErrors.resize( numAllWeakClassifier ); + + ROI = sampleROI; + detector = new Detector( this ); +} + +void StrongClassifierDirectSelection::initBaseClassifier() +{ + baseClassifier = new BaseClassifier*[numBaseClassifier]; + baseClassifier[0] = new BaseClassifier( numWeakClassifier, iterInit ); + + for ( int curBaseClassifier = 1; curBaseClassifier < numBaseClassifier; curBaseClassifier++ ) + baseClassifier[curBaseClassifier] = new BaseClassifier( numWeakClassifier, iterInit, baseClassifier[0]->getReferenceWeakClassifier() ); +} + +StrongClassifierDirectSelection::~StrongClassifierDirectSelection() +{ + for ( int curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ ) + delete baseClassifier[curBaseClassifier]; + delete[] baseClassifier; + alpha.clear(); + delete detector; +} + +Size StrongClassifierDirectSelection::getPatchSize() const +{ + return patchSize; +} + +Rect StrongClassifierDirectSelection::getROI() const +{ + return ROI; +} + +float StrongClassifierDirectSelection::classifySmooth( const std::vector& images, const Rect& sampleROI, int& idx ) +{ + ROI = sampleROI; + idx = 0; + float confidence = 0; + //detector->classify (image, patches); + detector->classifySmooth( images ); + + //move to best detection + if( detector->getNumDetections() <= 0 ) + { + confidence = 0; + return confidence; 
+ } + idx = detector->getPatchIdxOfBestDetection(); + confidence = detector->getConfidenceOfBestDetection(); + + return confidence; +} + +bool StrongClassifierDirectSelection::getUseFeatureExchange() const +{ + return useFeatureExchange; +} + +int StrongClassifierDirectSelection::getReplacedClassifier() const +{ + return replacedClassifier; +} + +int StrongClassifierDirectSelection::getSwappedClassifier() const +{ + return swappedClassifier; +} + +bool StrongClassifierDirectSelection::update( const Mat& image, int target, float importance ) +{ + m_errorMask.assign( numAllWeakClassifier, 0 ); + m_errors.assign( numAllWeakClassifier, 0 ); + m_sumErrors.assign( numAllWeakClassifier, 0 ); + + baseClassifier[0]->trainClassifier( image, target, importance, m_errorMask ); + for ( int curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ ) + { + int selectedClassifier = baseClassifier[curBaseClassifier]->selectBestClassifier( m_errorMask, importance, m_errors ); + + if( m_errors[selectedClassifier] >= 0.5 ) + alpha[curBaseClassifier] = 0; + else + alpha[curBaseClassifier] = logf( ( 1.0f - m_errors[selectedClassifier] ) / m_errors[selectedClassifier] ); + + if( m_errorMask[selectedClassifier] ) + importance *= (float) sqrt( ( 1.0f - m_errors[selectedClassifier] ) / m_errors[selectedClassifier] ); + else + importance *= (float) sqrt( m_errors[selectedClassifier] / ( 1.0f - m_errors[selectedClassifier] ) ); + + //weight limitation + //if (importance > 100) importance = 100; + + //sum up errors + for ( int curWeakClassifier = 0; curWeakClassifier < numAllWeakClassifier; curWeakClassifier++ ) + { + if( m_errors[curWeakClassifier] != FLT_MAX && m_sumErrors[curWeakClassifier] >= 0 ) + m_sumErrors[curWeakClassifier] += m_errors[curWeakClassifier]; + } + + //mark feature as used + m_sumErrors[selectedClassifier] = -1; + m_errors[selectedClassifier] = FLT_MAX; + } + + if( useFeatureExchange ) + { + replacedClassifier = 
baseClassifier[0]->computeReplaceWeakestClassifier( m_sumErrors ); + swappedClassifier = baseClassifier[0]->getIdxOfNewWeakClassifier(); + } + + return true; +} + +void StrongClassifierDirectSelection::replaceWeakClassifier( int idx ) +{ + if( useFeatureExchange && idx >= 0 ) + { + baseClassifier[0]->replaceWeakClassifier( idx ); + for ( int curBaseClassifier = 1; curBaseClassifier < numBaseClassifier; curBaseClassifier++ ) + baseClassifier[curBaseClassifier]->replaceClassifierStatistic( baseClassifier[0]->getIdxOfNewWeakClassifier(), idx ); + } +} + +std::vector StrongClassifierDirectSelection::getSelectedWeakClassifier() +{ + std::vector selected; + int curBaseClassifier = 0; + for ( curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ ) + { + selected.push_back( baseClassifier[curBaseClassifier]->getSelectedClassifier() ); + } + return selected; +} + +float StrongClassifierDirectSelection::eval( const Mat& response ) +{ + float value = 0.0f; + int curBaseClassifier = 0; + + for ( curBaseClassifier = 0; curBaseClassifier < numBaseClassifier; curBaseClassifier++ ) + value += baseClassifier[curBaseClassifier]->eval( response ) * alpha[curBaseClassifier]; + + return value; +} + +int StrongClassifierDirectSelection::getNumBaseClassifier() +{ + return numBaseClassifier; +} + +BaseClassifier::BaseClassifier( int numWeakClassifier, int iterationInit ) +{ + this->m_numWeakClassifier = numWeakClassifier; + this->m_iterationInit = iterationInit; + + weakClassifier = new WeakClassifierHaarFeature*[numWeakClassifier + iterationInit]; + m_idxOfNewWeakClassifier = numWeakClassifier; + + generateRandomClassifier(); + + m_referenceWeakClassifier = false; + m_selectedClassifier = 0; + + m_wCorrect.assign( numWeakClassifier + iterationInit, 0 ); + + m_wWrong.assign( numWeakClassifier + iterationInit, 0 ); + + for ( int curWeakClassifier = 0; curWeakClassifier < numWeakClassifier + iterationInit; curWeakClassifier++ ) + m_wWrong[curWeakClassifier] = 
m_wCorrect[curWeakClassifier] = 1; +} + +BaseClassifier::BaseClassifier( int numWeakClassifier, int iterationInit, WeakClassifierHaarFeature** weakCls ) +{ + m_numWeakClassifier = numWeakClassifier; + m_iterationInit = iterationInit; + weakClassifier = weakCls; + m_referenceWeakClassifier = true; + m_selectedClassifier = 0; + m_idxOfNewWeakClassifier = numWeakClassifier; + + m_wCorrect.assign( numWeakClassifier + iterationInit, 0 ); + m_wWrong.assign( numWeakClassifier + iterationInit, 0 ); + + for ( int curWeakClassifier = 0; curWeakClassifier < numWeakClassifier + iterationInit; curWeakClassifier++ ) + m_wWrong[curWeakClassifier] = m_wCorrect[curWeakClassifier] = 1; +} + +BaseClassifier::~BaseClassifier() +{ + if( !m_referenceWeakClassifier ) + { + for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ ) + delete weakClassifier[curWeakClassifier]; + + delete[] weakClassifier; + } + m_wCorrect.clear(); + m_wWrong.clear(); +} + +void BaseClassifier::generateRandomClassifier() +{ + for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ ) + { + weakClassifier[curWeakClassifier] = new WeakClassifierHaarFeature(); + } +} + +int BaseClassifier::eval( const Mat& image ) +{ + return weakClassifier[m_selectedClassifier]->eval( image.at( m_selectedClassifier ) ); +} + +int BaseClassifier::getSelectedClassifier() const +{ + return m_selectedClassifier; +} + +void BaseClassifier::trainClassifier( const Mat& image, int target, float importance, std::vector& errorMask ) +{ + + //get poisson value + double A = 1; + int K = 0; + int K_max = 10; + while ( 1 ) + { + double U_k = (double) rand() / RAND_MAX; + A *= U_k; + if( K > K_max || A < exp( -importance ) ) + break; + K++; + } + + for ( int curK = 0; curK <= K; curK++ ) + { + for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ ) + { + 
errorMask[curWeakClassifier] = weakClassifier[curWeakClassifier]->update( image.at( curWeakClassifier ), target ); + } + } + +} + +float BaseClassifier::getError( int curWeakClassifier ) +{ + if( curWeakClassifier == -1 ) + curWeakClassifier = m_selectedClassifier; + return m_wWrong[curWeakClassifier] / ( m_wWrong[curWeakClassifier] + m_wCorrect[curWeakClassifier] ); +} + +int BaseClassifier::selectBestClassifier( std::vector& errorMask, float importance, std::vector & errors ) +{ + float minError = FLT_MAX; + int tmp_selectedClassifier = m_selectedClassifier; + + for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ ) + { + if( errorMask[curWeakClassifier] ) + { + m_wWrong[curWeakClassifier] += importance; + } + else + { + m_wCorrect[curWeakClassifier] += importance; + } + + if( errors[curWeakClassifier] == FLT_MAX ) + continue; + + errors[curWeakClassifier] = m_wWrong[curWeakClassifier] / ( m_wWrong[curWeakClassifier] + m_wCorrect[curWeakClassifier] ); + + /*if(errors[curWeakClassifier] < 0.001 || !(errors[curWeakClassifier]>0.0)) + { + errors[curWeakClassifier] = 0.001; + } + + if(errors[curWeakClassifier] >= 1.0) + errors[curWeakClassifier] = 0.999; + + assert (errors[curWeakClassifier] > 0.0); + assert (errors[curWeakClassifier] < 1.0);*/ + + if( curWeakClassifier < m_numWeakClassifier ) + { + if( errors[curWeakClassifier] < minError ) + { + minError = errors[curWeakClassifier]; + tmp_selectedClassifier = curWeakClassifier; + } + } + } + + m_selectedClassifier = tmp_selectedClassifier; + return m_selectedClassifier; +} + +void BaseClassifier::getErrors( float* errors ) +{ + for ( int curWeakClassifier = 0; curWeakClassifier < m_numWeakClassifier + m_iterationInit; curWeakClassifier++ ) + { + if( errors[curWeakClassifier] == FLT_MAX ) + continue; + + errors[curWeakClassifier] = m_wWrong[curWeakClassifier] / ( m_wWrong[curWeakClassifier] + m_wCorrect[curWeakClassifier] ); + + CV_Assert( 
errors[curWeakClassifier] > 0 ); + } +} + +void BaseClassifier::replaceWeakClassifier( int index ) +{ + delete weakClassifier[index]; + weakClassifier[index] = weakClassifier[m_idxOfNewWeakClassifier]; + m_wWrong[index] = m_wWrong[m_idxOfNewWeakClassifier]; + m_wWrong[m_idxOfNewWeakClassifier] = 1; + m_wCorrect[index] = m_wCorrect[m_idxOfNewWeakClassifier]; + m_wCorrect[m_idxOfNewWeakClassifier] = 1; + + weakClassifier[m_idxOfNewWeakClassifier] = new WeakClassifierHaarFeature(); +} + +int BaseClassifier::computeReplaceWeakestClassifier( const std::vector & errors ) +{ + float maxError = 0.0f; + int index = -1; + + //search the classifier with the largest error + for ( int curWeakClassifier = m_numWeakClassifier - 1; curWeakClassifier >= 0; curWeakClassifier-- ) + { + if( errors[curWeakClassifier] > maxError ) + { + maxError = errors[curWeakClassifier]; + index = curWeakClassifier; + } + } + + CV_Assert( index > -1 ); + CV_Assert( index != m_selectedClassifier ); + + //replace + m_idxOfNewWeakClassifier++; + if( m_idxOfNewWeakClassifier == m_numWeakClassifier + m_iterationInit ) + m_idxOfNewWeakClassifier = m_numWeakClassifier; + + if( maxError > errors[m_idxOfNewWeakClassifier] ) + { + return index; + } + else + return -1; + +} + +void BaseClassifier::replaceClassifierStatistic( int sourceIndex, int targetIndex ) +{ + CV_Assert( targetIndex >= 0 ); + CV_Assert( targetIndex != m_selectedClassifier ); + CV_Assert( targetIndex < m_numWeakClassifier ); + + //replace + m_wWrong[targetIndex] = m_wWrong[sourceIndex]; + m_wWrong[sourceIndex] = 1.0f; + m_wCorrect[targetIndex] = m_wCorrect[sourceIndex]; + m_wCorrect[sourceIndex] = 1.0f; +} + +EstimatedGaussDistribution::EstimatedGaussDistribution() +{ + m_mean = 0; + m_sigma = 1; + this->m_P_mean = 1000; + this->m_R_mean = 0.01f; + this->m_P_sigma = 1000; + this->m_R_sigma = 0.01f; +} + +EstimatedGaussDistribution::EstimatedGaussDistribution( float P_mean, float R_mean, float P_sigma, float R_sigma ) +{ + m_mean = 0; + 
m_sigma = 1; + this->m_P_mean = P_mean; + this->m_R_mean = R_mean; + this->m_P_sigma = P_sigma; + this->m_R_sigma = R_sigma; +} + +EstimatedGaussDistribution::~EstimatedGaussDistribution() +{ +} + +void EstimatedGaussDistribution::update( float value ) +{ +//update distribution (mean and sigma) using a kalman filter for each + + float K; + float minFactor = 0.001f; + +//mean + + K = m_P_mean / ( m_P_mean + m_R_mean ); + if( K < minFactor ) + K = minFactor; + + m_mean = K * value + ( 1.0f - K ) * m_mean; + m_P_mean = m_P_mean * m_R_mean / ( m_P_mean + m_R_mean ); + + K = m_P_sigma / ( m_P_sigma + m_R_sigma ); + if( K < minFactor ) + K = minFactor; + + float tmp_sigma = K * ( m_mean - value ) * ( m_mean - value ) + ( 1.0f - K ) * m_sigma * m_sigma; + m_P_sigma = m_P_sigma * m_R_mean / ( m_P_sigma + m_R_sigma ); + + m_sigma = static_cast( sqrt( tmp_sigma ) ); + if( m_sigma <= 1.0f ) + m_sigma = 1.0f; + +} + +void EstimatedGaussDistribution::setValues( float mean, float sigma ) +{ + this->m_mean = mean; + this->m_sigma = sigma; +} + +float EstimatedGaussDistribution::getMean() +{ + return m_mean; +} + +float EstimatedGaussDistribution::getSigma() +{ + return m_sigma; +} + +WeakClassifierHaarFeature::WeakClassifierHaarFeature() +{ + sigma = 1; + mean = 0; + + EstimatedGaussDistribution* m_posSamples = new EstimatedGaussDistribution(); + EstimatedGaussDistribution* m_negSamples = new EstimatedGaussDistribution(); + generateRandomClassifier( m_posSamples, m_negSamples ); + + getInitialDistribution( (EstimatedGaussDistribution*) m_classifier->getDistribution( -1 ) ); + getInitialDistribution( (EstimatedGaussDistribution*) m_classifier->getDistribution( 1 ) ); +} + +WeakClassifierHaarFeature::~WeakClassifierHaarFeature() +{ + delete m_classifier; +} + +void WeakClassifierHaarFeature::getInitialDistribution( EstimatedGaussDistribution* distribution ) +{ + distribution->setValues( mean, sigma ); +} + +void WeakClassifierHaarFeature::generateRandomClassifier( 
EstimatedGaussDistribution* m_posSamples, EstimatedGaussDistribution* m_negSamples ) +{ + m_classifier = new ClassifierThreshold( m_posSamples, m_negSamples ); +} + +bool WeakClassifierHaarFeature::update( float value, int target ) +{ + m_classifier->update( value, target ); + return ( m_classifier->eval( value ) != target ); +} + +int WeakClassifierHaarFeature::eval( float value ) +{ + return m_classifier->eval( value ); +} + +Detector::Detector( StrongClassifierDirectSelection* classifier ) : + m_sizeDetections( 0 ) +{ + this->m_classifier = classifier; + + m_sizeConfidences = 0; + m_maxConfidence = -FLT_MAX; + m_numDetections = 0; + m_idxBestDetection = -1; +} + +Detector::~Detector() +{ +} + +void Detector::prepareConfidencesMemory( int numPatches ) +{ + if( numPatches <= m_sizeConfidences ) + return; + + m_sizeConfidences = numPatches; + m_confidences.resize( numPatches ); +} + +void Detector::prepareDetectionsMemory( int numDetections ) +{ + if( numDetections <= m_sizeDetections ) + return; + + m_sizeDetections = numDetections; + m_idxDetections.resize( numDetections ); +} + +void Detector::classifySmooth( const std::vector& images, float minMargin ) +{ + int numPatches = images.size(); + + prepareConfidencesMemory( numPatches ); + + m_numDetections = 0; + m_idxBestDetection = -1; + m_maxConfidence = -FLT_MAX; + + //compute grid + //TODO 0.99 overlap from params + Size patchSz = m_classifier->getPatchSize(); + int stepCol = (int) floor( ( 1.0f - 0.99f ) * (float) patchSz.width + 0.5f ); + int stepRow = (int) floor( ( 1.0f - 0.99f ) * (float) patchSz.height + 0.5f ); + if( stepCol <= 0 ) + stepCol = 1; + if( stepRow <= 0 ) + stepRow = 1; + + Size patchGrid; + Rect ROI = m_classifier->getROI(); + patchGrid.height = ( (int) ( (float) ( ROI.height - patchSz.height ) / stepRow ) + 1 ); + patchGrid.width = ( (int) ( (float) ( ROI.width - patchSz.width ) / stepCol ) + 1 ); + + if( ( patchGrid.width != m_confMatrix.cols ) || ( patchGrid.height != m_confMatrix.rows ) 
) + { + m_confMatrix.create( patchGrid.height, patchGrid.width ); + m_confMatrixSmooth.create( patchGrid.height, patchGrid.width ); + m_confImageDisplay.create( patchGrid.height, patchGrid.width ); + } + + int curPatch = 0; + // Eval and filter + for ( int row = 0; row < patchGrid.height; row++ ) + { + for ( int col = 0; col < patchGrid.width; col++ ) + { + m_confidences[curPatch] = m_classifier->eval( images[curPatch] ); + + // fill matrix + m_confMatrix( row, col ) = m_confidences[curPatch]; + curPatch++; + } + } + + // Filter + //cv::GaussianBlur(m_confMatrix,m_confMatrixSmooth,cv::Size(3,3),0.8); + cv::GaussianBlur( m_confMatrix, m_confMatrixSmooth, cv::Size( 3, 3 ), 0 ); + + // Make display friendly + double min_val, max_val; + cv::minMaxLoc( m_confMatrixSmooth, &min_val, &max_val ); + for ( int y = 0; y < m_confImageDisplay.rows; y++ ) + { + unsigned char* pConfImg = m_confImageDisplay[y]; + const float* pConfData = m_confMatrixSmooth[y]; + for ( int x = 0; x < m_confImageDisplay.cols; x++, pConfImg++, pConfData++ ) + { + *pConfImg = static_cast( 255.0 * ( *pConfData - min_val ) / ( max_val - min_val ) ); + } + } + + // Get best detection + curPatch = 0; + for ( int row = 0; row < patchGrid.height; row++ ) + { + for ( int col = 0; col < patchGrid.width; col++ ) + { + // fill matrix + m_confidences[curPatch] = m_confMatrixSmooth( row, col ); + + if( m_confidences[curPatch] > m_maxConfidence ) + { + m_maxConfidence = m_confidences[curPatch]; + m_idxBestDetection = curPatch; + } + if( m_confidences[curPatch] > minMargin ) + { + m_numDetections++; + } + curPatch++; + } + } + + prepareDetectionsMemory( m_numDetections ); + int curDetection = -1; + for ( int currentPatch = 0; currentPatch < numPatches; currentPatch++ ) + { + if( m_confidences[currentPatch] > minMargin ) + m_idxDetections[++curDetection] = currentPatch; + } +} + +int Detector::getNumDetections() +{ + return m_numDetections; +} + +float Detector::getConfidence( int patchIdx ) +{ + return 
m_confidences[patchIdx]; +} + +float Detector::getConfidenceOfDetection( int detectionIdx ) +{ + return m_confidences[getPatchIdxOfDetection( detectionIdx )]; +} + +int Detector::getPatchIdxOfBestDetection() +{ + return m_idxBestDetection; +} + +int Detector::getPatchIdxOfDetection( int detectionIdx ) +{ + return m_idxDetections[detectionIdx]; +} + +ClassifierThreshold::ClassifierThreshold( EstimatedGaussDistribution* posSamples, EstimatedGaussDistribution* negSamples ) +{ + m_posSamples = posSamples; + m_negSamples = negSamples; + m_threshold = 0.0f; + m_parity = 0; +} + +ClassifierThreshold::~ClassifierThreshold() +{ + if( m_posSamples != NULL ) + delete m_posSamples; + if( m_negSamples != NULL ) + delete m_negSamples; +} + +void* +ClassifierThreshold::getDistribution( int target ) +{ + if( target == 1 ) + return m_posSamples; + else + return m_negSamples; +} + +void ClassifierThreshold::update( float value, int target ) +{ + //update distribution + if( target == 1 ) + m_posSamples->update( value ); + else + m_negSamples->update( value ); + + //adapt threshold and parity + m_threshold = ( m_posSamples->getMean() + m_negSamples->getMean() ) / 2.0f; + m_parity = ( m_posSamples->getMean() > m_negSamples->getMean() ) ? 1 : -1; +} + +int ClassifierThreshold::eval( float value ) +{ + return ( ( ( m_parity * ( value - m_threshold ) ) > 0 ) ? 1 : -1 ); +} + +} /* namespace cv */ diff --git a/modules/tracking/src/onlineMIL.cpp b/modules/tracking/src/onlineMIL.cpp new file mode 100644 index 000000000..9b8532b25 --- /dev/null +++ b/modules/tracking/src/onlineMIL.cpp @@ -0,0 +1,379 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. 
+ // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "precomp.hpp" +#include "opencv2/tracking/onlineMIL.hpp" + +template class SortableElementRev +{ + public: + T _val; + int _ind; + SortableElementRev() : + _ind( 0 ) + { + } + SortableElementRev( T val, int ind ) + { + _val = val; + _ind = ind; + } + bool operator<( SortableElementRev &b ) + { + return ( _val < b._val ); + } + ; +}; + +static bool CompareSortableElementRev( const SortableElementRev& i, const SortableElementRev& j ) +{ + return i._val < j._val; +} + +template void sort_order_des( std::vector &v, std::vector &order ) +{ + uint n = (uint) v.size(); + std::vector > v2; + v2.resize( n ); + order.clear(); + order.resize( n ); + for ( uint i = 0; i < n; i++ ) + { + v2[i]._ind = i; + v2[i]._val = v[i]; + } + //std::sort( v2.begin(), v2.end() ); + std::sort( v2.begin(), v2.end(), CompareSortableElementRev ); + for ( uint i = 0; i < n; i++ ) + { + order[i] = v2[i]._ind; + v[i] = v2[i]._val; + } +} +; + +namespace cv +{ + +//implementations for strong classifier + +ClfMilBoost::Params::Params() +{ + _numSel = 50; + _numFeat = 250; + _lRate = 0.85; +} + +ClfMilBoost::ClfMilBoost() +{ + _myParams = ClfMilBoost::Params(); + _numsamples = 0; +} + +ClfMilBoost::~ClfMilBoost() +{ + _selectors.clear(); + for ( size_t i = 0; i < _weakclf.size(); i++ ) + delete _weakclf.at( i ); +} + +void ClfMilBoost::init( const ClfMilBoost::Params ¶meters ) +{ + _myParams = parameters; + _numsamples = 0; + + //_ftrs = Ftr::generate( _myParams->_ftrParams, _myParams->_numFeat ); + // if( params->_storeFtrHistory ) + // Ftr::toViz( _ftrs, "haarftrs" ); + _weakclf.resize( _myParams._numFeat ); + for ( int k = 0; k < _myParams._numFeat; k++ ) + { + _weakclf[k] = new ClfOnlineStump( k ); + _weakclf[k]->_lRate = _myParams._lRate; + + } + _counter = 0; +} + +void ClfMilBoost::update( const Mat& posx, const Mat& negx ) +{ + int numneg = negx.rows; + int numpos = posx.rows; + + // compute ftrs + //if( !posx.ftrsComputed() ) + // Ftr::compute( posx, _ftrs ); + //if( 
!negx.ftrsComputed() ) + // Ftr::compute( negx, _ftrs ); + + // initialize H + static std::vector Hpos, Hneg; + Hpos.clear(); + Hneg.clear(); + Hpos.resize( posx.rows, 0.0f ), Hneg.resize( negx.rows, 0.0f ); + + _selectors.clear(); + std::vector posw( posx.rows ), negw( negx.rows ); + std::vector > pospred( _weakclf.size() ), negpred( _weakclf.size() ); + + // train all weak classifiers without weights +#ifdef _OPENMP +#pragma omp parallel for +#endif + for ( int m = 0; m < _myParams._numFeat; m++ ) + { + _weakclf[m]->update( posx, negx ); + pospred[m] = _weakclf[m]->classifySetF( posx ); + negpred[m] = _weakclf[m]->classifySetF( negx ); + } + + // pick the best features + for ( int s = 0; s < _myParams._numSel; s++ ) + { + + // compute errors/likl for all weak clfs + std::vector poslikl( _weakclf.size(), 1.0f ), neglikl( _weakclf.size() ), likl( _weakclf.size() ); +#ifdef _OPENMP +#pragma omp parallel for +#endif + for ( int w = 0; w < (int) _weakclf.size(); w++ ) + { + float lll = 1.0f; + for ( int j = 0; j < numpos; j++ ) + lll *= ( 1 - sigmoid( Hpos[j] + pospred[w][j] ) ); + poslikl[w] = (float) -log( 1 - lll + 1e-5 ); + + lll = 0.0f; + for ( int j = 0; j < numneg; j++ ) + lll += (float) -log( 1e-5f + 1 - sigmoid( Hneg[j] + negpred[w][j] ) ); + neglikl[w] = lll; + + likl[w] = poslikl[w] / numpos + neglikl[w] / numneg; + } + + // pick best weak clf + std::vector order; + sort_order_des( likl, order ); + + // find best weakclf that isn't already included + for ( uint k = 0; k < order.size(); k++ ) + if( std::count( _selectors.begin(), _selectors.end(), order[k] ) == 0 ) + { + _selectors.push_back( order[k] ); + break; + } + + // update H = H + h_m +#ifdef _OPENMP +#pragma omp parallel for +#endif + for ( int k = 0; k < posx.rows; k++ ) + Hpos[k] += pospred[_selectors[s]][k]; +#ifdef _OPENMP +#pragma omp parallel for +#endif + for ( int k = 0; k < negx.rows; k++ ) + Hneg[k] += negpred[_selectors[s]][k]; + + } + + //if( _myParams->_storeFtrHistory ) + //for ( uint 
j = 0; j < _selectors.size(); j++ ) + // _ftrHist( _selectors[j], _counter ) = 1.0f / ( j + 1 ); + + _counter++; + /* */ + return; +} + +std::vector ClfMilBoost::classify( const Mat& x, bool logR ) +{ + int numsamples = x.rows; + std::vector res( numsamples ); + std::vector tr; + + for ( uint w = 0; w < _selectors.size(); w++ ) + { + tr = _weakclf[_selectors[w]]->classifySetF( x ); +#ifdef _OPENMP +#pragma omp parallel for +#endif + for ( int j = 0; j < numsamples; j++ ) + { + res[j] += tr[j]; + } + } + + // return probabilities or log odds ratio + if( !logR ) + { +#ifdef _OPENMP +#pragma omp parallel for +#endif + for ( int j = 0; j < (int) res.size(); j++ ) + { + res[j] = sigmoid( res[j] ); + } + } + + return res; +} + +//implementations for weak classifier + +ClfOnlineStump::ClfOnlineStump() +{ + _trained = false; + _ind = -1; + init(); +} + +ClfOnlineStump::ClfOnlineStump( int ind ) +{ + _trained = false; + _ind = ind; + init(); +} +void ClfOnlineStump::init() +{ + _mu0 = 0; + _mu1 = 0; + _sig0 = 1; + _sig1 = 1; + _lRate = 0.85f; + _trained = false; +} + +void ClfOnlineStump::update( const Mat& posx, const Mat& negx, const Mat_& /*posw*/, const Mat_& /*negw*/) +{ + //std::cout << " ClfOnlineStump::update" << _ind << std::endl; + float posmu = 0.0, negmu = 0.0; + if( posx.cols > 0 ) + posmu = float( mean( posx.col( _ind ) )[0] ); + if( negx.cols > 0 ) + negmu = float( mean( negx.col( _ind ) )[0] ); + + if( _trained ) + { + if( posx.cols > 0 ) + { + _mu1 = ( _lRate * _mu1 + ( 1 - _lRate ) * posmu ); + cv::Mat diff = posx.col( _ind ) - _mu1; + _sig1 = _lRate * _sig1 + ( 1 - _lRate ) * float( mean( diff.mul( diff ) )[0] ); + } + if( negx.cols > 0 ) + { + _mu0 = ( _lRate * _mu0 + ( 1 - _lRate ) * negmu ); + cv::Mat diff = negx.col( _ind ) - _mu0; + _sig0 = _lRate * _sig0 + ( 1 - _lRate ) * float( mean( diff.mul( diff ) )[0] ); + } + + _q = ( _mu1 - _mu0 ) / 2; + _s = sign( _mu1 - _mu0 ); + _log_n0 = std::log( float( 1.0f / pow( _sig0, 0.5f ) ) ); + _log_n1 = 
std::log( float( 1.0f / pow( _sig1, 0.5f ) ) ); + //_e1 = -1.0f/(2.0f*_sig1+1e-99f); + //_e0 = -1.0f/(2.0f*_sig0+1e-99f); + _e1 = -1.0f / ( 2.0f * _sig1 + std::numeric_limits::min() ); + _e0 = -1.0f / ( 2.0f * _sig0 + std::numeric_limits::min() ); + + } + else + { + _trained = true; + if( posx.cols > 0 ) + { + _mu1 = posmu; + cv::Scalar scal_mean, scal_std_dev; + cv::meanStdDev( posx.col( _ind ), scal_mean, scal_std_dev ); + _sig1 = float( scal_std_dev[0] ) * float( scal_std_dev[0] ) + 1e-9f; + } + + if( negx.cols > 0 ) + { + _mu0 = negmu; + cv::Scalar scal_mean, scal_std_dev; + cv::meanStdDev( negx.col( _ind ), scal_mean, scal_std_dev ); + _sig0 = float( scal_std_dev[0] ) * float( scal_std_dev[0] ) + 1e-9f; + } + + _q = ( _mu1 - _mu0 ) / 2; + _s = sign( _mu1 - _mu0 ); + _log_n0 = std::log( float( 1.0f / pow( _sig0, 0.5f ) ) ); + _log_n1 = std::log( float( 1.0f / pow( _sig1, 0.5f ) ) ); + //_e1 = -1.0f/(2.0f*_sig1+1e-99f); + //_e0 = -1.0f/(2.0f*_sig0+1e-99f); + _e1 = -1.0f / ( 2.0f * _sig1 + std::numeric_limits::min() ); + _e0 = -1.0f / ( 2.0f * _sig0 + std::numeric_limits::min() ); + } +} + +bool ClfOnlineStump::classify( const Mat& x, int i ) +{ + float xx = x.at( i, _ind ); + double log_p0 = ( xx - _mu0 ) * ( xx - _mu0 ) * _e0 + _log_n0; + double log_p1 = ( xx - _mu1 ) * ( xx - _mu1 ) * _e1 + _log_n1; + return log_p1 > log_p0; +} + +float ClfOnlineStump::classifyF( const Mat& x, int i ) +{ + float xx = x.at( i, _ind ); + double log_p0 = ( xx - _mu0 ) * ( xx - _mu0 ) * _e0 + _log_n0; + double log_p1 = ( xx - _mu1 ) * ( xx - _mu1 ) * _e1 + _log_n1; + return float( log_p1 - log_p0 ); +} + +inline std::vector ClfOnlineStump::classifySetF( const Mat& x ) +{ + std::vector res( x.rows ); + +#ifdef _OPENMP +#pragma omp parallel for +#endif + for ( int k = 0; k < (int) res.size(); k++ ) + { + res[k] = classifyF( x, k ); + } + return res; +} + +} /* namespace cv */ diff --git a/modules/tracking/src/precomp.hpp b/modules/tracking/src/precomp.hpp new file mode 100644 index 
000000000..876306f0d --- /dev/null +++ b/modules/tracking/src/precomp.hpp @@ -0,0 +1,49 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_PRECOMP_H__ +#define __OPENCV_PRECOMP_H__ + +#include "opencv2/tracking.hpp" +#include "opencv2/core/utility.hpp" +#include "opencv2/core/private.hpp" + +#endif diff --git a/modules/tracking/src/tracker.cpp b/modules/tracking/src/tracker.cpp new file mode 100644 index 000000000..b2b690db5 --- /dev/null +++ b/modules/tracking/src/tracker.cpp @@ -0,0 +1,107 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. 
+ // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "precomp.hpp" + +namespace cv +{ + +/* + * Tracker + */ + +Tracker::~Tracker() +{ +} + +bool Tracker::init( const Mat& image, const Rect& boundingBox ) +{ + + if( isInit ) + { + return false; + } + + if( image.empty() ) + return false; + + sampler = Ptr( new TrackerSampler() ); + featureSet = Ptr( new TrackerFeatureSet() ); + model = Ptr(); + + bool initTracker = initImpl( image, boundingBox ); + + //check if the model component is initialized + if( model == 0 ) + { + CV_Error( -1, "The model are not initialized" ); + return false; + } + + if( initTracker ) + { + isInit = true; + } + + return initTracker; +} + +bool Tracker::update( const Mat& image, Rect& boundingBox ) +{ + + if( !isInit ) + { + return false; + } + + if( image.empty() ) + return false; + + return updateImpl( image, boundingBox ); +} + +Ptr Tracker::create( const String& trackerType ) +{ + + return Algorithm::create( "TRACKER." + trackerType ); +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerBoosting.cpp b/modules/tracking/src/trackerBoosting.cpp new file mode 100644 index 000000000..5a158d0ee --- /dev/null +++ b/modules/tracking/src/trackerBoosting.cpp @@ -0,0 +1,308 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. 
+ // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "precomp.hpp" +#include "trackerBoostingModel.hpp" + +namespace cv +{ + +/* + * TrackerBoosting + */ + +/* + * Parameters + */ +TrackerBoosting::Params::Params() +{ + numClassifiers = 100; + samplerOverlap = 0.99f; + samplerSearchFactor = 2; + iterationInit = 50; + featureSetNumFeatures = ( numClassifiers * 10 ) + iterationInit; +} + +void TrackerBoosting::Params::read( const cv::FileNode& fn ) +{ + numClassifiers = fn["numClassifiers"]; + samplerOverlap = fn["overlap"]; + samplerSearchFactor = fn["samplerSearchFactor"]; + iterationInit = fn["iterationInit"]; + samplerSearchFactor = fn["searchFactor"]; +} + +void TrackerBoosting::Params::write( cv::FileStorage& fs ) const +{ + fs << "numClassifiers" << numClassifiers; + fs << "overlap" << samplerOverlap; + fs << "searchFactor" << samplerSearchFactor; + fs << "iterationInit" << iterationInit; + fs << "samplerSearchFactor" << samplerSearchFactor; +} + +/* + * Constructor + */ +TrackerBoosting::TrackerBoosting( const TrackerBoosting::Params ¶meters ) : + params( parameters ) +{ + isInit = false; +} + +/* + * Destructor + */ +TrackerBoosting::~TrackerBoosting() +{ + +} + +void TrackerBoosting::read( const cv::FileNode& fn ) +{ + params.read( fn ); +} + +void TrackerBoosting::write( cv::FileStorage& fs ) const +{ + params.write( fs ); +} + +bool TrackerBoosting::initImpl( const Mat& image, const Rect& boundingBox ) +{ + //sampling + Mat_ intImage; + Mat_ intSqImage; + Mat image_; + cvtColor( image, image_, CV_RGB2GRAY ); + integral( image_, intImage, intSqImage, CV_32S ); + TrackerSamplerCS::Params CSparameters; + CSparameters.overlap = params.samplerOverlap; + CSparameters.searchFactor = params.samplerSearchFactor; + + Ptr CSSampler = Ptr( new TrackerSamplerCS( CSparameters ) ); + + if( !sampler->addTrackerSamplerAlgorithm( CSSampler ) ) + return false; + + CSSampler.staticCast()->setMode( TrackerSamplerCS::MODE_POSITIVE ); + sampler->sampling( intImage, boundingBox ); + const std::vector 
posSamples = sampler->getSamples(); + + CSSampler.staticCast()->setMode( TrackerSamplerCS::MODE_NEGATIVE ); + sampler->sampling( intImage, boundingBox ); + const std::vector negSamples = sampler->getSamples(); + + if( posSamples.empty() || negSamples.empty() ) + return false; + + Rect ROI = CSSampler.staticCast()->getROI(); + + //compute HAAR features + TrackerFeatureHAAR::Params HAARparameters; + HAARparameters.numFeatures = params.featureSetNumFeatures; + HAARparameters.isIntegral = true; + HAARparameters.rectSize = Size( boundingBox.width, boundingBox.height ); + Ptr trackerFeature = Ptr( new TrackerFeatureHAAR( HAARparameters ) ); + if( !featureSet->addTrackerFeature( trackerFeature ) ) + return false; + + featureSet->extraction( posSamples ); + const std::vector posResponse = featureSet->getResponses(); + featureSet->extraction( negSamples ); + const std::vector negResponse = featureSet->getResponses(); + + //Model + model = Ptr( new TrackerBoostingModel( boundingBox ) ); + Ptr stateEstimator = Ptr( + new TrackerStateEstimatorAdaBoosting( params.numClassifiers, params.iterationInit, params.featureSetNumFeatures, + Size( boundingBox.width, boundingBox.height ), ROI ) ); + model->setTrackerStateEstimator( stateEstimator ); + + //Run model estimation and update for iterationInit iterations + for ( int i = 0; i < params.iterationInit; i++ ) + { + //compute temp features + TrackerFeatureHAAR::Params HAARparameters2; + HAARparameters2.numFeatures = ( posSamples.size() + negSamples.size() ); + HAARparameters2.isIntegral = true; + HAARparameters2.rectSize = Size( boundingBox.width, boundingBox.height ); + Ptr trackerFeature2 = Ptr( new TrackerFeatureHAAR( HAARparameters2 ) ); + + model.staticCast()->setMode( TrackerBoostingModel::MODE_NEGATIVE, negSamples ); + model->modelEstimation( negResponse ); + model.staticCast()->setMode( TrackerBoostingModel::MODE_POSITIVE, posSamples ); + model->modelEstimation( posResponse ); + model->modelUpdate(); + + //get replaced 
classifier and change the features + std::vector replacedClassifier = stateEstimator->computeReplacedClassifier(); + std::vector swappedClassified = stateEstimator->computeSwappedClassifier(); + for ( size_t j = 0; j < replacedClassifier.size(); j++ ) + { + if( replacedClassifier[j] != -1 && swappedClassified[j] != -1 ) + { + trackerFeature.staticCast()->swapFeature( replacedClassifier[j], swappedClassified[j] ); + trackerFeature.staticCast()->swapFeature( swappedClassified[j], trackerFeature2->getFeatureAt( j ) ); + } + } + } + + return true; +} + +bool TrackerBoosting::updateImpl( const Mat& image, Rect& boundingBox ) +{ + Mat_ intImage; + Mat_ intSqImage; + Mat image_; + cvtColor( image, image_, CV_RGB2GRAY ); + integral( image_, intImage, intSqImage, CV_32S ); + //get the last location [AAM] X(k-1) + Ptr lastLocation = model->getLastTargetState(); + Rect lastBoundingBox( lastLocation->getTargetPosition().x, lastLocation->getTargetPosition().y, lastLocation->getTargetWidth(), + lastLocation->getTargetHeight() ); + + //sampling new frame based on last location + ( sampler->getSamplers().at( 0 ).second ).staticCast()->setMode( TrackerSamplerCS::MODE_CLASSIFY ); + sampler->sampling( intImage, lastBoundingBox ); + const std::vector detectSamples = sampler->getSamples(); + Rect ROI = ( sampler->getSamplers().at( 0 ).second ).staticCast()->getROI(); + + if( detectSamples.empty() ) + return false; + + /*//TODO debug samples + Mat f; + image.copyTo( f ); + + for ( size_t i = 0; i < detectSamples.size(); i = i + 10 ) + { + Size sz; + Point off; + detectSamples.at( i ).locateROI( sz, off ); + rectangle( f, Rect( off.x, off.y, detectSamples.at( i ).cols, detectSamples.at( i ).rows ), Scalar( 255, 0, 0 ), 1 ); + }*/ + + std::vector responses; + Mat response; + + std::vector classifiers = model->getTrackerStateEstimator().staticCast()->computeSelectedWeakClassifier(); + Ptr extractor = featureSet->getTrackerFeature()[0].second.staticCast(); + extractor->extractSelected( 
classifiers, detectSamples, response ); + responses.push_back( response ); + + //predict new location + ConfidenceMap cmap; + model.staticCast()->setMode( TrackerBoostingModel::MODE_CLASSIFY, detectSamples ); + model.staticCast()->responseToConfidenceMap( responses, cmap ); + model->getTrackerStateEstimator().staticCast()->setCurrentConfidenceMap( cmap ); + model->getTrackerStateEstimator().staticCast()->setSampleROI( ROI ); + + if( !model->runStateEstimator() ) + { + return false; + } + + Ptr currentState = model->getLastTargetState(); + boundingBox = Rect( currentState->getTargetPosition().x, currentState->getTargetPosition().y, currentState->getTargetWidth(), + currentState->getTargetHeight() ); + + /*//TODO debug + rectangle( f, lastBoundingBox, Scalar( 0, 255, 0 ), 1 ); + rectangle( f, boundingBox, Scalar( 0, 0, 255 ), 1 ); + imshow( "f", f ); + //waitKey( 0 );*/ + + //sampling new frame based on new location + //Positive sampling + ( sampler->getSamplers().at( 0 ).second ).staticCast()->setMode( TrackerSamplerCS::MODE_POSITIVE ); + sampler->sampling( intImage, boundingBox ); + const std::vector posSamples = sampler->getSamples(); + + //Negative sampling + ( sampler->getSamplers().at( 0 ).second ).staticCast()->setMode( TrackerSamplerCS::MODE_NEGATIVE ); + sampler->sampling( intImage, boundingBox ); + const std::vector negSamples = sampler->getSamples(); + + if( posSamples.empty() || negSamples.empty() ) + return false; + + //extract features + featureSet->extraction( posSamples ); + const std::vector posResponse = featureSet->getResponses(); + + featureSet->extraction( negSamples ); + const std::vector negResponse = featureSet->getResponses(); + + //compute temp features + TrackerFeatureHAAR::Params HAARparameters2; + HAARparameters2.numFeatures = ( posSamples.size() + negSamples.size() ); + HAARparameters2.isIntegral = true; + HAARparameters2.rectSize = Size( boundingBox.width, boundingBox.height ); + Ptr trackerFeature2 = Ptr( new TrackerFeatureHAAR( 
HAARparameters2 ) ); + + //model estimate + model.staticCast()->setMode( TrackerBoostingModel::MODE_NEGATIVE, negSamples ); + model->modelEstimation( negResponse ); + model.staticCast()->setMode( TrackerBoostingModel::MODE_POSITIVE, posSamples ); + model->modelEstimation( posResponse ); + + //model update + model->modelUpdate(); + + //get replaced classifier and change the features + std::vector replacedClassifier = model->getTrackerStateEstimator().staticCast()->computeReplacedClassifier(); + std::vector swappedClassified = model->getTrackerStateEstimator().staticCast()->computeSwappedClassifier(); + for ( size_t j = 0; j < replacedClassifier.size(); j++ ) + { + if( replacedClassifier[j] != -1 && swappedClassified[j] != -1 ) + { + featureSet->getTrackerFeature().at( 0 ).second.staticCast()->swapFeature( replacedClassifier[j], swappedClassified[j] ); + featureSet->getTrackerFeature().at( 0 ).second.staticCast()->swapFeature( swappedClassified[j], + trackerFeature2->getFeatureAt( j ) ); + } + } + + return true; +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerBoostingModel.cpp b/modules/tracking/src/trackerBoostingModel.cpp new file mode 100644 index 000000000..ead4cb15e --- /dev/null +++ b/modules/tracking/src/trackerBoostingModel.cpp @@ -0,0 +1,123 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. 
+ // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "trackerBoostingModel.hpp" + +/** + * TrackerBoostingModel + */ + +namespace cv +{ + +TrackerBoostingModel::TrackerBoostingModel( const Rect& boundingBox ) +{ + + mode = MODE_POSITIVE; + + Ptr initState = + Ptr( + new TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState( Point2f( boundingBox.x, boundingBox.y ), boundingBox.width, + boundingBox.height, true, Mat() ) ); + trajectory.push_back( initState ); + maxCMLength = 10; +} + +void TrackerBoostingModel::modelEstimationImpl( const std::vector& responses ) +{ + responseToConfidenceMap( responses, currentConfidenceMap ); +} + +void TrackerBoostingModel::modelUpdateImpl() +{ + +} + +void TrackerBoostingModel::setMode( int trainingMode, const std::vector& samples ) +{ + currentSample.clear(); + currentSample = samples; + + mode = trainingMode; +} + +std::vector TrackerBoostingModel::getSelectedWeakClassifier() +{ + return stateEstimator.staticCast()->computeSelectedWeakClassifier(); +} + +void TrackerBoostingModel::responseToConfidenceMap( const std::vector& responses, ConfidenceMap& confidenceMap ) +{ + if( currentSample.empty() ) + { + CV_Error( -1, "The samples in Model estimation are empty" ); + return; + } + + for ( size_t i = 0; i < currentSample.size(); i++ ) + { + + Size currentSize; + Point currentOfs; + currentSample.at( i ).locateROI( currentSize, currentOfs ); + bool foreground; + if( mode == MODE_POSITIVE || mode == MODE_CLASSIFY ) + { + foreground = true; + } + else if( mode == MODE_NEGATIVE ) + { + foreground = false; + } + const Mat resp = responses[0].col( i ); + + //create the state + Ptr currentState = Ptr< + TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState>( + new TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState( currentOfs, currentSample.at( i ).cols, currentSample.at( i ).rows, + foreground, resp ) ); + + confidenceMap.push_back( std::make_pair( currentState, 0 ) ); + + } +} + +} diff --git 
a/modules/tracking/src/trackerBoostingModel.hpp b/modules/tracking/src/trackerBoostingModel.hpp new file mode 100644 index 000000000..6fa69b032 --- /dev/null +++ b/modules/tracking/src/trackerBoostingModel.hpp @@ -0,0 +1,109 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_TRACKER_BOOSTING_MODEL_HPP__ +#define __OPENCV_TRACKER_BOOSTING_MODEL_HPP__ + +#include "precomp.hpp" +#include "opencv2/core.hpp" + +namespace cv +{ + +/** + * \brief Implementation of TrackerModel for BOOSTING algorithm + */ +class TrackerBoostingModel : public TrackerModel +{ + public: + enum + { + MODE_POSITIVE = 1, // mode for positive features + MODE_NEGATIVE = 2, // mode for negative features + MODE_CLASSIFY = 3 // mode for classify step + }; + /** + * \brief Constructor + * \param boundingBox The first boundingBox + */ + TrackerBoostingModel( const Rect& boundingBox ); + + /** + * \brief Destructor + */ + ~TrackerBoostingModel() + { + } + ; + + /** + * \brief Set the mode + */ + void setMode( int trainingMode, const std::vector& samples ); + + /** + * \brief Create the ConfidenceMap from a list of responses + * \param responses The list of the responses + * \param confidenceMap The output + */ + void responseToConfidenceMap( const std::vector& responses, ConfidenceMap& confidenceMap ); + + /** + * \brief return the selected weak classifiers for the detect + * @return the selected weak classifiers + */ + std::vector getSelectedWeakClassifier(); + + protected: + void modelEstimationImpl( const std::vector& responses ); + void modelUpdateImpl(); + + private: + + std::vector currentSample; + std::vector > meanSigmaPair; + + int mode; +}; + +} /* namespace cv */ + +#endif diff --git 
a/modules/tracking/src/trackerFeature.cpp b/modules/tracking/src/trackerFeature.cpp new file mode 100644 index 000000000..c17e4c733 --- /dev/null +++ b/modules/tracking/src/trackerFeature.cpp @@ -0,0 +1,325 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" + +namespace cv +{ + +/* + * TrackerFeature + */ + +TrackerFeature::~TrackerFeature() +{ + +} + +void TrackerFeature::compute( const std::vector& images, Mat& response ) +{ + if( images.empty() ) + return; + + computeImpl( images, response ); +} + +Ptr TrackerFeature::create( const String& trackerFeatureType ) +{ + if( trackerFeatureType.find( "FEATURE2D" ) == 0 ) + { + size_t firstSep = trackerFeatureType.find_first_of( "." ); + size_t secondSep = trackerFeatureType.find_last_of( "." 
); + + String detector = trackerFeatureType.substr( firstSep, secondSep - firstSep ); + String descriptor = trackerFeatureType.substr( secondSep, trackerFeatureType.length() - secondSep ); + + return Ptr( new TrackerFeatureFeature2d( detector, descriptor ) ); + } + + if( trackerFeatureType.find( "HOG" ) == 0 ) + { + return Ptr( new TrackerFeatureHOG() ); + } + + if( trackerFeatureType.find( "HAAR" ) == 0 ) + { + return Ptr( new TrackerFeatureHAAR() ); + } + + if( trackerFeatureType.find( "LBP" ) == 0 ) + { + return Ptr( new TrackerFeatureLBP() ); + } + + CV_Error( -1, "Tracker feature type not supported" ); + return Ptr(); +} + +String TrackerFeature::getClassName() const +{ + return className; +} + +/** + * TrackerFeatureFeature2d + */ +TrackerFeatureFeature2d::TrackerFeatureFeature2d( String /*detectorType*/, String /*descriptorType*/) +{ + className = "FEATURE2D"; +} + +TrackerFeatureFeature2d::~TrackerFeatureFeature2d() +{ + +} + +bool TrackerFeatureFeature2d::computeImpl( const std::vector& /*images*/, Mat& /*response*/) +{ + return false; +} + +void TrackerFeatureFeature2d::selection( Mat& /*response*/, int /*npoints*/) +{ + +} + +/** + * TrackerFeatureHOG + */ +TrackerFeatureHOG::TrackerFeatureHOG() +{ + className = "HOG"; +} + +TrackerFeatureHOG::~TrackerFeatureHOG() +{ + +} + +bool TrackerFeatureHOG::computeImpl( const std::vector& /*images*/, Mat& /*response*/) +{ + return false; +} + +void TrackerFeatureHOG::selection( Mat& /*response*/, int /*npoints*/) +{ + +} + +/** + * TrackerFeatureHAAR + */ + +/** + * Parameters + */ + +TrackerFeatureHAAR::Params::Params() +{ + numFeatures = 250; + rectSize = Size( 100, 100 ); + isIntegral = false; +} + +TrackerFeatureHAAR::TrackerFeatureHAAR( const TrackerFeatureHAAR::Params ¶meters ) : + params( parameters ) +{ + className = "HAAR"; + + CvHaarFeatureParams haarParams; + haarParams.numFeatures = params.numFeatures; + haarParams.isIntegral = params.isIntegral; + featureEvaluator = CvFeatureEvaluator::create( 
CvFeatureParams::HAAR ).staticCast(); + featureEvaluator->init( &haarParams, 1, params.rectSize ); +} + +TrackerFeatureHAAR::~TrackerFeatureHAAR() +{ + +} + +CvHaarEvaluator::FeatureHaar& TrackerFeatureHAAR::getFeatureAt( int id ) +{ + return featureEvaluator->getFeatures( id ); +} + +bool TrackerFeatureHAAR::swapFeature( int id, CvHaarEvaluator::FeatureHaar& feature ) +{ + featureEvaluator->getFeatures( id ) = feature; + return true; +} + +bool TrackerFeatureHAAR::swapFeature( int source, int target ) +{ + CvHaarEvaluator::FeatureHaar feature = featureEvaluator->getFeatures( source ); + featureEvaluator->getFeatures( source ) = featureEvaluator->getFeatures( target ); + featureEvaluator->getFeatures( target ) = feature; + return true; +} + +bool TrackerFeatureHAAR::extractSelected( const std::vector selFeatures, const std::vector& images, Mat& response ) +{ + if( images.empty() ) + { + return false; + } + + int numFeatures = featureEvaluator->getNumFeatures(); + int numSelFeatures = selFeatures.size(); + + //response = Mat_( Size( images.size(), numFeatures ) ); + response.create( Size( images.size(), numFeatures ), CV_32F ); + response.setTo( 0 ); + + //double t = getTickCount(); + //for each sample compute #n_feature -> put each feature (n Rect) in response + for ( size_t i = 0; i < images.size(); i++ ) + { + int c = images[i].cols; + int r = images[i].rows; + for ( int j = 0; j < numSelFeatures; j++ ) + { + float res = 0; + //const feat + CvHaarEvaluator::FeatureHaar& feature = featureEvaluator->getFeatures( selFeatures[j] ); + feature.eval( images[i], Rect( 0, 0, c, r ), &res ); + //( Mat_( response ) )( j, i ) = res; + response.at( selFeatures[j], i ) = res; + } + } + //t = ( (double) getTickCount() - t ) / getTickFrequency(); + //std::cout << "StrongClassifierDirectSelection time " << t << std::endl; + + return true; +} + +class Parallel_compute : public cv::ParallelLoopBody +{ + private: + Ptr featureEvaluator; + std::vector images; + Mat response; + 
//std::vector features; + public: + Parallel_compute( Ptr& fe, const std::vector& img, Mat& resp ) : + featureEvaluator( fe ), + images( img ), + response( resp ) + { + + //features = featureEvaluator->getFeatures(); + } + + virtual void operator()( const cv::Range &r ) const + { + for ( register int jf = r.start; jf != r.end; ++jf ) + { + int cols = images[jf].cols; + int rows = images[jf].rows; + for ( int j = 0; j < featureEvaluator->getNumFeatures(); j++ ) + { + float res = 0; + featureEvaluator->getFeatures()[j].eval( images[jf], Rect( 0, 0, cols, rows ), &res ); + ( Mat_( response ) )( j, jf ) = res; + } + } + } +}; + +bool TrackerFeatureHAAR::computeImpl( const std::vector& images, Mat& response ) +{ + if( images.empty() ) + { + return false; + } + + int numFeatures = featureEvaluator->getNumFeatures(); + + response = Mat_( Size( images.size(), numFeatures ) ); + + std::vector f = featureEvaluator->getFeatures(); + //for each sample compute #n_feature -> put each feature (n Rect) in response + parallel_for_( Range( 0, images.size() ), Parallel_compute( featureEvaluator, images, response ) ); + + /*for ( size_t i = 0; i < images.size(); i++ ) + { + int c = images[i].cols; + int r = images[i].rows; + for ( int j = 0; j < numFeatures; j++ ) + { + float res = 0; + featureEvaluator->getFeatures( j ).eval( images[i], Rect( 0, 0, c, r ), &res ); + ( Mat_( response ) )( j, i ) = res; + } + }*/ + + return true; +} + +void TrackerFeatureHAAR::selection( Mat& /*response*/, int /*npoints*/) +{ + +} + +/** + * TrackerFeatureLBP + */ +TrackerFeatureLBP::TrackerFeatureLBP() +{ + className = "LBP"; +} + +TrackerFeatureLBP::~TrackerFeatureLBP() +{ + +} + +bool TrackerFeatureLBP::computeImpl( const std::vector& /*images*/, Mat& /*response*/) +{ + return false; +} + +void TrackerFeatureLBP::selection( Mat& /*response*/, int /*npoints*/) +{ + +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerFeatureSet.cpp b/modules/tracking/src/trackerFeatureSet.cpp new file 
mode 100644 index 000000000..99ec4944b --- /dev/null +++ b/modules/tracking/src/trackerFeatureSet.cpp @@ -0,0 +1,142 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" + +namespace cv +{ + +/* + * TrackerFeatureSet + */ + +/* + * Constructor + */ +TrackerFeatureSet::TrackerFeatureSet() +{ + blockAddTrackerFeature = false; +} + +/* + * Destructor + */ +TrackerFeatureSet::~TrackerFeatureSet() +{ + +} + +void TrackerFeatureSet::extraction( const std::vector& images ) +{ + + clearResponses(); + responses.resize( features.size() ); + + for ( size_t i = 0; i < features.size(); i++ ) + { + Mat response; + features[i].second->compute( images, response ); + responses[i] = response; + } + + if( !blockAddTrackerFeature ) + { + blockAddTrackerFeature = true; + } +} + +void TrackerFeatureSet::selection() +{ + +} + +void TrackerFeatureSet::removeOutliers() +{ + +} + +bool TrackerFeatureSet::addTrackerFeature( String trackerFeatureType ) +{ + if( blockAddTrackerFeature ) + { + return false; + } + Ptr feature = TrackerFeature::create( trackerFeatureType ); + + if( feature == 0 ) + { + return false; + } + + features.push_back( std::make_pair( trackerFeatureType, feature ) ); + + return true; +} + +bool TrackerFeatureSet::addTrackerFeature( Ptr& feature ) +{ + if( blockAddTrackerFeature ) + { + return false; + } + + String trackerFeatureType = feature->getClassName(); + features.push_back( std::make_pair( trackerFeatureType, feature ) ); + + return true; +} + +const std::vector > >& TrackerFeatureSet::getTrackerFeature() const +{ + return features; +} + +const 
std::vector& TrackerFeatureSet::getResponses() const +{ + return responses; +} + +void TrackerFeatureSet::clearResponses() +{ + responses.clear(); +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerMIL.cpp b/modules/tracking/src/trackerMIL.cpp new file mode 100644 index 000000000..a20f9939c --- /dev/null +++ b/modules/tracking/src/trackerMIL.cpp @@ -0,0 +1,273 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" +#include "trackerMILModel.hpp" + +namespace cv +{ + +/* + * TrackerMIL + */ + +/* + * Parameters + */ +TrackerMIL::Params::Params() +{ + samplerInitInRadius = 3; + samplerTrackInRadius = 4; + samplerSearchWinSize = 25; + samplerInitMaxNegNum = 65; + samplerTrackMaxPosNum = 100000; + samplerTrackMaxNegNum = 65; + featureSetNumFeatures = 250; +} + +void TrackerMIL::Params::read( const cv::FileNode& fn ) +{ + samplerInitInRadius = fn["samplerInitInRadius"]; + samplerSearchWinSize = fn["samplerSearchWinSize"]; + samplerInitInRadius = fn["samplerInitInRadius"]; + samplerTrackInRadius = fn["samplerTrackInRadius"]; + samplerTrackMaxPosNum = fn["samplerTrackMaxPosNum"]; + samplerTrackMaxNegNum = fn["samplerTrackMaxNegNum"]; + featureSetNumFeatures = fn["featureSetNumFeatures"]; +} + +void TrackerMIL::Params::write( cv::FileStorage& fs ) const +{ + fs << "samplerInitInRadius" << samplerInitInRadius; + fs << "samplerSearchWinSize" << samplerSearchWinSize; + fs << "samplerInitInRadius" << samplerInitInRadius; + fs << "samplerTrackInRadius" << samplerTrackInRadius; + fs << "samplerTrackMaxPosNum" << samplerTrackMaxPosNum; + fs << "samplerTrackMaxNegNum" << samplerTrackMaxNegNum; + fs << "featureSetNumFeatures" << featureSetNumFeatures; + +} + +/* + * Constructor + */ +TrackerMIL::TrackerMIL( const TrackerMIL::Params ¶meters ) : + params( parameters ) +{ + isInit = false; +} + +/* + * Destructor + 
*/ +TrackerMIL::~TrackerMIL() +{ + +} + +void TrackerMIL::read( const cv::FileNode& fn ) +{ + params.read( fn ); +} + +void TrackerMIL::write( cv::FileStorage& fs ) const +{ + params.write( fs ); +} + +void TrackerMIL::compute_integral( const Mat & img, Mat & ii_img ) +{ + Mat ii; + std::vector ii_imgs; + integral( img, ii, CV_32F ); + split( ii, ii_imgs ); + ii_img = ii_imgs[0]; +} + +bool TrackerMIL::initImpl( const Mat& image, const Rect& boundingBox ) +{ + Mat intImage; + compute_integral( image, intImage ); + TrackerSamplerCSC::Params CSCparameters; + CSCparameters.initInRad = params.samplerInitInRadius; + CSCparameters.searchWinSize = params.samplerSearchWinSize; + CSCparameters.initMaxNegNum = params.samplerInitMaxNegNum; + CSCparameters.trackInPosRad = params.samplerTrackInRadius; + CSCparameters.trackMaxPosNum = params.samplerTrackMaxPosNum; + CSCparameters.trackMaxNegNum = params.samplerTrackMaxNegNum; + + Ptr CSCSampler = Ptr( new TrackerSamplerCSC( CSCparameters ) ); + if( !sampler->addTrackerSamplerAlgorithm( CSCSampler ) ) + return false; + + //or add CSC sampler with default parameters + //sampler->addTrackerSamplerAlgorithm( "CSC" ); + + //Positive sampling + CSCSampler.staticCast()->setMode( TrackerSamplerCSC::MODE_INIT_POS ); + sampler->sampling( intImage, boundingBox ); + std::vector posSamples = sampler->getSamples(); + + //Negative sampling + CSCSampler.staticCast()->setMode( TrackerSamplerCSC::MODE_INIT_NEG ); + sampler->sampling( intImage, boundingBox ); + std::vector negSamples = sampler->getSamples(); + + if( posSamples.empty() || negSamples.empty() ) + return false; + + //compute HAAR features + TrackerFeatureHAAR::Params HAARparameters; + HAARparameters.numFeatures = params.featureSetNumFeatures; + HAARparameters.rectSize = Size( boundingBox.width, boundingBox.height ); + HAARparameters.isIntegral = true; + Ptr trackerFeature = Ptr( new TrackerFeatureHAAR( HAARparameters ) ); + featureSet->addTrackerFeature( trackerFeature ); + + 
featureSet->extraction( posSamples ); + const std::vector posResponse = featureSet->getResponses(); + + featureSet->extraction( negSamples ); + const std::vector negResponse = featureSet->getResponses(); + + model = Ptr( new TrackerMILModel( boundingBox ) ); + Ptr stateEstimator = Ptr( + new TrackerStateEstimatorMILBoosting( params.featureSetNumFeatures ) ); + model->setTrackerStateEstimator( stateEstimator ); + + //Run model estimation and update + model.staticCast()->setMode( TrackerMILModel::MODE_POSITIVE, posSamples ); + model->modelEstimation( posResponse ); + model.staticCast()->setMode( TrackerMILModel::MODE_NEGATIVE, negSamples ); + model->modelEstimation( negResponse ); + model->modelUpdate(); + + return true; +} + +bool TrackerMIL::updateImpl( const Mat& image, Rect& boundingBox ) +{ + Mat intImage; + compute_integral( image, intImage ); + + //get the last location [AAM] X(k-1) + Ptr lastLocation = model->getLastTargetState(); + Rect lastBoundingBox( lastLocation->getTargetPosition().x, lastLocation->getTargetPosition().y, lastLocation->getTargetWidth(), + lastLocation->getTargetHeight() ); + + //sampling new frame based on last location + ( sampler->getSamplers().at( 0 ).second ).staticCast()->setMode( TrackerSamplerCSC::MODE_DETECT ); + sampler->sampling( intImage, lastBoundingBox ); + std::vector detectSamples = sampler->getSamples(); + if( detectSamples.empty() ) + return false; + + /*//TODO debug samples + Mat f; + image.copyTo(f); + + for( size_t i = 0; i < detectSamples.size(); i=i+10 ) + { + Size sz; + Point off; + detectSamples.at(i).locateROI(sz, off); + rectangle(f, Rect(off.x,off.y,detectSamples.at(i).cols,detectSamples.at(i).rows), Scalar(255,0,0), 1); + }*/ + + //extract features from new samples + featureSet->extraction( detectSamples ); + std::vector response = featureSet->getResponses(); + + //predict new location + ConfidenceMap cmap; + model.staticCast()->setMode( TrackerMILModel::MODE_ESTIMATON, detectSamples ); + 
model.staticCast()->responseToConfidenceMap( response, cmap ); + model->getTrackerStateEstimator().staticCast()->setCurrentConfidenceMap( cmap ); + + if( !model->runStateEstimator() ) + { + return false; + } + + Ptr currentState = model->getLastTargetState(); + boundingBox = Rect( currentState->getTargetPosition().x, currentState->getTargetPosition().y, currentState->getTargetWidth(), + currentState->getTargetHeight() ); + + /*//TODO debug + rectangle(f, lastBoundingBox, Scalar(0,255,0), 1); + rectangle(f, boundingBox, Scalar(0,0,255), 1); + imshow("f", f); + //waitKey( 0 );*/ + + //sampling new frame based on new location + //Positive sampling + ( sampler->getSamplers().at( 0 ).second ).staticCast()->setMode( TrackerSamplerCSC::MODE_INIT_POS ); + sampler->sampling( intImage, boundingBox ); + std::vector posSamples = sampler->getSamples(); + + //Negative sampling + ( sampler->getSamplers().at( 0 ).second ).staticCast()->setMode( TrackerSamplerCSC::MODE_INIT_NEG ); + sampler->sampling( intImage, boundingBox ); + std::vector negSamples = sampler->getSamples(); + + if( posSamples.empty() || negSamples.empty() ) + return false; + + //extract features + featureSet->extraction( posSamples ); + std::vector posResponse = featureSet->getResponses(); + + featureSet->extraction( negSamples ); + std::vector negResponse = featureSet->getResponses(); + + //model estimate + model.staticCast()->setMode( TrackerMILModel::MODE_POSITIVE, posSamples ); + model->modelEstimation( posResponse ); + model.staticCast()->setMode( TrackerMILModel::MODE_NEGATIVE, negSamples ); + model->modelEstimation( negResponse ); + + //model update + model->modelUpdate(); + + return true; +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerMILModel.cpp b/modules/tracking/src/trackerMILModel.cpp new file mode 100644 index 000000000..b4cd2211d --- /dev/null +++ b/modules/tracking/src/trackerMILModel.cpp @@ -0,0 +1,126 @@ 
+/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" +#include "trackerMILModel.hpp" + +/** + * TrackerMILModel + */ + +namespace cv +{ + +TrackerMILModel::TrackerMILModel( const Rect& boundingBox ) +{ + currentSample.clear(); + mode = MODE_POSITIVE; + width = boundingBox.width; + height = boundingBox.height; + + Ptr initState = Ptr( + new TrackerStateEstimatorMILBoosting::TrackerMILTargetState( Point2f( boundingBox.x, boundingBox.y ), boundingBox.width, boundingBox.height, + true, Mat() ) ); + trajectory.push_back( initState ); +} + +void TrackerMILModel::responseToConfidenceMap( const std::vector& responses, ConfidenceMap& confidenceMap ) +{ + if( currentSample.empty() ) + { + CV_Error( -1, "The samples in Model estimation are empty" ); + return; + } + + for ( size_t i = 0; i < responses.size(); i++ ) + { + //for each column (one sample) there are #num_feature + //get informations from currentSample + for ( int j = 0; j < responses.at( i ).cols; j++ ) + { + + Size currentSize; + Point currentOfs; + currentSample.at( j ).locateROI( currentSize, currentOfs ); + bool foreground = false; + if( mode == MODE_POSITIVE || mode == MODE_ESTIMATON ) + { + foreground = true; + } + else if( mode == MODE_NEGATIVE ) + { + foreground = false; + } + + //get the column of the HAAR responses + Mat singleResponse = responses.at( i ).col( j ); + + //create the state + Ptr currentState = Ptr( + new TrackerStateEstimatorMILBoosting::TrackerMILTargetState( 
currentOfs, width, height, foreground, singleResponse ) ); + + confidenceMap.push_back( std::make_pair( currentState, 0 ) ); + + } + + } +} + +void TrackerMILModel::modelEstimationImpl( const std::vector& responses ) +{ + responseToConfidenceMap( responses, currentConfidenceMap ); + +} + +void TrackerMILModel::modelUpdateImpl() +{ + +} + +void TrackerMILModel::setMode( int trainingMode, const std::vector& samples ) +{ + currentSample.clear(); + currentSample = samples; + + mode = trainingMode; +} + +} diff --git a/modules/tracking/src/trackerMILModel.hpp b/modules/tracking/src/trackerMILModel.hpp new file mode 100644 index 000000000..213e6c71f --- /dev/null +++ b/modules/tracking/src/trackerMILModel.hpp @@ -0,0 +1,103 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. 
+ // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#ifndef __OPENCV_TRACKER_MIL_MODEL_HPP__ +#define __OPENCV_TRACKER_MIL_MODEL_HPP__ + +#include "opencv2/core.hpp" + +namespace cv +{ + +/** + * \brief Implementation of TrackerModel for MIL algorithm + */ +class TrackerMILModel : public TrackerModel +{ + public: + enum + { + MODE_POSITIVE = 1, // mode for positive features + MODE_NEGATIVE = 2, // mode for negative features + MODE_ESTIMATON = 3 // mode for estimation step + }; + + /** + * \brief Constructor + * \param boundingBox The first boundingBox + */ + TrackerMILModel( const Rect& boundingBox ); + + /** + * \brief Destructor + */ + ~TrackerMILModel() + { + } + ; + + /** + * \brief Set the mode + */ + void setMode( int trainingMode, const std::vector& samples ); + + /** + * \brief Create the ConfidenceMap from a list of responses + * \param responses The list of the responses + * \param confidenceMap The output + */ + void responseToConfidenceMap( const std::vector& responses, ConfidenceMap& confidenceMap ); + + protected: + void 
modelEstimationImpl( const std::vector& responses ); + void modelUpdateImpl(); + + private: + int mode; + std::vector currentSample; + + int width; //initial width of the boundingBox + int height; //initial height of the boundingBox +}; + +} /* namespace cv */ + +#endif diff --git a/modules/tracking/src/trackerModel.cpp b/modules/tracking/src/trackerModel.cpp new file mode 100644 index 000000000..79499d587 --- /dev/null +++ b/modules/tracking/src/trackerModel.cpp @@ -0,0 +1,178 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. 
+ // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" + +namespace cv +{ + +/* + * TrackerModel + */ + +TrackerModel::TrackerModel() +{ + stateEstimator = Ptr(); + maxCMLength = 1; +} + +TrackerModel::~TrackerModel() +{ + +} + +bool TrackerModel::setTrackerStateEstimator( Ptr trackerStateEstimator ) +{ + if( stateEstimator != 0 ) + { + return false; + } + + stateEstimator = trackerStateEstimator; + return true; +} + +Ptr TrackerModel::getTrackerStateEstimator() const +{ + return stateEstimator; +} + +void TrackerModel::modelEstimation( const std::vector& responses ) +{ + modelEstimationImpl( responses ); + +} + +void TrackerModel::clearCurrentConfidenceMap() +{ + currentConfidenceMap.clear(); +} + +void TrackerModel::modelUpdate() +{ + modelUpdateImpl(); + + if( maxCMLength != -1 && (int) confidenceMaps.size() >= maxCMLength - 1 ) + { + int l = maxCMLength / 2; + confidenceMaps.erase( confidenceMaps.begin(), confidenceMaps.begin() + l ); + } + if( maxCMLength != -1 && (int) trajectory.size() >= maxCMLength - 1 ) + { + int l = maxCMLength / 2; + trajectory.erase( trajectory.begin(), trajectory.begin() + l ); + } + confidenceMaps.push_back( currentConfidenceMap ); + 
stateEstimator->update( confidenceMaps ); + + clearCurrentConfidenceMap(); + +} + +bool TrackerModel::runStateEstimator() +{ + if( stateEstimator == 0 ) + { + CV_Error( -1, "Tracker state estimator is not setted" ); + return false; + } + Ptr targetState = stateEstimator->estimate( confidenceMaps ); + if( targetState == 0 ) + return false; + + setLastTargetState( targetState ); + return true; +} + +void TrackerModel::setLastTargetState( const Ptr& lastTargetState ) +{ + trajectory.push_back( lastTargetState ); +} + +Ptr TrackerModel::getLastTargetState() const +{ + return trajectory.back(); +} + +const std::vector& TrackerModel::getConfidenceMaps() const +{ + return confidenceMaps; +} + +const ConfidenceMap& TrackerModel::getLastConfidenceMap() const +{ + return confidenceMaps.back(); +} + +/* + * TrackerTargetState + */ + +Point2f TrackerTargetState::getTargetPosition() const +{ + return targetPosition; +} + +void TrackerTargetState::setTargetPosition( const Point2f& position ) +{ + targetPosition = position; +} + +int TrackerTargetState::getTargetWidth() const +{ + return targetWidth; +} + +void TrackerTargetState::setTargetWidth( int width ) +{ + targetWidth = width; +} +int TrackerTargetState::getTargetHeight() const +{ + return targetHeight; +} + +void TrackerTargetState::setTargetHeight( int height ) +{ + targetHeight = height; +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerSampler.cpp b/modules/tracking/src/trackerSampler.cpp new file mode 100644 index 000000000..2a0b591ff --- /dev/null +++ b/modules/tracking/src/trackerSampler.cpp @@ -0,0 +1,142 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. 
+ // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "precomp.hpp" + +namespace cv +{ + +/* + * TrackerSampler + */ + +/* + * Constructor + */ +TrackerSampler::TrackerSampler() +{ + blockAddTrackerSampler = false; +} + +/* + * Destructor + */ +TrackerSampler::~TrackerSampler() +{ + +} + +void TrackerSampler::sampling( const Mat& image, Rect boundingBox ) +{ + + clearSamples(); + + for ( size_t i = 0; i < samplers.size(); i++ ) + { + std::vector current_samples; + samplers[i].second->sampling( image, boundingBox, current_samples ); + + //push in samples all current_samples + for ( size_t j = 0; j < current_samples.size(); j++ ) + { + std::vector::iterator it = samples.end(); + samples.insert( it, current_samples.at( j ) ); + } + } + + if( !blockAddTrackerSampler ) + { + blockAddTrackerSampler = true; + } +} + +bool TrackerSampler::addTrackerSamplerAlgorithm( String trackerSamplerAlgorithmType ) +{ + if( blockAddTrackerSampler ) + { + return false; + } + Ptr sampler = TrackerSamplerAlgorithm::create( trackerSamplerAlgorithmType ); + + if( sampler == 0 ) + { + return false; + } + + samplers.push_back( std::make_pair( trackerSamplerAlgorithmType, sampler ) ); + + return true; +} + +bool TrackerSampler::addTrackerSamplerAlgorithm( Ptr& sampler ) +{ + if( blockAddTrackerSampler ) + { + return false; + } + + if( sampler == 0 ) + { + return false; + } + + String trackerSamplerAlgorithmType = sampler->getClassName(); + samplers.push_back( std::make_pair( trackerSamplerAlgorithmType, sampler ) ); + + return true; +} + +const std::vector > >& TrackerSampler::getSamplers() const +{ + return samplers; +} + +const std::vector& TrackerSampler::getSamples() const +{ + return samples; +} + +void TrackerSampler::clearSamples() +{ + samples.clear(); +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerSamplerAlgorithm.cpp b/modules/tracking/src/trackerSamplerAlgorithm.cpp new file mode 100644 index 000000000..41d2012cb --- /dev/null +++ b/modules/tracking/src/trackerSamplerAlgorithm.cpp @@ -0,0 
+1,382 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. 
+ // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" +#include + +#ifdef _WIN32 +#define TIME( arg ) (((double) clock()) / CLOCKS_PER_SEC) +#else +#define TIME( arg ) (time( arg )) +#endif + +namespace cv +{ + +/* + * TrackerSamplerAlgorithm + */ + +TrackerSamplerAlgorithm::~TrackerSamplerAlgorithm() +{ + +} + +bool TrackerSamplerAlgorithm::sampling( const Mat& image, Rect boundingBox, std::vector& sample ) +{ + if( image.empty() ) + return false; + + return samplingImpl( image, boundingBox, sample ); +} + +Ptr TrackerSamplerAlgorithm::create( const String& trackerSamplerType ) +{ + if( trackerSamplerType.find( "CSC" ) == 0 ) + { + return Ptr( new TrackerSamplerCSC() ); + } + + if( trackerSamplerType.find( "CS" ) == 0 ) + { + return Ptr( new TrackerSamplerCS() ); + } + + CV_Error( -1, "Tracker sampler algorithm type not supported" ); + return Ptr(); +} + +String TrackerSamplerAlgorithm::getClassName() const +{ + return className; +} + +/** + * TrackerSamplerCSC + */ + +/** + * Parameters + */ + +TrackerSamplerCSC::Params::Params() +{ + initInRad = 3; + initMaxNegNum = 65; + searchWinSize = 25; + trackInPosRad = 4; + trackMaxNegNum = 65; + trackMaxPosNum = 100000; + +} + +TrackerSamplerCSC::TrackerSamplerCSC( const TrackerSamplerCSC::Params ¶meters ) : + params( parameters ) +{ + className = "CSC"; + mode = MODE_INIT_POS; + rng = RNG( uint64( TIME( 0 ) ) ); + +} + +TrackerSamplerCSC::~TrackerSamplerCSC() +{ + +} + +bool 
TrackerSamplerCSC::samplingImpl( const Mat& image, Rect boundingBox, std::vector& sample ) +{ + float inrad = 0; + float outrad = 0; + int maxnum = 0; + + switch ( mode ) + { + case MODE_INIT_POS: + inrad = params.initInRad; + sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad ); + break; + case MODE_INIT_NEG: + inrad = 2.0f * params.searchWinSize; + outrad = 1.5f * params.initInRad; + maxnum = params.initMaxNegNum; + sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad, outrad, maxnum ); + break; + case MODE_TRACK_POS: + inrad = params.trackInPosRad; + outrad = 0; + maxnum = params.trackMaxPosNum; + sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad, outrad, maxnum ); + break; + case MODE_TRACK_NEG: + inrad = 1.5f * params.searchWinSize; + outrad = params.trackInPosRad + 5; + maxnum = params.trackMaxNegNum; + sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad, outrad, maxnum ); + break; + case MODE_DETECT: + inrad = params.searchWinSize; + sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad ); + break; + default: + inrad = params.initInRad; + sample = sampleImage( image, boundingBox.x, boundingBox.y, boundingBox.width, boundingBox.height, inrad ); + break; + } + return false; +} + +void TrackerSamplerCSC::setMode( int samplingMode ) +{ + mode = samplingMode; +} + +std::vector TrackerSamplerCSC::sampleImage( const Mat& img, int x, int y, int w, int h, float inrad, float outrad, int maxnum ) +{ + int rowsz = img.rows - h - 1; + int colsz = img.cols - w - 1; + float inradsq = inrad * inrad; + float outradsq = outrad * outrad; + int dist; + + uint minrow = max( 0, (int) y - (int) inrad ); + uint maxrow = min( (int) rowsz - 1, (int) y + (int) inrad ); + uint mincol = max( 0, (int) x - (int) inrad ); + uint 
maxcol = min( (int) colsz - 1, (int) x + (int) inrad ); + + //fprintf(stderr,"inrad=%f minrow=%d maxrow=%d mincol=%d maxcol=%d\n",inrad,minrow,maxrow,mincol,maxcol); + + std::vector samples; + samples.resize( ( maxrow - minrow + 1 ) * ( maxcol - mincol + 1 ) ); + int i = 0; + + float prob = ( (float) ( maxnum ) ) / samples.size(); + + for ( int r = minrow; r <= int( maxrow ); r++ ) + for ( int c = mincol; c <= int( maxcol ); c++ ) + { + dist = ( y - r ) * ( y - r ) + ( x - c ) * ( x - c ); + if( float( rng.uniform( 0.f, 1.f ) ) < prob && dist < inradsq && dist >= outradsq ) + { + samples[i] = img( Rect( c, r, w, h ) ); + i++; + } + } + + samples.resize( min( i, maxnum ) ); + return samples; +} +; + +/** + * TrackerSamplerCS + */ +TrackerSamplerCS::Params::Params() +{ + overlap = 0.99f; + searchFactor = 2; +} + +TrackerSamplerCS::TrackerSamplerCS( const TrackerSamplerCS::Params ¶meters ) : + params( parameters ) +{ + className = "CS"; + mode = MODE_POSITIVE; +} + +void TrackerSamplerCS::setMode( int samplingMode ) +{ + mode = samplingMode; +} + +TrackerSamplerCS::~TrackerSamplerCS() +{ + +} + +bool TrackerSamplerCS::samplingImpl( const Mat& image, Rect boundingBox, std::vector& sample ) +{ + + trackedPatch = boundingBox; + Size imageSize( image.cols, image.rows ); + validROI = Rect( 0, 0, imageSize.width, imageSize.height ); + + Size trackedPatchSize( trackedPatch.width, trackedPatch.height ); + Rect trackingROI = getTrackingROI( params.searchFactor ); + + sample = patchesRegularScan( image, trackingROI, trackedPatchSize ); + + return true; +} + +Rect TrackerSamplerCS::getTrackingROI( float searchFactor ) +{ + Rect searchRegion; + + searchRegion = RectMultiply( trackedPatch, searchFactor ); + //check + if( searchRegion.y + searchRegion.height > validROI.height ) + searchRegion.height = validROI.height - searchRegion.y; + if( searchRegion.x + searchRegion.width > validROI.width ) + searchRegion.width = validROI.width - searchRegion.x; + + return searchRegion; +} + 
+Rect TrackerSamplerCS::RectMultiply( const Rect & rect, float f ) +{ + cv::Rect r_tmp; + r_tmp.y = (int) ( rect.y - ( (float) rect.height * f - rect.height ) / 2 ); + if( r_tmp.y < 0 ) + r_tmp.y = 0; + r_tmp.x = (int) ( rect.x - ( (float) rect.width * f - rect.width ) / 2 ); + if( r_tmp.x < 0 ) + r_tmp.x = 0; + r_tmp.height = (int) ( rect.height * f ); + r_tmp.width = (int) ( rect.width * f ); + + return r_tmp; +} + +Rect TrackerSamplerCS::getROI() const +{ + return ROI; +} + +void TrackerSamplerCS::setCheckedROI( Rect imageROI ) +{ + int dCol, dRow; + dCol = imageROI.x - validROI.x; + dRow = imageROI.y - validROI.y; + ROI.y = ( dRow < 0 ) ? validROI.y : imageROI.y; + ROI.x = ( dCol < 0 ) ? validROI.x : imageROI.x; + dCol = imageROI.x + imageROI.width - ( validROI.x + validROI.width ); + dRow = imageROI.y + imageROI.height - ( validROI.y + validROI.height ); + ROI.height = ( dRow > 0 ) ? validROI.height + validROI.y - ROI.y : imageROI.height + imageROI.y - ROI.y; + ROI.width = ( dCol > 0 ) ? 
validROI.width + validROI.x - ROI.x : imageROI.width + imageROI.x - ROI.x; +} + +std::vector TrackerSamplerCS::patchesRegularScan( const Mat& image, Rect trackingROI, Size patchSize ) +{ + std::vector sample; + if( ( validROI == trackingROI ) ) + ROI = trackingROI; + else + setCheckedROI( trackingROI ); + + if( mode == MODE_POSITIVE ) + { + int num = 4; + sample.resize( num ); + Mat singleSample = image( trackedPatch ); + for ( int i = 0; i < num; i++ ) + sample[i] = singleSample; + return sample; + } + + int stepCol = (int) floor( ( 1.0f - params.overlap ) * (float) patchSize.width + 0.5f ); + int stepRow = (int) floor( ( 1.0f - params.overlap ) * (float) patchSize.height + 0.5f ); + if( stepCol <= 0 ) + stepCol = 1; + if( stepRow <= 0 ) + stepRow = 1; + + Size m_patchGrid; + Rect m_rectUpperLeft; + Rect m_rectUpperRight; + Rect m_rectLowerLeft; + Rect m_rectLowerRight; + int num; + + m_patchGrid.height = ( (int) ( (float) ( ROI.height - patchSize.height ) / stepRow ) + 1 ); + m_patchGrid.width = ( (int) ( (float) ( ROI.width - patchSize.width ) / stepCol ) + 1 ); + + num = m_patchGrid.width * m_patchGrid.height; + sample.resize( num ); + int curPatch = 0; + + m_rectUpperLeft = m_rectUpperRight = m_rectLowerLeft = m_rectLowerRight = cv::Rect( 0, 0, patchSize.width, patchSize.height ); + m_rectUpperLeft.y = ROI.y; + m_rectUpperLeft.x = ROI.x; + m_rectUpperRight.y = ROI.y; + m_rectUpperRight.x = ROI.x + ROI.width - patchSize.width; + m_rectLowerLeft.y = ROI.y + ROI.height - patchSize.height; + m_rectLowerLeft.x = ROI.x; + m_rectLowerRight.y = ROI.y + ROI.height - patchSize.height; + m_rectLowerRight.x = ROI.x + ROI.width - patchSize.width; + + if( mode == MODE_NEGATIVE ) + { + int numSamples = 4; + sample.resize( numSamples ); + sample[0] = image( m_rectUpperLeft ); + sample[1] = image( m_rectUpperRight ); + sample[2] = image( m_rectLowerLeft ); + sample[3] = image( m_rectLowerRight ); + return sample; + } + + int numPatchesX; + int numPatchesY; + + numPatchesX = 0; 
+ numPatchesY = 0; + for ( int curRow = 0; curRow < ROI.height - patchSize.height + 1; curRow += stepRow ) + { + numPatchesY++; + + for ( int curCol = 0; curCol < ROI.width - patchSize.width + 1; curCol += stepCol ) + { + if( curRow == 0 ) + numPatchesX++; + + Mat singleSample = image( Rect( curCol + ROI.x, curRow + ROI.y, patchSize.width, patchSize.height ) ); + sample[curPatch] = singleSample; + curPatch++; + } + } + + CV_Assert( curPatch == num ); + + return sample; +} + +} /* namespace cv */ diff --git a/modules/tracking/src/trackerStateEstimator.cpp b/modules/tracking/src/trackerStateEstimator.cpp new file mode 100644 index 000000000..3d1f505b9 --- /dev/null +++ b/modules/tracking/src/trackerStateEstimator.cpp @@ -0,0 +1,445 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. + // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. 
+ // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. + // + //M*/ + +#include "precomp.hpp" + +namespace cv +{ + +/* + * TrackerStateEstimator + */ + +TrackerStateEstimator::~TrackerStateEstimator() +{ + +} + +Ptr TrackerStateEstimator::estimate( const std::vector& confidenceMaps ) +{ + if( confidenceMaps.empty() ) + return Ptr(); + + return estimateImpl( confidenceMaps ); + +} + +void TrackerStateEstimator::update( std::vector& confidenceMaps ) +{ + if( confidenceMaps.empty() ) + return; + + return updateImpl( confidenceMaps ); + +} + +Ptr TrackerStateEstimator::create( const String& trackeStateEstimatorType ) +{ + + if( trackeStateEstimatorType.find( "SVM" ) == 0 ) + { + return Ptr( new TrackerStateEstimatorSVM() ); + } + + if( trackeStateEstimatorType.find( "BOOSTING" ) == 0 ) + { + return Ptr( new TrackerStateEstimatorMILBoosting() ); + } + + CV_Error( -1, "Tracker state estimator type not supported" ); + return Ptr(); +} + +String TrackerStateEstimator::getClassName() const +{ + return className; +} + +/** + * 
TrackerStateEstimatorMILBoosting::TrackerMILTargetState + */ +TrackerStateEstimatorMILBoosting::TrackerMILTargetState::TrackerMILTargetState( const Point2f& position, int width, int height, bool foreground, + const Mat& features ) +{ + setTargetPosition( position ); + setTargetWidth( width ); + setTargetHeight( height ); + setTargetFg( foreground ); + setFeatures( features ); +} + +void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setTargetFg( bool foreground ) +{ + isTarget = foreground; +} + +void TrackerStateEstimatorMILBoosting::TrackerMILTargetState::setFeatures( const Mat& features ) +{ + targetFeatures = features; +} + +bool TrackerStateEstimatorMILBoosting::TrackerMILTargetState::isTargetFg() const +{ + return isTarget; +} + +Mat TrackerStateEstimatorMILBoosting::TrackerMILTargetState::getFeatures() const +{ + return targetFeatures; +} + +TrackerStateEstimatorMILBoosting::TrackerStateEstimatorMILBoosting( int nFeatures ) +{ + className = "BOOSTING"; + trained = false; + numFeatures = nFeatures; +} + +TrackerStateEstimatorMILBoosting::~TrackerStateEstimatorMILBoosting() +{ + +} + +void TrackerStateEstimatorMILBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap ) +{ + currentConfidenceMap.clear(); + currentConfidenceMap = confidenceMap; +} + +uint TrackerStateEstimatorMILBoosting::max_idx( const std::vector &v ) +{ + const float* findPtr = & ( *std::max_element( v.begin(), v.end() ) ); + const float* beginPtr = & ( *v.begin() ); + return (uint) ( findPtr - beginPtr ); +} + +Ptr TrackerStateEstimatorMILBoosting::estimateImpl( const std::vector& /*confidenceMaps*/) +{ + //run ClfMilBoost classify in order to compute next location + if( currentConfidenceMap.empty() ) + return Ptr(); + + Mat positiveStates; + Mat negativeStates; + + prepareData( currentConfidenceMap, positiveStates, negativeStates ); + + std::vector prob = boostMILModel.classify( positiveStates ); + + int bestind = max_idx( prob ); + //float resp = prob[bestind]; + + 
return currentConfidenceMap.at( bestind ).first; +} + +void TrackerStateEstimatorMILBoosting::prepareData( const ConfidenceMap& confidenceMap, Mat& positive, Mat& negative ) +{ + + int posCounter = 0; + int negCounter = 0; + + for ( size_t i = 0; i < confidenceMap.size(); i++ ) + { + Ptr currentTargetState = confidenceMap.at( i ).first.staticCast(); + if( currentTargetState->isTargetFg() ) + posCounter++; + else + negCounter++; + } + + positive.create( posCounter, numFeatures, CV_32FC1 ); + negative.create( negCounter, numFeatures, CV_32FC1 ); + + //TODO change with mat fast access + //initialize trainData (positive and negative) + + int pc = 0; + int nc = 0; + for ( size_t i = 0; i < confidenceMap.size(); i++ ) + { + Ptr currentTargetState = confidenceMap.at( i ).first.staticCast(); + Mat stateFeatures = currentTargetState->getFeatures(); + + if( currentTargetState->isTargetFg() ) + { + for ( int j = 0; j < stateFeatures.rows; j++ ) + { + //fill the positive trainData with the value of the feature j for sample i + positive.at( pc, j ) = stateFeatures.at( j, 0 ); + } + pc++; + } + else + { + for ( int j = 0; j < stateFeatures.rows; j++ ) + { + //fill the negative trainData with the value of the feature j for sample i + negative.at( nc, j ) = stateFeatures.at( j, 0 ); + } + nc++; + } + + } +} + +void TrackerStateEstimatorMILBoosting::updateImpl( std::vector& confidenceMaps ) +{ + + if( !trained ) + { + //this is the first time that the classifier is built + //init MIL + boostMILModel.init(); + trained = true; + } + + ConfidenceMap lastConfidenceMap = confidenceMaps.back(); + Mat positiveStates; + Mat negativeStates; + + prepareData( lastConfidenceMap, positiveStates, negativeStates ); + //update MIL + boostMILModel.update( positiveStates, negativeStates ); + +} + +/** + * TrackerStateEstimatorAdaBoosting + */ +TrackerStateEstimatorAdaBoosting::TrackerStateEstimatorAdaBoosting( int numClassifer, int initIterations, int nFeatures, Size patchSize, const Rect& ROI ) +{ 
+ className = "ADABOOSTING"; + numBaseClassifier = numClassifer; + numFeatures = nFeatures; + iterationInit = initIterations; + initPatchSize = patchSize; + trained = false; + sampleROI = ROI; + +} + +Rect TrackerStateEstimatorAdaBoosting::getSampleROI() const +{ + return sampleROI; +} + +void TrackerStateEstimatorAdaBoosting::setSampleROI( const Rect& ROI ) +{ + sampleROI = ROI; +} + +/** + * TrackerAdaBoostingTargetState::TrackerAdaBoostingTargetState + */ +TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::TrackerAdaBoostingTargetState( const Point2f& position, int width, int height, + bool foreground, const Mat& responses ) +{ + setTargetPosition( position ); + setTargetWidth( width ); + setTargetHeight( height ); + + setTargetFg( foreground ); + setTargetResponses( responses ); +} + +void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetFg( bool foreground ) +{ + isTarget = foreground; +} + +bool TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::isTargetFg() const +{ + return isTarget; +} + +void TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::setTargetResponses( const Mat& responses ) +{ + targetResponses = responses; +} + +Mat TrackerStateEstimatorAdaBoosting::TrackerAdaBoostingTargetState::getTargetResponses() const +{ + return targetResponses; +} + +TrackerStateEstimatorAdaBoosting::~TrackerStateEstimatorAdaBoosting() +{ + +} +void TrackerStateEstimatorAdaBoosting::setCurrentConfidenceMap( ConfidenceMap& confidenceMap ) +{ + currentConfidenceMap.clear(); + currentConfidenceMap = confidenceMap; +} + +std::vector TrackerStateEstimatorAdaBoosting::computeReplacedClassifier() +{ + return replacedClassifier; +} + +std::vector TrackerStateEstimatorAdaBoosting::computeSwappedClassifier() +{ + return swappedClassifier; +} + +std::vector TrackerStateEstimatorAdaBoosting::computeSelectedWeakClassifier() +{ + return boostClassifier->getSelectedWeakClassifier(); +} + +Ptr 
TrackerStateEstimatorAdaBoosting::estimateImpl( const std::vector& /*confidenceMaps*/ ) +{ + //run classify in order to compute next location + if( currentConfidenceMap.empty() ) + return Ptr(); + + std::vector images; + + for ( size_t i = 0; i < currentConfidenceMap.size(); i++ ) + { + Ptr currentTargetState = currentConfidenceMap.at( i ).first.staticCast(); + images.push_back( currentTargetState->getTargetResponses() ); + } + + int bestIndex; + boostClassifier->classifySmooth( images, sampleROI, bestIndex ); + + // get bestIndex from classifySmooth + return currentConfidenceMap.at( bestIndex ).first; + +} + +void TrackerStateEstimatorAdaBoosting::updateImpl( std::vector& confidenceMaps ) +{ + if( !trained ) + { + //this is the first time that the classifier is built + int numWeakClassifier = numBaseClassifier * 10; + + bool useFeatureExchange = true; + boostClassifier = Ptr( + new StrongClassifierDirectSelection( numBaseClassifier, numWeakClassifier, initPatchSize, sampleROI, useFeatureExchange, iterationInit ) ); + //init base classifiers + boostClassifier->initBaseClassifier(); + + trained = true; + } + + ConfidenceMap lastConfidenceMap = confidenceMaps.back(); + bool featureEx = boostClassifier->getUseFeatureExchange(); + + replacedClassifier.clear(); + replacedClassifier.resize( lastConfidenceMap.size(), -1 ); + swappedClassifier.clear(); + swappedClassifier.resize( lastConfidenceMap.size(), -1 ); + + for ( size_t i = 0; i < lastConfidenceMap.size() / 2; i++ ) + { + Ptr currentTargetState = lastConfidenceMap.at( i ).first.staticCast(); + + int currentFg = 1; + if( !currentTargetState->isTargetFg() ) + currentFg = -1; + Mat res = currentTargetState->getTargetResponses(); + + boostClassifier->update( res, currentFg ); + if( featureEx ) + { + replacedClassifier[i] = boostClassifier->getReplacedClassifier(); + swappedClassifier[i] = boostClassifier->getSwappedClassifier(); + if( replacedClassifier[i] >= 0 && swappedClassifier[i] >= 0 ) + 
boostClassifier->replaceWeakClassifier( replacedClassifier[i] ); + } + else + { + replacedClassifier[i] = -1; + swappedClassifier[i] = -1; + } + + int mapPosition = i + lastConfidenceMap.size() / 2; + Ptr currentTargetState2 = lastConfidenceMap.at( mapPosition ).first.staticCast(); + + currentFg = 1; + if( !currentTargetState2->isTargetFg() ) + currentFg = -1; + const Mat res2 = currentTargetState2->getTargetResponses(); + + boostClassifier->update( res2, currentFg ); + if( featureEx ) + { + replacedClassifier[mapPosition] = boostClassifier->getReplacedClassifier(); + swappedClassifier[mapPosition] = boostClassifier->getSwappedClassifier(); + if( replacedClassifier[mapPosition] >= 0 && swappedClassifier[mapPosition] >= 0 ) + boostClassifier->replaceWeakClassifier( replacedClassifier[mapPosition] ); + } + else + { + replacedClassifier[mapPosition] = -1; + swappedClassifier[mapPosition] = -1; + } + } + +} + +/** + * TrackerStateEstimatorSVM + */ +TrackerStateEstimatorSVM::TrackerStateEstimatorSVM() +{ + className = "SVM"; +} + +TrackerStateEstimatorSVM::~TrackerStateEstimatorSVM() +{ + +} + +Ptr TrackerStateEstimatorSVM::estimateImpl( const std::vector& confidenceMaps ) +{ + return confidenceMaps.back().back().first; +} + +void TrackerStateEstimatorSVM::updateImpl( std::vector& /*confidenceMaps*/) +{ + +} + +} /* namespace cv */ diff --git a/modules/tracking/src/tracking_init.cpp b/modules/tracking/src/tracking_init.cpp new file mode 100644 index 000000000..f203058f3 --- /dev/null +++ b/modules/tracking/src/tracking_init.cpp @@ -0,0 +1,61 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. 
+ // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "precomp.hpp" +#include "opencv2/tracking.hpp" + +namespace cv +{ + +CV_INIT_ALGORITHM(TrackerMIL, "TRACKER.MIL",); + +CV_INIT_ALGORITHM(TrackerBoosting, "TRACKER.BOOSTING",); + +bool initModule_tracking(void) +{ + bool all = true; + all &= !TrackerMIL_info_auto.name().empty(); + all &= !TrackerBoosting_info_auto.name().empty(); + + return all; +} + +} diff --git a/modules/tracking/test/test_main.cpp b/modules/tracking/test/test_main.cpp new file mode 100644 index 000000000..6b2499344 --- /dev/null +++ b/modules/tracking/test/test_main.cpp @@ -0,0 +1,3 @@ +#include "test_precomp.hpp" + +CV_TEST_MAIN("cv") diff --git a/modules/tracking/test/test_precomp.hpp b/modules/tracking/test/test_precomp.hpp new file mode 100644 index 000000000..b83795189 --- /dev/null +++ b/modules/tracking/test/test_precomp.hpp @@ -0,0 +1,18 @@ +#ifdef __GNUC__ +# pragma GCC diagnostic ignored "-Wmissing-declarations" +# if defined __clang__ || defined __APPLE__ +# pragma GCC diagnostic ignored "-Wmissing-prototypes" +# pragma GCC diagnostic ignored "-Wextra" +# endif +#endif + +#ifndef __OPENCV_TEST_PRECOMP_HPP__ +#define __OPENCV_TEST_PRECOMP_HPP__ + +#include +#include "opencv2/ts.hpp" +#include "opencv2/imgproc.hpp" +#include "opencv2/tracking.hpp" +#include "opencv2/highgui.hpp" + +#endif diff --git a/modules/tracking/test/test_tracker.cpp b/modules/tracking/test/test_tracker.cpp new file mode 100644 index 000000000..900772b92 --- /dev/null +++ b/modules/tracking/test/test_tracker.cpp @@ -0,0 +1,130 @@ +/*M/////////////////////////////////////////////////////////////////////////////////////// + // + // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. + // + // By downloading, copying, installing or using the software you agree to this license. + // If you do not agree to this license, do not download, install, + // copy or use the software. 
+ // + // + // License Agreement + // For Open Source Computer Vision Library + // + // Copyright (C) 2013, OpenCV Foundation, all rights reserved. + // Third party copyrights are property of their respective owners. + // + // Redistribution and use in source and binary forms, with or without modification, + // are permitted provided that the following conditions are met: + // + // * Redistribution's of source code must retain the above copyright notice, + // this list of conditions and the following disclaimer. + // + // * Redistribution's in binary form must reproduce the above copyright notice, + // this list of conditions and the following disclaimer in the documentation + // and/or other materials provided with the distribution. + // + // * The name of the copyright holders may not be used to endorse or promote products + // derived from this software without specific prior written permission. + // + // This software is provided by the copyright holders and contributors "as is" and + // any express or implied warranties, including, but not limited to, the implied + // warranties of merchantability and fitness for a particular purpose are disclaimed. + // In no event shall the Intel Corporation or contributors be liable for any direct, + // indirect, incidental, special, exemplary, or consequential damages + // (including, but not limited to, procurement of substitute goods or services; + // loss of use, data, or profits; or business interruption) however caused + // and on any theory of liability, whether in contract, strict liability, + // or tort (including negligence or otherwise) arising in any way out of + // the use of this software, even if advised of the possibility of such damage. 
+ // + //M*/ + +#include "test_precomp.hpp" +#include "opencv2/tracking.hpp" + +using namespace cv; +using namespace std; + +class CV_TrackerBaseTest : public cvtest::BaseTest +{ + public: + CV_TrackerBaseTest(); + virtual ~CV_TrackerBaseTest(); + +}; + +CV_TrackerBaseTest::CV_TrackerBaseTest() +{ + +} + +CV_TrackerBaseTest::~CV_TrackerBaseTest() +{ + +} + +/************************************ TrackerMIL ************************************/ + +class CV_TrackerMILTest : public CV_TrackerBaseTest +{ + public: + CV_TrackerMILTest(); + ~CV_TrackerMILTest(); + + protected: + void run( int ); +}; + +CV_TrackerMILTest::CV_TrackerMILTest() +{ +} + +CV_TrackerMILTest::~CV_TrackerMILTest() +{ +} + +void CV_TrackerMILTest::run( int ) +{ + ts->set_failed_test_info( cvtest::TS::FAIL_GENERIC ); + ts->printf( cvtest::TS::LOG, "CV_TrackerMILTest to be implemented" ); +} + +TEST(DISABLED_Tracking_TrackerMIL, accuracy) +{ + CV_TrackerMILTest test; + test.safe_run(); +} + +/************************************ TrackerBoosting ************************************/ + +class CV_TrackerBoostingTest : public CV_TrackerBaseTest +{ + public: + CV_TrackerBoostingTest(); + ~CV_TrackerBoostingTest(); + + protected: + void run( int ); +}; + +CV_TrackerBoostingTest::CV_TrackerBoostingTest() +{ +} + +CV_TrackerBoostingTest::~CV_TrackerBoostingTest() +{ +} + +void CV_TrackerBoostingTest::run( int ) +{ + ts->set_failed_test_info( cvtest::TS::FAIL_GENERIC ); + ts->printf( cvtest::TS::LOG, "CV_TrackerBoostingTest to be implemented" ); +} + +TEST(DISABLED_Tracking_TrackerBoosting, accuracy) +{ + CV_TrackerBoostingTest test; + test.safe_run(); +} + +/* End of file. */