Revert opencv_videostab to the state of 2.4.2

pull/51/head
Andrey Kamaev 12 years ago
parent e5437e5486
commit 7225f89ea2
  1. 2
      CMakeLists.txt
  2. 39
      cmake/OpenCVFindLibsPerf.cmake
  3. 2
      modules/videostab/CMakeLists.txt
  4. 56
      modules/videostab/doc/fast_marching.rst
  5. 301
      modules/videostab/doc/global_motion.rst
  6. 13
      modules/videostab/doc/introduction.rst
  7. 10
      modules/videostab/doc/videostab.rst
  8. 5
      modules/videostab/include/opencv2/videostab/deblurring.hpp
  9. 11
      modules/videostab/include/opencv2/videostab/frame_source.hpp
  10. 194
      modules/videostab/include/opencv2/videostab/global_motion.hpp
  11. 25
      modules/videostab/include/opencv2/videostab/inpainting.hpp
  12. 108
      modules/videostab/include/opencv2/videostab/motion_core.hpp
  13. 99
      modules/videostab/include/opencv2/videostab/motion_stabilizing.hpp
  14. 33
      modules/videostab/include/opencv2/videostab/optical_flow.hpp
  15. 96
      modules/videostab/include/opencv2/videostab/outlier_rejection.hpp
  16. 67
      modules/videostab/include/opencv2/videostab/ring_buffer.hpp
  17. 58
      modules/videostab/include/opencv2/videostab/stabilizer.hpp
  18. 7
      modules/videostab/include/opencv2/videostab/videostab.hpp
  19. 139
      modules/videostab/include/opencv2/videostab/wobble_suppression.hpp
  20. 76
      modules/videostab/src/clp.hpp
  21. 1
      modules/videostab/src/deblurring.cpp
  22. 1
      modules/videostab/src/fast_marching.cpp
  23. 78
      modules/videostab/src/frame_source.cpp
  24. 617
      modules/videostab/src/global_motion.cpp
  25. 62
      modules/videostab/src/inpainting.cpp
  26. 527
      modules/videostab/src/motion_stabilizing.cpp
  27. 52
      modules/videostab/src/optical_flow.cpp
  28. 201
      modules/videostab/src/outlier_rejection.cpp
  29. 27
      modules/videostab/src/precomp.hpp
  30. 343
      modules/videostab/src/stabilizer.cpp
  31. 156
      modules/videostab/src/wobble_suppression.cpp
  32. 637
      samples/cpp/videostab.cpp

@ -140,7 +140,6 @@ OCV_OPTION(WITH_V4L "Include Video 4 Linux support" ON
OCV_OPTION(WITH_VIDEOINPUT "Build HighGUI with DirectShow support" ON IF WIN32 )
OCV_OPTION(WITH_XIMEA "Include XIMEA cameras support" OFF IF (NOT ANDROID AND NOT APPLE) )
OCV_OPTION(WITH_XINE "Include Xine support (GPL)" OFF IF (UNIX AND NOT APPLE AND NOT ANDROID) )
OCV_OPTION(WITH_CLP "Include Clp support (EPL)" OFF)
OCV_OPTION(WITH_OPENCL "Include OpenCL Runtime support" OFF IF (NOT ANDROID AND NOT IOS) )
OCV_OPTION(WITH_OPENCLAMDFFT "Include AMD OpenCL FFT library support" OFF IF (NOT ANDROID AND NOT IOS) )
OCV_OPTION(WITH_OPENCLAMDBLAS "Include AMD OpenCL BLAS library support" OFF IF (NOT ANDROID AND NOT IOS) )
@ -763,7 +762,6 @@ endif(DEFINED WITH_CUDA)
status(" Use OpenCL:" HAVE_OPENCL THEN YES ELSE NO)
status(" Use Eigen:" HAVE_EIGEN THEN "YES (ver ${EIGEN_WORLD_VERSION}.${EIGEN_MAJOR_VERSION}.${EIGEN_MINOR_VERSION})" ELSE NO)
status(" Use Clp:" HAVE_CLP THEN YES ELSE NO)
if(HAVE_CUDA)
status("")

@ -43,42 +43,3 @@ if(WITH_EIGEN)
set(HAVE_EIGEN 1)
endif()
endif(WITH_EIGEN)
# --- Clp ---
# Ubuntu: sudo apt-get install coinor-libclp-dev coinor-libcoinutils-dev
ocv_clear_vars(HAVE_CLP)
if(WITH_CLP)
if(UNIX)
PKG_CHECK_MODULES(CLP clp)
if(CLP_FOUND)
set(HAVE_CLP TRUE)
if(NOT ${CLP_INCLUDE_DIRS} STREQUAL "")
ocv_include_directories(${CLP_INCLUDE_DIRS})
endif()
link_directories(${CLP_LIBRARY_DIRS})
set(OPENCV_LINKER_LIBS ${OPENCV_LINKER_LIBS} ${CLP_LIBRARIES})
endif()
endif()
if(NOT CLP_FOUND)
find_path(CLP_INCLUDE_PATH "coin"
PATHS "/usr/local/include" "/usr/include" "/opt/include"
DOC "The path to Clp headers")
if(CLP_INCLUDE_PATH)
ocv_include_directories(${CLP_INCLUDE_PATH} "${CLP_INCLUDE_PATH}/coin")
get_filename_component(_CLP_LIBRARY_DIR "${CLP_INCLUDE_PATH}/../lib" ABSOLUTE)
set(CLP_LIBRARY_DIR "${_CLP_LIBRARY_DIR}" CACHE PATH "Full path of Clp library directory")
link_directories(${CLP_LIBRARY_DIR})
if(UNIX)
set(OPENCV_LINKER_LIBS ${OPENCV_LINKER_LIBS} Clp CoinUtils m)
else()
if(MINGW)
set(OPENCV_LINKER_LIBS ${OPENCV_LINKER_LIBS} Clp CoinUtils)
else()
set(OPENCV_LINKER_LIBS ${OPENCV_LINKER_LIBS} libClp libCoinUtils)
endif()
endif()
set(HAVE_CLP TRUE)
endif()
endif()
endif(WITH_CLP)

@ -1,3 +1,3 @@
set(the_description "Video stabilization")
ocv_define_module(videostab opencv_imgproc opencv_features2d opencv_video opencv_photo opencv_calib3d OPTIONAL opencv_gpu opencv_highgui)
ocv_define_module(videostab opencv_imgproc opencv_features2d opencv_video opencv_photo opencv_calib3d opencv_highgui OPTIONAL opencv_gpu)

@ -1,56 +0,0 @@
Fast Marching Method
====================
.. highlight:: cpp
The Fast Marching Method [T04]_ is used in one of the video stabilization routines to do motion and color inpainting. The method is implemented in a flexible way and it's made public for other users.
videostab::FastMarchingMethod
-----------------------------
.. ocv:class:: videostab::FastMarchingMethod
Describes the Fast Marching Method implementation.
::
class CV_EXPORTS FastMarchingMethod
{
public:
FastMarchingMethod();
template <typename Inpaint>
Inpaint run(const Mat &mask, Inpaint inpaint);
Mat distanceMap() const;
};
videostab::FastMarchingMethod::FastMarchingMethod
-------------------------------------------------
Constructor.
.. ocv:function:: videostab::FastMarchingMethod::FastMarchingMethod()
videostab::FastMarchingMethod::run
----------------------------------
Template method that runs the Fast Marching Method.
.. ocv:function:: Inpaint FastMarchingMethod::run(const Mat &mask, Inpaint inpaint)
:param mask: Image mask. ``0`` value indicates that the pixel value must be inpainted, ``255`` indicates that the pixel value is known, other values aren't acceptable.
:param inpaint: Inpainting functor that overloads ``void operator ()(int x, int y)``.
:return: Inpainting functor.
videostab::FastMarchingMethod::distanceMap
------------------------------------------
.. ocv:function:: Mat videostab::FastMarchingMethod::distanceMap() const
:return: Distance map that's created during working of the method.

@ -1,301 +0,0 @@
Global Motion Estimation
========================
.. highlight:: cpp
The video stabilization module contains a set of functions and classes for global motion estimation between point clouds or between images. In the last case features are extracted and matched internally. For the sake of convenience the motion estimation functions are wrapped into classes. Both the functions and the classes are available.
videostab::MotionModel
----------------------
.. ocv:class:: videostab::MotionModel
Describes motion model between two point clouds.
::
enum MotionModel
{
MM_TRANSLATION = 0,
MM_TRANSLATION_AND_SCALE = 1,
MM_ROTATION = 2,
MM_RIGID = 3,
MM_SIMILARITY = 4,
MM_AFFINE = 5,
MM_HOMOGRAPHY = 6,
MM_UNKNOWN = 7
};
videostab::RansacParams
-----------------------
.. ocv:class:: videostab::RansacParams
Describes RANSAC method parameters.
::
struct CV_EXPORTS RansacParams
{
int size; // subset size
float thresh; // max error to classify as inlier
float eps; // max outliers ratio
float prob; // probability of success
RansacParams() : size(0), thresh(0), eps(0), prob(0) {}
RansacParams(int size, float thresh, float eps, float prob);
int niters() const;
static RansacParams default2dMotion(MotionModel model);
};
videostab::RansacParams::RansacParams
-------------------------------------
.. ocv:function:: RansacParams::RansacParams()
:return: RANSAC method empty parameters object.
videostab::RansacParams::RansacParams
-------------------------------------
.. ocv:function:: RansacParams::RansacParams(int size, float thresh, float eps, float prob)
:param size: Subset size.
:param thresh: Maximum re-projection error value to classify as inlier.
:param eps: Maximum ratio of incorrect correspondences.
:param prob: Required success probability.
:return: RANSAC method parameters object.
videostab::RansacParams::niters
-------------------------------
.. ocv:function:: int RansacParams::niters() const
:return: Number of iterations that'll be performed by RANSAC method.
videostab::RansacParams::default2dMotion
----------------------------------------
.. ocv:function:: static RansacParams RansacParams::default2dMotion(MotionModel model)
:param model: Motion model. See :ocv:class:`videostab::MotionModel`.
:return: Default RANSAC method parameters for the given motion model.
videostab::estimateGlobalMotionLeastSquares
-------------------------------------------
Estimates best global motion between two 2D point clouds in the least-squares sense.
.. note:: Works in-place and changes input point arrays.
.. ocv:function:: Mat estimateGlobalMotionLeastSquares(InputOutputArray points0, InputOutputArray points1, int model = MM_AFFINE, float *rmse = 0)
:param points0: Source set of 2D points (``32F``).
:param points1: Destination set of 2D points (``32F``).
:param model: Motion model (up to ``MM_AFFINE``).
:param rmse: Final root-mean-square error.
:return: 3x3 2D transformation matrix (``32F``).
videostab::estimateGlobalMotionRansac
-------------------------------------
Estimates best global motion between two 2D point clouds robustly (using RANSAC method).
.. ocv:function:: Mat estimateGlobalMotionRansac(InputArray points0, InputArray points1, int model = MM_AFFINE, const RansacParams &params = RansacParams::default2dMotion(MM_AFFINE), float *rmse = 0, int *ninliers = 0)
:param points0: Source set of 2D points (``32F``).
:param points1: Destination set of 2D points (``32F``).
:param model: Motion model. See :ocv:class:`videostab::MotionModel`.
:param params: RANSAC method parameters. See :ocv:class:`videostab::RansacParams`.
:param rmse: Final root-mean-square error.
:param ninliers: Final number of inliers.
videostab::getMotion
--------------------
Computes motion between two frames assuming that all the intermediate motions are known.
.. ocv:function:: Mat getMotion(int from, int to, const std::vector<Mat> &motions)
:param from: Source frame index.
:param to: Destination frame index.
:param motions: Pair-wise motions. ``motions[i]`` denotes motion from the frame ``i`` to the frame ``i+1``
:return: Motion from the frame ``from`` to the frame ``to``.
videostab::MotionEstimatorBase
------------------------------
.. ocv:class:: videostab::MotionEstimatorBase
Base class for all global motion estimation methods.
::
class CV_EXPORTS MotionEstimatorBase
{
public:
virtual ~MotionEstimatorBase();
virtual void setMotionModel(MotionModel val);
virtual MotionModel motionModel() const;
virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0) = 0;
};
videostab::MotionEstimatorBase::setMotionModel
----------------------------------------------
Sets motion model.
.. ocv:function:: void MotionEstimatorBase::setMotionModel(MotionModel val)
:param val: Motion model. See :ocv:class:`videostab::MotionModel`.
videostab::MotionEstimatorBase::motionModel
----------------------------------------------
.. ocv:function:: MotionModel MotionEstimatorBase::motionModel() const
:return: Motion model. See :ocv:class:`videostab::MotionModel`.
videostab::MotionEstimatorBase::estimate
----------------------------------------
Estimates global motion between two 2D point clouds.
.. ocv:function:: Mat MotionEstimatorBase::estimate(InputArray points0, InputArray points1, bool *ok = 0)
:param points0: Source set of 2D points (``32F``).
:param points1: Destination set of 2D points (``32F``).
:param ok: Indicates whether motion was estimated successfully.
:return: 3x3 2D transformation matrix (``32F``).
videostab::MotionEstimatorRansacL2
----------------------------------
.. ocv:class:: videostab::MotionEstimatorRansacL2
Describes a robust RANSAC-based global 2D motion estimation method which minimizes L2 error.
::
class CV_EXPORTS MotionEstimatorRansacL2 : public MotionEstimatorBase
{
public:
MotionEstimatorRansacL2(MotionModel model = MM_AFFINE);
void setRansacParams(const RansacParams &val);
RansacParams ransacParams() const;
void setMinInlierRatio(float val);
float minInlierRatio() const;
virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);
};
videostab::MotionEstimatorL1
----------------------------
.. ocv:class:: videostab::MotionEstimatorL1
Describes a global 2D motion estimation method which minimizes L1 error.
.. note:: To be able to use this method you must build OpenCV with CLP library support.
::
class CV_EXPORTS MotionEstimatorL1 : public MotionEstimatorBase
{
public:
MotionEstimatorL1(MotionModel model = MM_AFFINE);
virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);
};
videostab::ImageMotionEstimatorBase
-----------------------------------
.. ocv:class:: videostab::ImageMotionEstimatorBase
Base class for global 2D motion estimation methods which take frames as input.
::
class CV_EXPORTS ImageMotionEstimatorBase
{
public:
virtual ~ImageMotionEstimatorBase();
virtual void setMotionModel(MotionModel val);
virtual MotionModel motionModel() const;
virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0) = 0;
};
videostab::KeypointBasedMotionEstimator
---------------------------------------
.. ocv:class:: videostab::KeypointBasedMotionEstimator
Describes a global 2D motion estimation method which uses keypoints detection and optical flow for matching.
::
class CV_EXPORTS KeypointBasedMotionEstimator : public ImageMotionEstimatorBase
{
public:
KeypointBasedMotionEstimator(Ptr<MotionEstimatorBase> estimator);
virtual void setMotionModel(MotionModel val);
virtual MotionModel motionModel() const;
void setDetector(Ptr<FeatureDetector> val);
Ptr<FeatureDetector> detector() const;
void setOpticalFlowEstimator(Ptr<ISparseOptFlowEstimator> val);
Ptr<ISparseOptFlowEstimator> opticalFlowEstimator() const;
void setOutlierRejector(Ptr<IOutlierRejector> val);
Ptr<IOutlierRejector> outlierRejector() const;
virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);
};

@ -1,13 +0,0 @@
Introduction
============
The video stabilization module contains a set of functions and classes that can be used to solve the problem of video stabilization. There are a few methods implemented, most of them are described in the papers [OF06]_ and [G11]_. However, there are some extensions and deviations from the original paper methods.
References
----------
.. [OF06] Full-Frame Video Stabilization with Motion Inpainting. Matsushita, Y. Ofek, E. Ge, W. Tang, X. Shum, H.-Y., IEEE TRANSACTIONS ON PATTERN ANALYSIS AND MACHINE INTELLIGENCE, 2006
.. [G11] Auto-directed video stabilization with robust L1 optimal camera paths, M. Grundmann, V. Kwatra, I. Essa, Computer Vision and Pattern Recognition (CVPR), 2011
.. [T04] An Image Inpainting Technique Based on the Fast Marching Method, Alexandru Telea, Journal of graphics tools, 2004

@ -1,10 +0,0 @@
******************************
videostab. Video Stabilization
******************************
.. toctree::
:maxdepth: 2
introduction
global_motion
fast_marching

@ -56,15 +56,13 @@ CV_EXPORTS float calcBlurriness(const Mat &frame);
class CV_EXPORTS DeblurerBase
{
public:
DeblurerBase() : radius_(0), frames_(0), motions_(0), blurrinessRates_(0) {}
DeblurerBase() : radius_(0), frames_(0), motions_(0) {}
virtual ~DeblurerBase() {}
virtual void setRadius(int val) { radius_ = val; }
virtual int radius() const { return radius_; }
// data from stabilizer
virtual void setFrames(const std::vector<Mat> &val) { frames_ = &val; }
virtual const std::vector<Mat>& frames() const { return *frames_; }
@ -75,6 +73,7 @@ public:
virtual const std::vector<float>& blurrinessRates() const { return *blurrinessRates_; }
virtual void update() {}
virtual void deblur(int idx, Mat &frame) = 0;
protected:

@ -46,6 +46,7 @@
#include <vector>
#include <string>
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
namespace cv
{
@ -75,13 +76,13 @@ public:
virtual void reset();
virtual Mat nextFrame();
int width();
int height();
int count();
double fps();
int frameCount() { return static_cast<int>(reader_.get(CV_CAP_PROP_FRAME_COUNT)); }
double fps() { return reader_.get(CV_CAP_PROP_FPS); }
private:
Ptr<IFrameSource> impl;
std::string path_;
bool volatileFrame_;
VideoCapture reader_;
};
} // namespace videostab

@ -44,192 +44,94 @@
#define __OPENCV_VIDEOSTAB_GLOBAL_MOTION_HPP__
#include <vector>
#include <string>
#include <fstream>
#include "opencv2/core/core.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/opencv_modules.hpp"
#include "opencv2/videostab/optical_flow.hpp"
#include "opencv2/videostab/motion_core.hpp"
#include "opencv2/videostab/outlier_rejection.hpp"
#ifdef HAVE_OPENCV_GPU
#include "opencv2/gpu/gpu.hpp"
#endif
namespace cv
{
namespace videostab
{
CV_EXPORTS Mat estimateGlobalMotionLeastSquares(
InputOutputArray points0, InputOutputArray points1, int model = MM_AFFINE,
float *rmse = 0);
CV_EXPORTS Mat estimateGlobalMotionRansac(
InputArray points0, InputArray points1, int model = MM_AFFINE,
const RansacParams &params = RansacParams::default2dMotion(MM_AFFINE),
float *rmse = 0, int *ninliers = 0);
class CV_EXPORTS MotionEstimatorBase
enum MotionModel
{
public:
virtual ~MotionEstimatorBase() {}
virtual void setMotionModel(MotionModel val) { motionModel_ = val; }
virtual MotionModel motionModel() const { return motionModel_; }
virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0) = 0;
protected:
MotionEstimatorBase(MotionModel model) { setMotionModel(model); }
private:
MotionModel motionModel_;
TRANSLATION = 0,
TRANSLATION_AND_SCALE = 1,
LINEAR_SIMILARITY = 2,
AFFINE = 3
};
class CV_EXPORTS MotionEstimatorRansacL2 : public MotionEstimatorBase
{
public:
MotionEstimatorRansacL2(MotionModel model = MM_AFFINE);
void setRansacParams(const RansacParams &val) { ransacParams_ = val; }
RansacParams ransacParams() const { return ransacParams_; }
void setMinInlierRatio(float val) { minInlierRatio_ = val; }
float minInlierRatio() const { return minInlierRatio_; }
virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);
private:
RansacParams ransacParams_;
float minInlierRatio_;
};
CV_EXPORTS Mat estimateGlobalMotionLeastSquares(
const std::vector<Point2f> &points0, const std::vector<Point2f> &points1,
int model = AFFINE, float *rmse = 0);
class CV_EXPORTS MotionEstimatorL1 : public MotionEstimatorBase
struct CV_EXPORTS RansacParams
{
public:
MotionEstimatorL1(MotionModel model = MM_AFFINE);
virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);
private:
std::vector<double> obj_, collb_, colub_;
std::vector<double> elems_, rowlb_, rowub_;
std::vector<int> rows_, cols_;
void set(int row, int col, double coef)
{
rows_.push_back(row);
cols_.push_back(col);
elems_.push_back(coef);
}
int size; // subset size
float thresh; // max error to classify as inlier
float eps; // max outliers ratio
float prob; // probability of success
RansacParams(int _size, float _thresh, float _eps, float _prob)
: size(_size), thresh(_thresh), eps(_eps), prob(_prob) {}
static RansacParams translationMotionStd() { return RansacParams(2, 0.5f, 0.5f, 0.99f); }
static RansacParams translationAndScale2dMotionStd() { return RansacParams(3, 0.5f, 0.5f, 0.99f); }
static RansacParams linearSimilarityMotionStd() { return RansacParams(4, 0.5f, 0.5f, 0.99f); }
static RansacParams affine2dMotionStd() { return RansacParams(6, 0.5f, 0.5f, 0.99f); }
};
class CV_EXPORTS ImageMotionEstimatorBase
{
public:
virtual ~ImageMotionEstimatorBase() {}
virtual void setMotionModel(MotionModel val) { motionModel_ = val; }
virtual MotionModel motionModel() const { return motionModel_; }
virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0) = 0;
protected:
ImageMotionEstimatorBase(MotionModel model) { setMotionModel(model); }
private:
MotionModel motionModel_;
};
CV_EXPORTS Mat estimateGlobalMotionRobust(
const std::vector<Point2f> &points0, const std::vector<Point2f> &points1,
int model = AFFINE, const RansacParams &params = RansacParams::affine2dMotionStd(),
float *rmse = 0, int *ninliers = 0);
class CV_EXPORTS FromFileMotionReader : public ImageMotionEstimatorBase
class CV_EXPORTS IGlobalMotionEstimator
{
public:
FromFileMotionReader(const std::string &path);
virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);
private:
std::ifstream file_;
virtual ~IGlobalMotionEstimator() {}
virtual Mat estimate(const Mat &frame0, const Mat &frame1) = 0;
};
class CV_EXPORTS ToFileMotionWriter : public ImageMotionEstimatorBase
class CV_EXPORTS PyrLkRobustMotionEstimator : public IGlobalMotionEstimator
{
public:
ToFileMotionWriter(const std::string &path, Ptr<ImageMotionEstimatorBase> estimator);
PyrLkRobustMotionEstimator();
virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }
virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);
private:
std::ofstream file_;
Ptr<ImageMotionEstimatorBase> motionEstimator_;
};
void setDetector(Ptr<FeatureDetector> val) { detector_ = val; }
Ptr<FeatureDetector> detector() const { return detector_; }
class CV_EXPORTS KeypointBasedMotionEstimator : public ImageMotionEstimatorBase
{
public:
KeypointBasedMotionEstimator(Ptr<MotionEstimatorBase> estimator);
void setOptFlowEstimator(Ptr<ISparseOptFlowEstimator> val) { optFlowEstimator_ = val; }
Ptr<ISparseOptFlowEstimator> optFlowEstimator() const { return optFlowEstimator_; }
virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }
void setMotionModel(MotionModel val) { motionModel_ = val; }
MotionModel motionModel() const { return motionModel_; }
void setDetector(Ptr<FeatureDetector> val) { detector_ = val; }
Ptr<FeatureDetector> detector() const { return detector_; }
void setRansacParams(const RansacParams &val) { ransacParams_ = val; }
RansacParams ransacParams() const { return ransacParams_; }
void setOpticalFlowEstimator(Ptr<ISparseOptFlowEstimator> val) { optFlowEstimator_ = val; }
Ptr<ISparseOptFlowEstimator> opticalFlowEstimator() const { return optFlowEstimator_; }
void setMaxRmse(float val) { maxRmse_ = val; }
float maxRmse() const { return maxRmse_; }
void setOutlierRejector(Ptr<IOutlierRejector> val) { outlierRejector_ = val; }
Ptr<IOutlierRejector> outlierRejector() const { return outlierRejector_; }
void setMinInlierRatio(float val) { minInlierRatio_ = val; }
float minInlierRatio() const { return minInlierRatio_; }
virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);
virtual Mat estimate(const Mat &frame0, const Mat &frame1);
private:
Ptr<MotionEstimatorBase> motionEstimator_;
Ptr<FeatureDetector> detector_;
Ptr<ISparseOptFlowEstimator> optFlowEstimator_;
Ptr<IOutlierRejector> outlierRejector_;
MotionModel motionModel_;
RansacParams ransacParams_;
std::vector<uchar> status_;
std::vector<KeyPoint> keypointsPrev_;
std::vector<Point2f> pointsPrev_, points_;
std::vector<Point2f> pointsPrevGood_, pointsGood_;
float maxRmse_;
float minInlierRatio_;
};
#ifdef HAVE_OPENCV_GPU
class CV_EXPORTS KeypointBasedMotionEstimatorGpu : public ImageMotionEstimatorBase
{
public:
KeypointBasedMotionEstimatorGpu(Ptr<MotionEstimatorBase> estimator);
virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }
void setOutlierRejector(Ptr<IOutlierRejector> val) { outlierRejector_ = val; }
Ptr<IOutlierRejector> outlierRejector() const { return outlierRejector_; }
virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);
Mat estimate(const gpu::GpuMat &frame0, const gpu::GpuMat &frame1, bool *ok = 0);
private:
Ptr<MotionEstimatorBase> motionEstimator_;
gpu::GoodFeaturesToTrackDetector_GPU detector_;
SparsePyrLkOptFlowEstimatorGpu optFlowEstimator_;
Ptr<IOutlierRejector> outlierRejector_;
gpu::GpuMat frame0_, grayFrame0_, frame1_;
gpu::GpuMat pointsPrev_, points_;
gpu::GpuMat status_;
Mat hostPointsPrev_, hostPoints_;
std::vector<Point2f> hostPointsPrevTmp_, hostPointsTmp_;
std::vector<uchar> rejectionStatus_;
};
#endif
CV_EXPORTS Mat getMotion(int from, int to, const Mat *motions, int size);
CV_EXPORTS Mat getMotion(int from, int to, const std::vector<Mat> &motions);

@ -47,7 +47,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/videostab/optical_flow.hpp"
#include "opencv2/videostab/fast_marching.hpp"
#include "opencv2/videostab/global_motion.hpp"
#include "opencv2/photo/photo.hpp"
namespace cv
@ -59,7 +58,7 @@ class CV_EXPORTS InpainterBase
{
public:
InpainterBase()
: radius_(0), motionModel_(MM_UNKNOWN), frames_(0), motions_(0),
: radius_(0), frames_(0), motions_(0),
stabilizedFrames_(0), stabilizationMotions_(0) {}
virtual ~InpainterBase() {}
@ -67,14 +66,6 @@ public:
virtual void setRadius(int val) { radius_ = val; }
virtual int radius() const { return radius_; }
virtual void setMotionModel(MotionModel val) { motionModel_ = val; }
virtual MotionModel motionModel() const { return motionModel_; }
virtual void inpaint(int idx, Mat &frame, Mat &mask) = 0;
// data from stabilizer
virtual void setFrames(const std::vector<Mat> &val) { frames_ = &val; }
virtual const std::vector<Mat>& frames() const { return *frames_; }
@ -87,9 +78,12 @@ public:
virtual void setStabilizationMotions(const std::vector<Mat> &val) { stabilizationMotions_ = &val; }
virtual const std::vector<Mat>& stabilizationMotions() const { return *stabilizationMotions_; }
virtual void update() {}
virtual void inpaint(int idx, Mat &frame, Mat &mask) = 0;
protected:
int radius_;
MotionModel motionModel_;
const std::vector<Mat> *frames_;
const std::vector<Mat> *motions_;
const std::vector<Mat> *stabilizedFrames_;
@ -109,12 +103,13 @@ public:
bool empty() const { return inpainters_.empty(); }
virtual void setRadius(int val);
virtual void setMotionModel(MotionModel val);
virtual void setFrames(const std::vector<Mat> &val);
virtual void setMotions(const std::vector<Mat> &val);
virtual void setStabilizedFrames(const std::vector<Mat> &val);
virtual void setStabilizationMotions(const std::vector<Mat> &val);
virtual void update();
virtual void inpaint(int idx, Mat &frame, Mat &mask);
private:
@ -180,7 +175,8 @@ private:
class CV_EXPORTS ColorInpainter : public InpainterBase
{
public:
ColorInpainter(int method = INPAINT_TELEA, double radius = 2.);
ColorInpainter(int method = INPAINT_TELEA, double _radius = 2.)
: method_(method), radius_(_radius) {}
virtual void inpaint(int idx, Mat &frame, Mat &mask);
@ -190,9 +186,6 @@ private:
Mat invMask_;
};
inline ColorInpainter::ColorInpainter(int _method, double _radius)
: method_(_method), radius_(_radius) {}
CV_EXPORTS void calcFlowMask(
const Mat &flowX, const Mat &flowY, const Mat &errors, float maxError,
const Mat &mask0, const Mat &mask1, Mat &flowMask);

@ -1,108 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_VIDEOSTAB_MOTION_CORE_HPP__
#define __OPENCV_VIDEOSTAB_MOTION_CORE_HPP__
#include <cmath>
#include "opencv2/core/core.hpp"
namespace cv
{
namespace videostab
{
// 2D parametric motion models, ordered by increasing degrees of freedom
// (translation = 2 DOF ... homography = 8 DOF). MM_UNKNOWN marks an
// unspecified/unsupported model and is also used as an upper bound in checks.
enum MotionModel
{
MM_TRANSLATION = 0,
MM_TRANSLATION_AND_SCALE = 1,
MM_ROTATION = 2,
MM_RIGID = 3,
MM_SIMILARITY = 4,
MM_AFFINE = 5,
MM_HOMOGRAPHY = 6,
MM_UNKNOWN = 7
};
// Parameters of the RANSAC robust estimation procedure: minimal subset size,
// inlier classification threshold, assumed outlier ratio, and the required
// probability of drawing at least one all-inlier subset.
struct CV_EXPORTS RansacParams
{
int size; // subset size
float thresh; // max error to classify as inlier
float eps; // max outliers ratio
float prob; // probability of success
// Creates an "empty" (all-zero) parameter set; niters() is not meaningful
// on such an object (log(1-0) = 0 in the denominator).
RansacParams() : size(0), thresh(0), eps(0), prob(0) {}
// Full initialization; defined inline below the struct body.
RansacParams(int size, float thresh, float eps, float prob);
// Number of iterations required to reach success probability `prob`
// given outlier ratio `eps` and subset size `size`, using the standard
// RANSAC bound: ceil(log(1 - prob) / log(1 - (1 - eps)^size)).
int niters() const
{
return static_cast<int>(
std::ceil(std::log(1 - prob) / std::log(1 - std::pow(1 - eps, size))));
}
// Default parameters for a given 2D motion model. The subset size equals
// the minimal number of point correspondences needed to estimate that
// model; threshold, outlier ratio, and probability are fixed heuristics
// (0.5, 0.5, 0.99). Asserts that `model` is a concrete model
// (i.e. strictly below MM_UNKNOWN); the final return covers MM_HOMOGRAPHY.
static RansacParams default2dMotion(MotionModel model)
{
CV_Assert(model < MM_UNKNOWN);
if (model == MM_TRANSLATION)
return RansacParams(1, 0.5f, 0.5f, 0.99f);
if (model == MM_TRANSLATION_AND_SCALE)
return RansacParams(2, 0.5f, 0.5f, 0.99f);
if (model == MM_ROTATION)
return RansacParams(1, 0.5f, 0.5f, 0.99f);
if (model == MM_RIGID)
return RansacParams(2, 0.5f, 0.5f, 0.99f);
if (model == MM_SIMILARITY)
return RansacParams(2, 0.5f, 0.5f, 0.99f);
if (model == MM_AFFINE)
return RansacParams(3, 0.5f, 0.5f, 0.99f);
return RansacParams(4, 0.5f, 0.5f, 0.99f);
}
};
// Out-of-line inline definition of the full constructor declared above.
inline RansacParams::RansacParams(int _size, float _thresh, float _eps, float _prob)
: size(_size), thresh(_thresh), eps(_eps), prob(_prob) {}
} // namespace videostab
} // namespace cv
#endif

@ -44,9 +44,7 @@
#define __OPENCV_VIDEOSTAB_MOTION_STABILIZING_HPP__
#include <vector>
#include <utility>
#include "opencv2/core/core.hpp"
#include "opencv2/videostab/global_motion.hpp"
namespace cv
{
@ -56,109 +54,46 @@ namespace videostab
class CV_EXPORTS IMotionStabilizer
{
public:
virtual void stabilize(const Mat *motions, int size, Mat *stabilizationMotions) const = 0;
#ifdef OPENCV_CAN_BREAK_BINARY_COMPATIBILITY
virtual ~IMotionStabilizer() {}
#endif
// assumes that [0, size-1) is in or equals to [range.first, range.second)
virtual void stabilize(
int size, const std::vector<Mat> &motions, std::pair<int,int> range,
Mat *stabilizationMotions) = 0;
};
class CV_EXPORTS MotionStabilizationPipeline : public IMotionStabilizer
{
public:
void pushBack(Ptr<IMotionStabilizer> stabilizer) { stabilizers_.push_back(stabilizer); }
bool empty() const { return stabilizers_.empty(); }
virtual void stabilize(
int size, const std::vector<Mat> &motions, std::pair<int,int> range,
Mat *stabilizationMotions);
private:
std::vector<Ptr<IMotionStabilizer> > stabilizers_;
};
class CV_EXPORTS MotionFilterBase : public IMotionStabilizer
{
public:
MotionFilterBase() : radius_(0) {}
virtual ~MotionFilterBase() {}
virtual Mat stabilize(
int idx, const std::vector<Mat> &motions, std::pair<int,int> range) = 0;
virtual void setRadius(int val) { radius_ = val; }
virtual int radius() const { return radius_; }
virtual void stabilize(
int size, const std::vector<Mat> &motions, std::pair<int,int> range,
Mat *stabilizationMotions);
};
virtual void update() {}
class CV_EXPORTS GaussianMotionFilter : public MotionFilterBase
{
public:
GaussianMotionFilter(int radius = 15, float stdev = -1.f);
void setParams(int radius, float stdev = -1.f);
int radius() const { return radius_; }
float stdev() const { return stdev_; }
virtual Mat stabilize(int index, const Mat *motions, int size) const = 0;
virtual void stabilize(const Mat *motions, int size, Mat *stabilizationMotions) const;
virtual Mat stabilize(
int idx, const std::vector<Mat> &motions, std::pair<int,int> range);
private:
protected:
int radius_;
float stdev_;
std::vector<float> weight_;
};
inline GaussianMotionFilter::GaussianMotionFilter(int _radius, float _stdev) { setParams(_radius, _stdev); }
class CV_EXPORTS LpMotionStabilizer : public IMotionStabilizer
class CV_EXPORTS GaussianMotionFilter : public MotionFilterBase
{
public:
LpMotionStabilizer(MotionModel model = MM_SIMILARITY);
void setMotionModel(MotionModel val) { model_ = val; }
MotionModel motionModel() const { return model_; }
GaussianMotionFilter() : stdev_(-1.f) {}
void setFrameSize(Size val) { frameSize_ = val; }
Size frameSize() const { return frameSize_; }
void setTrimRatio(float val) { trimRatio_ = val; }
float trimRatio() const { return trimRatio_; }
void setWeight1(float val) { w1_ = val; }
float weight1() const { return w1_; }
void setWeight2(float val) { w2_ = val; }
float weight2() const { return w2_; }
void setWeight3(float val) { w3_ = val; }
float weight3() const { return w3_; }
void setStdev(float val) { stdev_ = val; }
float stdev() const { return stdev_; }
void setWeight4(float val) { w4_ = val; }
float weight4() const { return w4_; }
virtual void update();
virtual void stabilize(
int size, const std::vector<Mat> &motions, std::pair<int,int> range,
Mat *stabilizationMotions);
virtual Mat stabilize(int index, const Mat *motions, int size) const;
private:
MotionModel model_;
Size frameSize_;
float trimRatio_;
float w1_, w2_, w3_, w4_;
std::vector<double> obj_, collb_, colub_;
std::vector<int> rows_, cols_;
std::vector<double> elems_, rowlb_, rowub_;
void set(int row, int col, double coef)
{
rows_.push_back(row);
cols_.push_back(col);
elems_.push_back(coef);
}
float stdev_;
std::vector<float> weight_;
};
CV_EXPORTS Mat ensureInclusionConstraint(const Mat &M, Size size, float trimRatio);

@ -47,7 +47,7 @@
#include "opencv2/opencv_modules.hpp"
#ifdef HAVE_OPENCV_GPU
#include "opencv2/gpu/gpu.hpp"
# include "opencv2/gpu/gpu.hpp"
#endif
namespace cv
@ -78,12 +78,11 @@ class CV_EXPORTS PyrLkOptFlowEstimatorBase
public:
PyrLkOptFlowEstimatorBase() { setWinSize(Size(21, 21)); setMaxLevel(3); }
virtual void setWinSize(Size val) { winSize_ = val; }
virtual Size winSize() const { return winSize_; }
void setWinSize(Size val) { winSize_ = val; }
Size winSize() const { return winSize_; }
virtual void setMaxLevel(int val) { maxLevel_ = val; }
virtual int maxLevel() const { return maxLevel_; }
virtual ~PyrLkOptFlowEstimatorBase() {}
void setMaxLevel(int val) { maxLevel_ = val; }
int maxLevel() const { return maxLevel_; }
protected:
Size winSize_;
@ -100,27 +99,6 @@ public:
};
#ifdef HAVE_OPENCV_GPU
class CV_EXPORTS SparsePyrLkOptFlowEstimatorGpu
: public PyrLkOptFlowEstimatorBase, public ISparseOptFlowEstimator
{
public:
SparsePyrLkOptFlowEstimatorGpu();
virtual void run(
InputArray frame0, InputArray frame1, InputArray points0, InputOutputArray points1,
OutputArray status, OutputArray errors);
void run(const gpu::GpuMat &frame0, const gpu::GpuMat &frame1, const gpu::GpuMat &points0, gpu::GpuMat &points1,
gpu::GpuMat &status, gpu::GpuMat &errors);
void run(const gpu::GpuMat &frame0, const gpu::GpuMat &frame1, const gpu::GpuMat &points0, gpu::GpuMat &points1,
gpu::GpuMat &status);
private:
gpu::PyrLKOpticalFlow optFlowEstimator_;
gpu::GpuMat frame0_, frame1_, points0_, points1_, status_, errors_;
};
class CV_EXPORTS DensePyrLkOptFlowEstimatorGpu
: public PyrLkOptFlowEstimatorBase, public IDenseOptFlowEstimator
{
@ -130,7 +108,6 @@ public:
virtual void run(
InputArray frame0, InputArray frame1, InputOutputArray flowX, InputOutputArray flowY,
OutputArray errors);
private:
gpu::PyrLKOpticalFlow optFlowEstimator_;
gpu::GpuMat frame0_, frame1_, flowX_, flowY_, errors_;

@ -1,96 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_VIDEOSTAB_OUTLIER_REJECTION_HPP__
#define __OPENCV_VIDEOSTAB_OUTLIER_REJECTION_HPP__
#include <vector>
#include "opencv2/core/core.hpp"
#include "opencv2/videostab/motion_core.hpp"
namespace cv
{
namespace videostab
{
// Abstract interface for rejecting outlier point correspondences between
// two frames before global motion estimation.
class CV_EXPORTS IOutlierRejector
{
public:
virtual ~IOutlierRejector() {}
// Writes a per-correspondence mask to `mask`; implementations mark the
// correspondences they consider inliers (see NullOutlierRejector and
// TranslationBasedLocalOutlierRejector below for concrete behaviors).
virtual void process(
Size frameSize, InputArray points0, InputArray points1, OutputArray mask) = 0;
};
// No-op rejector: implementation (not shown here) is expected to accept all
// correspondences. Useful as a default when outlier rejection is not wanted.
class CV_EXPORTS NullOutlierRejector : public IOutlierRejector
{
public:
virtual void process(
Size frameSize, InputArray points0, InputArray points1, OutputArray mask);
};
// Rejects outliers locally: the frame is divided into cells (`cellSize_`),
// and a translation model is RANSAC-fitted per cell; correspondences that do
// not agree with their cell's model are marked as outliers.
class CV_EXPORTS TranslationBasedLocalOutlierRejector : public IOutlierRejector
{
public:
TranslationBasedLocalOutlierRejector();
// Size of a grid cell in pixels.
void setCellSize(Size val) { cellSize_ = val; }
Size cellSize() const { return cellSize_; }
// RANSAC parameters used for the per-cell translation fit.
void setRansacParams(RansacParams val) { ransacParams_ = val; }
RansacParams ransacParams() const { return ransacParams_; }
virtual void process(
Size frameSize, InputArray points0, InputArray points1, OutputArray mask);
private:
Size cellSize_;
RansacParams ransacParams_;
// A cell holds the indices of the correspondences that fall inside it.
typedef std::vector<int> Cell;
std::vector<Cell> grid_;
};
} // namespace videostab
} // namespace cv
#endif

@ -1,67 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_VIDEOSTAB_RING_BUFFER_HPP__
#define __OPENCV_VIDEOSTAB_RING_BUFFER_HPP__
#include <vector>
#include "opencv2/imgproc/imgproc.hpp"
namespace cv
{
namespace videostab
{
// Ring-buffer element access: an index outside [0, size) is wrapped around
// the ends of the vector, so the container behaves like a circular buffer.
template <typename T> inline T& at(int idx, std::vector<T> &items)
{
    const int n = static_cast<int>(items.size());
    return items[cv::borderInterpolate(idx, n, cv::BORDER_WRAP)];
}
// Read-only counterpart of at(): wraps out-of-range indices around the
// vector boundaries (circular-buffer semantics) without allowing mutation.
template <typename T> inline const T& at(int idx, const std::vector<T> &items)
{
    const int length = static_cast<int>(items.size());
    const int wrapped = cv::borderInterpolate(idx, length, cv::BORDER_WRAP);
    return items[wrapped];
}
} // namespace videostab
} // namespace cv
#endif

@ -44,7 +44,6 @@
#define __OPENCV_VIDEOSTAB_STABILIZER_HPP__
#include <vector>
#include <ctime>
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/videostab/global_motion.hpp"
@ -53,7 +52,6 @@
#include "opencv2/videostab/log.hpp"
#include "opencv2/videostab/inpainting.hpp"
#include "opencv2/videostab/deblurring.hpp"
#include "opencv2/videostab/wobble_suppression.hpp"
namespace cv
{
@ -65,7 +63,7 @@ class CV_EXPORTS StabilizerBase
public:
virtual ~StabilizerBase() {}
void setLog(Ptr<ILog> ilog) { log_ = ilog; }
void setLog(Ptr<ILog> _log) { log_ = _log; }
Ptr<ILog> log() const { return log_; }
void setRadius(int val) { radius_ = val; }
@ -74,8 +72,8 @@ public:
void setFrameSource(Ptr<IFrameSource> val) { frameSource_ = val; }
Ptr<IFrameSource> frameSource() const { return frameSource_; }
void setMotionEstimator(Ptr<ImageMotionEstimatorBase> val) { motionEstimator_ = val; }
Ptr<ImageMotionEstimatorBase> motionEstimator() const { return motionEstimator_; }
void setMotionEstimator(Ptr<IGlobalMotionEstimator> val) { motionEstimator_ = val; }
Ptr<IGlobalMotionEstimator> motionEstimator() const { return motionEstimator_; }
void setDeblurer(Ptr<DeblurerBase> val) { deblurer_ = val; }
Ptr<DeblurerBase> deblurrer() const { return deblurer_; }
@ -95,19 +93,18 @@ public:
protected:
StabilizerBase();
void reset();
void setUp(int cacheSize, const Mat &frame);
Mat nextStabilizedFrame();
bool doOneIteration();
virtual void setUp(const Mat &firstFrame);
virtual Mat estimateMotion() = 0;
virtual Mat estimateStabilizationMotion() = 0;
void stabilizeFrame();
virtual Mat postProcessFrame(const Mat &frame);
void logProcessingTime();
void stabilizeFrame(const Mat &stabilizationMotion);
virtual void setUp(Mat &firstFrame) = 0;
virtual void stabilizeFrame() = 0;
virtual void estimateMotion() = 0;
Ptr<ILog> log_;
Ptr<IFrameSource> frameSource_;
Ptr<ImageMotionEstimatorBase> motionEstimator_;
Ptr<IGlobalMotionEstimator> motionEstimator_;
Ptr<DeblurerBase> deblurer_;
Ptr<InpainterBase> inpainter_;
int radius_;
@ -123,14 +120,12 @@ protected:
Mat preProcessedFrame_;
bool doInpainting_;
Mat inpaintingMask_;
Mat finalFrame_;
std::vector<Mat> frames_;
std::vector<Mat> motions_; // motions_[i] is the motion from i-th to i+1-th frame
std::vector<float> blurrinessRates_;
std::vector<Mat> stabilizedFrames_;
std::vector<Mat> stabilizedMasks_;
std::vector<Mat> stabilizationMotions_;
clock_t processingStartTime_;
};
class CV_EXPORTS OnePassStabilizer : public StabilizerBase, public IFrameSource
@ -141,14 +136,15 @@ public:
void setMotionFilter(Ptr<MotionFilterBase> val) { motionFilter_ = val; }
Ptr<MotionFilterBase> motionFilter() const { return motionFilter_; }
virtual void reset();
virtual void reset() { resetImpl(); }
virtual Mat nextFrame() { return nextStabilizedFrame(); }
private:
virtual void setUp(const Mat &firstFrame);
virtual Mat estimateMotion();
virtual Mat estimateStabilizationMotion();
virtual Mat postProcessFrame(const Mat &frame);
void resetImpl();
virtual void setUp(Mat &firstFrame);
virtual void estimateMotion();
virtual void stabilizeFrame();
Ptr<MotionFilterBase> motionFilter_;
};
@ -159,34 +155,30 @@ public:
TwoPassStabilizer();
void setMotionStabilizer(Ptr<IMotionStabilizer> val) { motionStabilizer_ = val; }
Ptr<IMotionStabilizer> motionStabilizer() const { return motionStabilizer_; }
void setWobbleSuppressor(Ptr<WobbleSuppressorBase> val) { wobbleSuppressor_ = val; }
Ptr<WobbleSuppressorBase> wobbleSuppressor() const { return wobbleSuppressor_; }
Ptr<IMotionStabilizer> motionStabilizer() const { return motionStabilizer_; }
void setEstimateTrimRatio(bool val) { mustEstTrimRatio_ = val; }
bool mustEstimateTrimaRatio() const { return mustEstTrimRatio_; }
virtual void reset();
virtual void reset() { resetImpl(); }
virtual Mat nextFrame();
// available after pre-pass, before it's empty
std::vector<Mat> motions() const;
private:
void resetImpl();
void runPrePassIfNecessary();
virtual void setUp(const Mat &firstFrame);
virtual Mat estimateMotion();
virtual Mat estimateStabilizationMotion();
virtual Mat postProcessFrame(const Mat &frame);
virtual void setUp(Mat &firstFrame);
virtual void estimateMotion() { /* do nothing as motion was estimation in pre-pass */ }
virtual void stabilizeFrame();
Ptr<IMotionStabilizer> motionStabilizer_;
Ptr<WobbleSuppressorBase> wobbleSuppressor_;
bool mustEstTrimRatio_;
int frameCount_;
bool isPrePassDone_;
bool doWobbleSuppression_;
std::vector<Mat> motions2_;
Mat suppressedFrame_;
};
} // namespace videostab

@ -40,16 +40,9 @@
//
//M*/
// REFERENCES
// 1. "Full-Frame Video Stabilization with Motion Inpainting"
// Yasuyuki Matsushita, Eyal Ofek, Weina Ge, Xiaoou Tang, Senior Member, and Heung-Yeung Shum
// 2. "Auto-Directed Video Stabilization with Robust L1 Optimal Camera Paths"
// Matthias Grundmann, Vivek Kwatra, Irfan Essa
#ifndef __OPENCV_VIDEOSTAB_HPP__
#define __OPENCV_VIDEOSTAB_HPP__
#include "opencv2/videostab/stabilizer.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
#endif

@ -1,139 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_VIDEOSTAB_WOBBLE_SUPPRESSION_HPP__
#define __OPENCV_VIDEOSTAB_WOBBLE_SUPPRESSION_HPP__
#include <vector>
#include "opencv2/core/core.hpp"
#include "opencv2/videostab/global_motion.hpp"
#include "opencv2/videostab/log.hpp"
#ifdef HAVE_OPENCV_GPU
#include "opencv2/gpu/gpu.hpp"
#endif
namespace cv
{
namespace videostab
{
// Base class for wobble (rolling-shutter style distortion) suppression.
// The stabilizer injects its per-frame motion data through the setters below;
// concrete subclasses implement suppress() to warp each frame accordingly.
class CV_EXPORTS WobbleSuppressorBase
{
public:
WobbleSuppressorBase();
virtual ~WobbleSuppressorBase() {}
// Estimator used to (re-)estimate image motions when needed.
void setMotionEstimator(Ptr<ImageMotionEstimatorBase> val) { motionEstimator_ = val; }
Ptr<ImageMotionEstimatorBase> motionEstimator() const { return motionEstimator_; }
// Produces the wobble-suppressed version of frame `idx` into `result`.
virtual void suppress(int idx, const Mat &frame, Mat &result) = 0;
// data from stabilizer
virtual void setFrameCount(int val) { frameCount_ = val; }
virtual int frameCount() const { return frameCount_; }
// NOTE: the setters below store POINTERS to the caller's vectors; the caller
// (the stabilizer) must keep them alive while this object is in use.
virtual void setMotions(const std::vector<Mat> &val) { motions_ = &val; }
virtual const std::vector<Mat>& motions() const { return *motions_; }
virtual void setMotions2(const std::vector<Mat> &val) { motions2_ = &val; }
virtual const std::vector<Mat>& motions2() const { return *motions2_; }
virtual void setStabilizationMotions(const std::vector<Mat> &val) { stabilizationMotions_ = &val; }
virtual const std::vector<Mat>& stabilizationMotions() const { return *stabilizationMotions_; }
protected:
Ptr<ImageMotionEstimatorBase> motionEstimator_;
int frameCount_;
const std::vector<Mat> *motions_;            // non-owning, set by stabilizer
const std::vector<Mat> *motions2_;           // non-owning, set by stabilizer
const std::vector<Mat> *stabilizationMotions_; // non-owning, set by stabilizer
};
// Pass-through suppressor: implementation (not shown here) is expected to
// leave frames unchanged. Used when wobble suppression is disabled.
class CV_EXPORTS NullWobbleSuppressor : public WobbleSuppressorBase
{
public:
virtual void suppress(int idx, const Mat &frame, Mat &result);
};
// Adds a configurable period (in frames) to the wobble suppressor; the
// period controls how frames are re-aligned between the two motion passes.
class CV_EXPORTS MoreAccurateMotionWobbleSuppressorBase : public WobbleSuppressorBase
{
public:
virtual void setPeriod(int val) { period_ = val; }
virtual int period() const { return period_; }
protected:
// Default period is 30 frames.
MoreAccurateMotionWobbleSuppressorBase() { setPeriod(30); }
int period_;
};
// CPU implementation of periodic wobble suppression; keeps remap lookup
// tables (mapx_/mapy_) as members so the buffers are reused across frames.
class CV_EXPORTS MoreAccurateMotionWobbleSuppressor : public MoreAccurateMotionWobbleSuppressorBase
{
public:
virtual void suppress(int idx, const Mat &frame, Mat &result);
private:
Mat_<float> mapx_, mapy_;
};
#ifdef HAVE_OPENCV_GPU
// GPU variant: offers a native GpuMat overload plus a Mat overload that
// (presumably) uploads via frameDevice_/resultDevice_ — bodies not visible here.
class CV_EXPORTS MoreAccurateMotionWobbleSuppressorGpu : public MoreAccurateMotionWobbleSuppressorBase
{
public:
void suppress(int idx, const gpu::GpuMat &frame, gpu::GpuMat &result);
virtual void suppress(int idx, const Mat &frame, Mat &result);
private:
gpu::GpuMat frameDevice_, resultDevice_;
gpu::GpuMat mapx_, mapy_;
};
#endif
} // namespace videostab
} // namespace cv
#endif

@ -1,76 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef __OPENCV_VIDEOSTAB_CLP_HPP__
#define __OPENCV_VIDEOSTAB_CLP_HPP__
#ifdef HAVE_CLP
# undef PACKAGE
# undef PACKAGE_BUGREPORT
# undef PACKAGE_NAME
# undef PACKAGE_STRING
# undef PACKAGE_TARNAME
# undef PACKAGE_VERSION
# undef VERSION
# define COIN_BIG_INDEX 0
# define DEBUG_COIN 0
# define PRESOLVE_DEBUG 0
# define PRESOLVE_CONSISTENCY 0
# include "ClpSimplex.hpp"
# include "ClpPresolve.hpp"
# include "ClpPrimalColumnSteepest.hpp"
# include "ClpDualRowSteepest.hpp"
# define INF 1e10
#endif
// Clp replaces min and max with ?: globally, we can't use std::min and std::max in case
// when HAVE_CLP is true. We create the defines by ourselves when HAVE_CLP == 0.
#ifndef min
#define min(a,b) std::min(a,b)
#endif
#ifndef max
#define max(a,b) std::max(a,b)
#endif
#endif

@ -43,7 +43,6 @@
#include "precomp.hpp"
#include "opencv2/videostab/deblurring.hpp"
#include "opencv2/videostab/global_motion.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
using namespace std;

@ -42,7 +42,6 @@
#include "precomp.hpp"
#include "opencv2/videostab/fast_marching.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
using namespace std;

@ -42,12 +42,6 @@
#include "precomp.hpp"
#include "opencv2/videostab/frame_source.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
#include "opencv2/opencv_modules.hpp"
#ifdef HAVE_OPENCV_HIGHGUI
# include "opencv2/highgui/highgui.hpp"
#endif
using namespace std;
@ -56,67 +50,25 @@ namespace cv
namespace videostab
{
namespace {
// Frame source backed by a video file. All video I/O is guarded by
// HAVE_OPENCV_HIGHGUI: without highgui the class throws on reset() and the
// property getters return zeros.
class VideoFileSourceImpl : public IFrameSource
{
public:
// `volatileFrame` == true lets nextFrame() return the capture's internal
// buffer directly (cheap, but invalidated by the next read); false clones it.
VideoFileSourceImpl(const std::string &path, bool volatileFrame)
: path_(path), volatileFrame_(volatileFrame) { reset(); }
// (Re)opens the file; throws std::runtime_error if it cannot be opened.
virtual void reset()
{
#ifdef HAVE_OPENCV_HIGHGUI
vc.release();
vc.open(path_);
if (!vc.isOpened())
throw runtime_error("can't open file: " + path_);
#else
CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without video I/O support");
#endif
}
// Returns the next frame; an empty Mat signals end of stream.
virtual Mat nextFrame()
{
Mat frame;
#ifdef HAVE_OPENCV_HIGHGUI
vc >> frame;
#endif
return volatileFrame_ ? frame : frame.clone();
}
#ifdef HAVE_OPENCV_HIGHGUI
int width() {return static_cast<int>(vc.get(CV_CAP_PROP_FRAME_WIDTH));}
int height() {return static_cast<int>(vc.get(CV_CAP_PROP_FRAME_HEIGHT));}
int count() {return static_cast<int>(vc.get(CV_CAP_PROP_FRAME_COUNT));}
double fps() {return vc.get(CV_CAP_PROP_FPS);}
#else
int width() {return 0;}
int height() {return 0;}
int count() {return 0;}
double fps() {return 0;}
#endif
private:
std::string path_;
bool volatileFrame_;
#ifdef HAVE_OPENCV_HIGHGUI
VideoCapture vc;
#endif
};
VideoFileSource::VideoFileSource(const string &path, bool volatileFrame)
: path_(path), volatileFrame_(volatileFrame) { reset(); }
}//namespace
VideoFileSource::VideoFileSource(const string &path, bool volatileFrame)
: impl(new VideoFileSourceImpl(path, volatileFrame)) {}
// Re-opens the video file from the beginning; throws std::runtime_error
// if the file cannot be opened.
void VideoFileSource::reset()
{
reader_.release();
reader_.open(path_);
if (!reader_.isOpened())
throw runtime_error("can't open file: " + path_);
}
void VideoFileSource::reset() { impl->reset(); }
Mat VideoFileSource::nextFrame() { return impl->nextFrame(); }
int VideoFileSource::width() { return ((VideoFileSourceImpl*)impl.obj)->width(); }
int VideoFileSource::height() { return ((VideoFileSourceImpl*)impl.obj)->height(); }
int VideoFileSource::count() { return ((VideoFileSourceImpl*)impl.obj)->count(); }
double VideoFileSource::fps() { return ((VideoFileSourceImpl*)impl.obj)->fps(); }
// Reads the next frame; returns an empty Mat at end of stream. When
// volatileFrame_ is set, the capture's internal buffer is returned directly
// (valid only until the next read); otherwise a deep copy is returned.
Mat VideoFileSource::nextFrame()
{
Mat frame;
reader_ >> frame;
return volatileFrame_ ? frame : frame.clone();
}
} // namespace videostab
} // namespace cv

@ -41,11 +41,8 @@
//M*/
#include "precomp.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/videostab/global_motion.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
#include "opencv2/videostab/outlier_rejection.hpp"
#include "opencv2/opencv_modules.hpp"
#include "clp.hpp"
using namespace std;
@ -54,44 +51,8 @@ namespace cv
namespace videostab
{
// does isotropic normalization
// Isotropic (Hartley) normalization, applied IN PLACE: translates the point
// set so its centroid is at the origin and scales it so the mean distance
// from the origin is sqrt(2). Returns the 3x3 transform T that maps the
// original points to the normalized ones.
// NOTE(review): divides by npoints and by the mean distance d without guards;
// npoints == 0 or all-coincident points yield NaN/Inf — presumably callers
// guarantee enough distinct points (TODO confirm at call sites).
static Mat normalizePoints(int npoints, Point2f *points)
{
// Centroid of the point set.
float cx = 0.f, cy = 0.f;
for (int i = 0; i < npoints; ++i)
{
cx += points[i].x;
cy += points[i].y;
}
cx /= npoints;
cy /= npoints;
// Center the points and accumulate the mean distance to the origin.
float d = 0.f;
for (int i = 0; i < npoints; ++i)
{
points[i].x -= cx;
points[i].y -= cy;
d += sqrt(sqr(points[i].x) + sqr(points[i].y));
}
d /= npoints;
// Scale so the mean distance becomes sqrt(2).
float s = sqrt(2.f) / d;
for (int i = 0; i < npoints; ++i)
{
points[i].x *= s;
points[i].y *= s;
}
// T = scale(s) * translate(-cx, -cy), in homogeneous coordinates.
Mat_<float> T = Mat::eye(3, 3, CV_32F);
T(0,0) = T(1,1) = s;
T(0,2) = -cx*s;
T(1,2) = -cy*s;
return T;
}
static Mat estimateGlobMotionLeastSquaresTranslation(
int npoints, Point2f *points0, Point2f *points1, float *rmse)
int npoints, const Point2f *points0, const Point2f *points1, float *rmse)
{
Mat_<float> M = Mat::eye(3, 3, CV_32F);
for (int i = 0; i < npoints; ++i)
@ -101,7 +62,6 @@ static Mat estimateGlobMotionLeastSquaresTranslation(
}
M(0,2) /= npoints;
M(1,2) /= npoints;
if (rmse)
{
*rmse = 0;
@ -110,17 +70,13 @@ static Mat estimateGlobMotionLeastSquaresTranslation(
sqr(points1[i].y - points0[i].y - M(1,2));
*rmse = sqrt(*rmse / npoints);
}
return M;
}
static Mat estimateGlobMotionLeastSquaresTranslationAndScale(
int npoints, Point2f *points0, Point2f *points1, float *rmse)
int npoints, const Point2f *points0, const Point2f *points1, float *rmse)
{
Mat_<float> T0 = normalizePoints(npoints, points0);
Mat_<float> T1 = normalizePoints(npoints, points1);
Mat_<float> A(2*npoints, 3), b(2*npoints, 1);
float *a0, *a1;
Point2f p0, p1;
@ -138,7 +94,7 @@ static Mat estimateGlobMotionLeastSquaresTranslationAndScale(
}
Mat_<float> sol;
solve(A, b, sol, DECOMP_NORMAL | DECOMP_LU);
solve(A, b, sol, DECOMP_SVD);
if (rmse)
*rmse = static_cast<float>(norm(A*sol, b, NORM_L2) / sqrt(static_cast<double>(npoints)));
@ -147,115 +103,13 @@ static Mat estimateGlobMotionLeastSquaresTranslationAndScale(
M(0,0) = M(1,1) = sol(0,0);
M(0,2) = sol(1,0);
M(1,2) = sol(2,0);
return T1.inv() * M * T0;
}
// Least-squares fit of a pure rotation about the origin mapping points0 to
// points1. The optimal angle has the closed form sin = -B/C, cos = A/C with
// A = sum(p0 . p1), B = sum(p0 x p1), C = sqrt(A^2 + B^2). Returns a 3x3
// homogeneous matrix (identity if C == 0, i.e. the angle is undetermined);
// if `rmse` is non-null, the root-mean-square residual is stored there.
static Mat estimateGlobMotionLeastSquaresRotation(
int npoints, Point2f *points0, Point2f *points1, float *rmse)
{
Point2f p0, p1;
float A(0), B(0);
for(int i=0; i<npoints; ++i)
{
p0 = points0[i];
p1 = points1[i];
// A accumulates dot products, B accumulates 2-D cross products.
A += p0.x*p1.x + p0.y*p1.y;
B += p0.x*p1.y - p1.x*p0.y;
}
// A*sin(alpha) + B*cos(alpha) = 0
float C = sqrt(A*A + B*B);
Mat_<float> M = Mat::eye(3, 3, CV_32F);
if ( C != 0 )
{
float sinAlpha = - B / C;
float cosAlpha = A / C;
M(0,0) = cosAlpha;
M(1,1) = M(0,0);
M(0,1) = sinAlpha;
M(1,0) = - M(0,1);
}
if (rmse)
{
*rmse = 0;
for (int i = 0; i < npoints; ++i)
{
p0 = points0[i];
p1 = points1[i];
// Residual of p1 against the rotated p0 (no translation in this model).
*rmse += sqr(p1.x - M(0,0)*p0.x - M(0,1)*p0.y) +
sqr(p1.y - M(1,0)*p0.x - M(1,1)*p0.y);
}
*rmse = sqrt(*rmse / npoints);
}
return M;
}
// Least-squares fit of a rigid (rotation + translation) transform mapping
// points0 to points1, via the SVD/Procrustes method: center both point sets,
// build the 2x2 covariance A, take R = U*Vt, then recover the translation
// from the centroids. Returns a 3x3 homogeneous matrix; if `rmse` is
// non-null, the root-mean-square residual is stored there.
// NOTE(review): no det(R) sign correction is applied after the SVD, so a
// reflection could be returned for degenerate/noisy data — presumably
// acceptable for this tracker's inputs (TODO confirm).
static Mat estimateGlobMotionLeastSquaresRigid(
int npoints, Point2f *points0, Point2f *points1, float *rmse)
{
// Centroids of both point sets.
Point2f mean0(0.f, 0.f);
Point2f mean1(0.f, 0.f);
for (int i = 0; i < npoints; ++i)
{
mean0 += points0[i];
mean1 += points1[i];
}
mean0 *= 1.f / npoints;
mean1 *= 1.f / npoints;
// Cross-covariance of the centered point sets.
Mat_<float> A = Mat::zeros(2, 2, CV_32F);
Point2f pt0, pt1;
for (int i = 0; i < npoints; ++i)
{
pt0 = points0[i] - mean0;
pt1 = points1[i] - mean1;
A(0,0) += pt1.x * pt0.x;
A(0,1) += pt1.x * pt0.y;
A(1,0) += pt1.y * pt0.x;
A(1,1) += pt1.y * pt0.y;
}
Mat_<float> M = Mat::eye(3, 3, CV_32F);
// Optimal rotation is U*Vt from the SVD of the covariance.
SVD svd(A);
Mat_<float> R = svd.u * svd.vt;
Mat tmp(M(Rect(0,0,2,2)));
R.copyTo(tmp);
// Translation aligns the rotated centroid of set 0 with the centroid of set 1.
M(0,2) = mean1.x - R(0,0)*mean0.x - R(0,1)*mean0.y;
M(1,2) = mean1.y - R(1,0)*mean0.x - R(1,1)*mean0.y;
if (rmse)
{
*rmse = 0;
for (int i = 0; i < npoints; ++i)
{
pt0 = points0[i];
pt1 = points1[i];
*rmse += sqr(pt1.x - M(0,0)*pt0.x - M(0,1)*pt0.y - M(0,2)) +
sqr(pt1.y - M(1,0)*pt0.x - M(1,1)*pt0.y - M(1,2));
}
*rmse = sqrt(*rmse / npoints);
}
return M;
}
static Mat estimateGlobMotionLeastSquaresSimilarity(
int npoints, Point2f *points0, Point2f *points1, float *rmse)
static Mat estimateGlobMotionLeastSquaresLinearSimilarity(
int npoints, const Point2f *points0, const Point2f *points1, float *rmse)
{
Mat_<float> T0 = normalizePoints(npoints, points0);
Mat_<float> T1 = normalizePoints(npoints, points1);
Mat_<float> A(2*npoints, 4), b(2*npoints, 1);
float *a0, *a1;
Point2f p0, p1;
@ -266,14 +120,14 @@ static Mat estimateGlobMotionLeastSquaresSimilarity(
a1 = A[2*i+1];
p0 = points0[i];
p1 = points1[i];
a0[0] = p0.x; a0[1] = p0.y; a0[2] = 1; a0[3] = 0;
a0[0] = p0.x; a0[1] = p0.y; a0[2] = 1; a0[3] = 0;
a1[0] = p0.y; a1[1] = -p0.x; a1[2] = 0; a1[3] = 1;
b(2*i,0) = p1.x;
b(2*i+1,0) = p1.y;
}
Mat_<float> sol;
solve(A, b, sol, DECOMP_NORMAL | DECOMP_LU);
solve(A, b, sol, DECOMP_SVD);
if (rmse)
*rmse = static_cast<float>(norm(A*sol, b, NORM_L2) / sqrt(static_cast<double>(npoints)));
@ -284,17 +138,13 @@ static Mat estimateGlobMotionLeastSquaresSimilarity(
M(1,0) = -sol(1,0);
M(0,2) = sol(2,0);
M(1,2) = sol(3,0);
return T1.inv() * M * T0;
return M;
}
static Mat estimateGlobMotionLeastSquaresAffine(
int npoints, Point2f *points0, Point2f *points1, float *rmse)
int npoints, const Point2f *points0, const Point2f *points1, float *rmse)
{
Mat_<float> T0 = normalizePoints(npoints, points0);
Mat_<float> T1 = normalizePoints(npoints, points1);
Mat_<float> A(2*npoints, 6), b(2*npoints, 1);
float *a0, *a1;
Point2f p0, p1;
@ -312,7 +162,7 @@ static Mat estimateGlobMotionLeastSquaresAffine(
}
Mat_<float> sol;
solve(A, b, sol, DECOMP_NORMAL | DECOMP_LU);
solve(A, b, sol, DECOMP_SVD);
if (rmse)
*rmse = static_cast<float>(norm(A*sol, b, NORM_L2) / sqrt(static_cast<double>(npoints)));
@ -322,58 +172,48 @@ static Mat estimateGlobMotionLeastSquaresAffine(
for (int j = 0; j < 3; ++j, ++k)
M(i,j) = sol(k,0);
return T1.inv() * M * T0;
return M;
}
Mat estimateGlobalMotionLeastSquares(
InputOutputArray points0, InputOutputArray points1, int model, float *rmse)
const vector<Point2f> &points0, const vector<Point2f> &points1, int model, float *rmse)
{
CV_Assert(model <= MM_AFFINE);
CV_Assert(points0.type() == points1.type());
const int npoints = points0.getMat().checkVector(2);
CV_Assert(points1.getMat().checkVector(2) == npoints);
CV_Assert(points0.size() == points1.size());
typedef Mat (*Impl)(int, Point2f*, Point2f*, float*);
typedef Mat (*Impl)(int, const Point2f*, const Point2f*, float*);
static Impl impls[] = { estimateGlobMotionLeastSquaresTranslation,
estimateGlobMotionLeastSquaresTranslationAndScale,
estimateGlobMotionLeastSquaresRotation,
estimateGlobMotionLeastSquaresRigid,
estimateGlobMotionLeastSquaresSimilarity,
estimateGlobMotionLeastSquaresLinearSimilarity,
estimateGlobMotionLeastSquaresAffine };
Point2f *points0_ = points0.getMat().ptr<Point2f>();
Point2f *points1_ = points1.getMat().ptr<Point2f>();
return impls[model](npoints, points0_, points1_, rmse);
int npoints = static_cast<int>(points0.size());
return impls[model](npoints, &points0[0], &points1[0], rmse);
}
Mat estimateGlobalMotionRansac(
InputArray points0, InputArray points1, int model, const RansacParams &params,
float *rmse, int *ninliers)
Mat estimateGlobalMotionRobust(
const vector<Point2f> &points0, const vector<Point2f> &points1, int model,
const RansacParams &params, float *rmse, int *ninliers)
{
CV_Assert(model <= MM_AFFINE);
CV_Assert(points0.type() == points1.type());
const int npoints = points0.getMat().checkVector(2);
CV_Assert(points1.getMat().checkVector(2) == npoints);
CV_Assert(points0.size() == points1.size());
typedef Mat (*Impl)(int, const Point2f*, const Point2f*, float*);
static Impl impls[] = { estimateGlobMotionLeastSquaresTranslation,
estimateGlobMotionLeastSquaresTranslationAndScale,
estimateGlobMotionLeastSquaresLinearSimilarity,
estimateGlobMotionLeastSquaresAffine };
const Point2f *points0_ = points0.getMat().ptr<Point2f>();
const Point2f *points1_ = points1.getMat().ptr<Point2f>();
const int niters = params.niters();
const int npoints = static_cast<int>(points0.size());
const int niters = static_cast<int>(ceil(log(1 - params.prob) /
log(1 - pow(1 - params.eps, params.size))));
// current hypothesis
RNG rng(0);
vector<int> indices(params.size);
vector<Point2f> subset0(params.size);
vector<Point2f> subset1(params.size);
// best hypothesis
vector<Point2f> subset0best(params.size);
vector<Point2f> subset1best(params.size);
vector<Point2f> subset0(params.size), subset1(params.size);
vector<Point2f> subset0best(params.size), subset1best(params.size);
Mat_<float> bestM;
int ninliersMax = -1;
RNG rng(0);
Point2f p0, p1;
float x, y;
@ -393,42 +233,40 @@ Mat estimateGlobalMotionRansac(
}
for (int i = 0; i < params.size; ++i)
{
subset0[i] = points0_[indices[i]];
subset1[i] = points1_[indices[i]];
subset0[i] = points0[indices[i]];
subset1[i] = points1[indices[i]];
}
Mat_<float> M = estimateGlobalMotionLeastSquares(subset0, subset1, model, 0);
Mat_<float> M = impls[model](params.size, &subset0[0], &subset1[0], 0);
int numinliers = 0;
int _ninliers = 0;
for (int i = 0; i < npoints; ++i)
{
p0 = points0_[i];
p1 = points1_[i];
p0 = points0[i]; p1 = points1[i];
x = M(0,0)*p0.x + M(0,1)*p0.y + M(0,2);
y = M(1,0)*p0.x + M(1,1)*p0.y + M(1,2);
if (sqr(x - p1.x) + sqr(y - p1.y) < params.thresh * params.thresh)
numinliers++;
_ninliers++;
}
if (numinliers >= ninliersMax)
if (_ninliers >= ninliersMax)
{
bestM = M;
ninliersMax = numinliers;
ninliersMax = _ninliers;
subset0best.swap(subset0);
subset1best.swap(subset1);
}
}
if (ninliersMax < params.size)
// compute RMSE
bestM = estimateGlobalMotionLeastSquares(subset0best, subset1best, model, rmse);
// compute rmse
bestM = impls[model](params.size, &subset0best[0], &subset1best[0], rmse);
else
{
subset0.resize(ninliersMax);
subset1.resize(ninliersMax);
for (int i = 0, j = 0; i < npoints; ++i)
{
p0 = points0_[i];
p1 = points1_[i];
p0 = points0[i]; p1 = points1[i];
x = bestM(0,0)*p0.x + bestM(0,1)*p0.y + bestM(0,2);
y = bestM(1,0)*p0.x + bestM(1,1)*p0.y + bestM(1,2);
if (sqr(x - p1.x) + sqr(y - p1.y) < params.thresh * params.thresh)
@ -438,7 +276,7 @@ Mat estimateGlobalMotionRansac(
j++;
}
}
bestM = estimateGlobalMotionLeastSquares(subset0, subset1, model, rmse);
bestM = impls[model](ninliersMax, &subset0[0], &subset1[0], rmse);
}
if (ninliers)
@ -448,249 +286,33 @@ Mat estimateGlobalMotionRansac(
}
// Constructs a RANSAC-based L2 motion estimator for the given motion model.
// Defaults: RANSAC parameters tuned per 2D motion model, and a minimum inlier
// ratio of 10% below which estimates are rejected (see estimate()).
MotionEstimatorRansacL2::MotionEstimatorRansacL2(MotionModel model)
: MotionEstimatorBase(model)
{
setRansacParams(RansacParams::default2dMotion(model));
setMinInlierRatio(0.1f);
}
// Estimates global motion between two point sets.
// For all models except homography a RANSAC fit is used; for homography
// cv::findHomography with the LMedS scheme provides both the matrix and the
// inlier mask. If the inlier ratio falls below minInlierRatio_, the identity
// transform is returned and *ok (when supplied) is set to false.
Mat MotionEstimatorRansacL2::estimate(InputArray points0, InputArray points1, bool *ok)
{
CV_Assert(points0.type() == points1.type());
const int npoints = points0.getMat().checkVector(2);
CV_Assert(points1.getMat().checkVector(2) == npoints);

Mat_<float> motion;
int inlierCount = 0;

if (motionModel() == MM_HOMOGRAPHY)
{
vector<uchar> inlierMask;
motion = findHomography(points0, points1, inlierMask, CV_LMEDS);
for (int i = 0; i < npoints; ++i)
inlierCount += inlierMask[i] ? 1 : 0;
}
else
motion = estimateGlobalMotionRansac(
points0, points1, motionModel(), ransacParams_, 0, &inlierCount);

// Confidence test: reject the motion when too few correspondences agree.
const bool confident = !(static_cast<float>(inlierCount) / npoints < minInlierRatio_);
if (!confident)
motion = Mat::eye(3, 3, CV_32F);
if (ok) *ok = confident;

return motion;
}
// Constructs an L1-optimal motion estimator (solved as a linear program
// in estimate(); requires the Clp solver at build time).
MotionEstimatorL1::MotionEstimatorL1(MotionModel model)
: MotionEstimatorBase(model)
{
}
// TODO will estimation of all motions as one LP problem be faster?
// Estimates global motion between two point sets by minimizing the L1 norm of
// per-point residuals, formulated as a linear program and solved with Clp.
// Variables: columns 0..5 hold the six affine parameters; columns 6+2i and
// 6+2i+1 are non-negative slack variables bounding |residual| of point i in x
// and y. Extra equality rows restrict the affine model down to similarity,
// translation+scale, or pure translation as requested.
// @param ok  optional output flag; false only when Clp support is missing.
// @return 3x3 float motion matrix (affine part filled, last row [0 0 1]).
Mat MotionEstimatorL1::estimate(InputArray points0, InputArray points1, bool *ok)
{
CV_Assert(points0.type() == points1.type());
const int npoints = points0.getMat().checkVector(2);
CV_Assert(points1.getMat().checkVector(2) == npoints);
#ifndef HAVE_CLP
CV_Error(CV_StsError, "The library is built without Clp support");
if (ok) *ok = false;
return Mat::eye(3, 3, CV_32F);
#else
// Rigid model is not expressible with these linear constraints.
CV_Assert(motionModel() <= MM_AFFINE && motionModel() != MM_RIGID);
// prepare LP problem
const Point2f *points0_ = points0.getMat().ptr<Point2f>();
const Point2f *points1_ = points1.getMat().ptr<Point2f>();
int ncols = 6 + 2*npoints;
int nrows = 4*npoints;
// Extra rows tie affine parameters together for restricted models.
if (motionModel() == MM_SIMILARITY)
nrows += 2;
else if (motionModel() == MM_TRANSLATION_AND_SCALE)
nrows += 3;
else if (motionModel() == MM_TRANSLATION)
nrows += 4;
rows_.clear();
cols_.clear();
elems_.clear();
obj_.assign(ncols, 0);
collb_.assign(ncols, -INF);
colub_.assign(ncols, INF);
// Objective: minimize the sum of the (non-negative) slack variables.
int c = 6;
for (int i = 0; i < npoints; ++i, c += 2)
{
obj_[c] = 1;
collb_[c] = 0;
obj_[c+1] = 1;
collb_[c+1] = 0;
}
elems_.clear();
rowlb_.assign(nrows, -INF);
rowub_.assign(nrows, INF);
int r = 0;
Point2f p0, p1;
// Four rows per point encode -slack <= (A*p0 - p1) <= slack, per axis.
for (int i = 0; i < npoints; ++i, r += 4)
{
p0 = points0_[i];
p1 = points1_[i];
set(r, 0, p0.x); set(r, 1, p0.y); set(r, 2, 1); set(r, 6+2*i, -1);
rowub_[r] = p1.x;
set(r+1, 3, p0.x); set(r+1, 4, p0.y); set(r+1, 5, 1); set(r+1, 6+2*i+1, -1);
rowub_[r+1] = p1.y;
set(r+2, 0, p0.x); set(r+2, 1, p0.y); set(r+2, 2, 1); set(r+2, 6+2*i, 1);
rowlb_[r+2] = p1.x;
set(r+3, 3, p0.x); set(r+3, 4, p0.y); set(r+3, 5, 1); set(r+3, 6+2*i+1, 1);
rowlb_[r+3] = p1.y;
}
// Model restriction rows (equalities: rowlb == rowub).
if (motionModel() == MM_SIMILARITY)
{
set(r, 0, 1); set(r, 4, -1); rowlb_[r] = rowub_[r] = 0;
set(r+1, 1, 1); set(r+1, 3, 1); rowlb_[r+1] = rowub_[r+1] = 0;
}
else if (motionModel() == MM_TRANSLATION_AND_SCALE)
{
set(r, 0, 1); set(r, 4, -1); rowlb_[r] = rowub_[r] = 0;
set(r+1, 1, 1); rowlb_[r+1] = rowub_[r+1] = 0;
set(r+2, 3, 1); rowlb_[r+2] = rowub_[r+2] = 0;
}
else if (motionModel() == MM_TRANSLATION)
{
set(r, 0, 1); rowlb_[r] = rowub_[r] = 1;
set(r+1, 1, 1); rowlb_[r+1] = rowub_[r+1] = 0;
set(r+2, 3, 1); rowlb_[r+2] = rowub_[r+2] = 0;
set(r+3, 4, 1); rowlb_[r+3] = rowub_[r+3] = 1;
}
// solve
CoinPackedMatrix A(true, &rows_[0], &cols_[0], &elems_[0], elems_.size());
A.setDimensions(nrows, ncols);
ClpSimplex model(false);
model.loadProblem(A, &collb_[0], &colub_[0], &obj_[0], &rowlb_[0], &rowub_[0]);
ClpDualRowSteepest dualSteep(1);
model.setDualRowPivotAlgorithm(dualSteep);
model.scaling(1);
model.dual();
// extract motion
const double *sol = model.getColSolution();
Mat_<float> M = Mat::eye(3, 3, CV_32F);
M(0,0) = sol[0];
M(0,1) = sol[1];
M(0,2) = sol[2];
M(1,0) = sol[3];
M(1,1) = sol[4];
M(1,2) = sol[5];
// NOTE(review): solver status is not checked before declaring success —
// presumably acceptable here, but an infeasible/unbounded LP would go unnoticed.
if (ok) *ok = true;
return M;
#endif
}
// Opens a text file of previously saved motions for replay; asserts that the
// file exists and is readable. The motion model is unknown for replayed data.
FromFileMotionReader::FromFileMotionReader(const string &path)
: ImageMotionEstimatorBase(MM_UNKNOWN)
{
file_.open(path.c_str());
CV_Assert(file_.is_open());
}
// Reads the next pre-computed 3x3 motion (row-major, whitespace-separated,
// followed by a success flag) from the file opened in the constructor.
// The frame arguments are ignored — motions are replayed, not computed.
// @param ok  optional output: the stored success flag (false on read failure).
// @return the 3x3 float motion matrix read from file.
Mat FromFileMotionReader::estimate(const Mat &/*frame0*/, const Mat &/*frame1*/, bool *ok)
{
Mat_<float> M(3, 3);
// Initialize the flag: if the stream extraction fails (truncated file),
// operator>> leaves the target untouched and an uninitialized bool would
// otherwise be read — undefined behavior.
bool ok_ = false;
file_ >> M(0,0) >> M(0,1) >> M(0,2)
>> M(1,0) >> M(1,1) >> M(1,2)
>> M(2,0) >> M(2,1) >> M(2,2) >> ok_;
if (ok) *ok = ok_;
return M;
}
// Wraps another motion estimator and logs its results to a text file.
// Asserts that the destination file can be opened for writing.
ToFileMotionWriter::ToFileMotionWriter(const string &path, Ptr<ImageMotionEstimatorBase> estimator)
: ImageMotionEstimatorBase(estimator->motionModel()), motionEstimator_(estimator)
{
file_.open(path.c_str());
CV_Assert(file_.is_open());
}
// Delegates estimation to the wrapped estimator, then appends the full 3x3
// motion matrix (row-major) and the success flag to the log file — one
// space-separated record per frame pair. The result is returned unchanged.
Mat ToFileMotionWriter::estimate(const Mat &frame0, const Mat &frame1, bool *ok)
{
bool estimateOk;
Mat_<float> motion = motionEstimator_->estimate(frame0, frame1, &estimateOk);
for (int row = 0; row < 3; ++row)
for (int col = 0; col < 3; ++col)
file_ << motion(row,col) << " ";
file_ << estimateOk << endl;
if (ok) *ok = estimateOk;
return motion;
}
KeypointBasedMotionEstimator::KeypointBasedMotionEstimator(Ptr<MotionEstimatorBase> estimator)
: ImageMotionEstimatorBase(estimator->motionModel()), motionEstimator_(estimator)
PyrLkRobustMotionEstimator::PyrLkRobustMotionEstimator()
: ransacParams_(RansacParams::affine2dMotionStd())
{
setDetector(new GoodFeaturesToTrackDetector());
setOpticalFlowEstimator(new SparsePyrLkOptFlowEstimator());
setOutlierRejector(new NullOutlierRejector());
setOptFlowEstimator(new SparsePyrLkOptFlowEstimator());
setMotionModel(AFFINE);
setMaxRmse(0.5f);
setMinInlierRatio(0.1f);
}
Mat KeypointBasedMotionEstimator::estimate(const Mat &frame0, const Mat &frame1, bool *ok)
Mat PyrLkRobustMotionEstimator::estimate(const Mat &frame0, const Mat &frame1)
{
// find keypoints
detector_->detect(frame0, keypointsPrev_);
// extract points from keypoints
pointsPrev_.resize(keypointsPrev_.size());
for (size_t i = 0; i < keypointsPrev_.size(); ++i)
pointsPrev_[i] = keypointsPrev_[i].pt;
// find correspondences
optFlowEstimator_->run(frame0, frame1, pointsPrev_, points_, status_, noArray());
// leave good correspondences only
pointsPrevGood_.clear(); pointsPrevGood_.reserve(points_.size());
pointsGood_.clear(); pointsGood_.reserve(points_.size());
for (size_t i = 0; i < points_.size(); ++i)
size_t npoints = points_.size();
pointsPrevGood_.clear();
pointsPrevGood_.reserve(npoints);
pointsGood_.clear();
pointsGood_.reserve(npoints);
for (size_t i = 0; i < npoints; ++i)
{
if (status_[i])
{
@ -699,129 +321,40 @@ Mat KeypointBasedMotionEstimator::estimate(const Mat &frame0, const Mat &frame1,
}
}
// perform outlier rejection
IOutlierRejector *outlRejector = static_cast<IOutlierRejector*>(outlierRejector_);
if (!dynamic_cast<NullOutlierRejector*>(outlRejector))
{
pointsPrev_.swap(pointsPrevGood_);
points_.swap(pointsGood_);
outlierRejector_->process(frame0.size(), pointsPrev_, points_, status_);
pointsPrevGood_.clear();
pointsPrevGood_.reserve(points_.size());
pointsGood_.clear();
pointsGood_.reserve(points_.size());
for (size_t i = 0; i < points_.size(); ++i)
{
if (status_[i])
{
pointsPrevGood_.push_back(pointsPrev_[i]);
pointsGood_.push_back(points_[i]);
}
}
}
float rmse;
int ninliers;
Mat M = estimateGlobalMotionRobust(
pointsPrevGood_, pointsGood_, motionModel_, ransacParams_, &rmse, &ninliers);
// estimate motion
return motionEstimator_->estimate(pointsPrevGood_, pointsGood_, ok);
}
#ifdef HAVE_OPENCV_GPU
// GPU-accelerated keypoint-based wrapper around a 2D motion estimator.
// Requires at least one CUDA-capable device; outlier rejection is disabled
// by default (NullOutlierRejector passes all correspondences through).
KeypointBasedMotionEstimatorGpu::KeypointBasedMotionEstimatorGpu(Ptr<MotionEstimatorBase> estimator)
: ImageMotionEstimatorBase(estimator->motionModel()), motionEstimator_(estimator)
{
CV_Assert(gpu::getCudaEnabledDeviceCount() > 0);
setOutlierRejector(new NullOutlierRejector());
}
// CPU-facing overload: uploads both frames into reusable device buffers
// (frame0_, frame1_) and delegates to the GpuMat overload.
Mat KeypointBasedMotionEstimatorGpu::estimate(const Mat &frame0, const Mat &frame1, bool *ok)
{
frame0_.upload(frame0);
frame1_.upload(frame1);
return estimate(frame0_, frame1_, ok);
}
Mat KeypointBasedMotionEstimatorGpu::estimate(const gpu::GpuMat &frame0, const gpu::GpuMat &frame1, bool *ok)
{
// convert frame to gray if it's color
gpu::GpuMat grayFrame0;
if (frame0.channels() == 1)
grayFrame0 = frame0;
else
{
gpu::cvtColor(frame0, grayFrame0_, CV_BGR2GRAY);
grayFrame0 = grayFrame0_;
}
// find keypoints
detector_(grayFrame0, pointsPrev_);
// find correspondences
optFlowEstimator_.run(frame0, frame1, pointsPrev_, points_, status_);
// leave good correspondences only
gpu::compactPoints(pointsPrev_, points_, status_);
pointsPrev_.download(hostPointsPrev_);
points_.download(hostPoints_);
// perform outlier rejection
IOutlierRejector *rejector = static_cast<IOutlierRejector*>(outlierRejector_);
if (!dynamic_cast<NullOutlierRejector*>(rejector))
{
outlierRejector_->process(frame0.size(), hostPointsPrev_, hostPoints_, rejectionStatus_);
hostPointsPrevTmp_.clear();
hostPointsPrevTmp_.reserve(hostPoints_.cols);
hostPointsTmp_.clear();
hostPointsTmp_.reserve(hostPoints_.cols);
for (int i = 0; i < hostPoints_.cols; ++i)
{
if (rejectionStatus_[i])
{
hostPointsPrevTmp_.push_back(hostPointsPrev_.at<Point2f>(0,i));
hostPointsTmp_.push_back(hostPoints_.at<Point2f>(0,i));
}
}
hostPointsPrev_ = Mat(1, (int)hostPointsPrevTmp_.size(), CV_32FC2, &hostPointsPrevTmp_[0]);
hostPoints_ = Mat(1, (int)hostPointsTmp_.size(), CV_32FC2, &hostPointsTmp_[0]);
}
if (rmse > maxRmse_ || static_cast<float>(ninliers) / pointsGood_.size() < minInlierRatio_)
M = Mat::eye(3, 3, CV_32F);
// estimate motion
return motionEstimator_->estimate(hostPointsPrev_, hostPoints_, ok);
return M;
}
#endif // HAVE_OPENCV_GPU
Mat getMotion(int from, int to, const vector<Mat> &motions)
Mat getMotion(int from, int to, const Mat *motions, int size)
{
Mat M = Mat::eye(3, 3, CV_32F);
if (to > from)
{
for (int i = from; i < to; ++i)
M = at(i, motions) * M;
M = at(i, motions, size) * M;
}
else if (from > to)
{
for (int i = to; i < from; ++i)
M = at(i, motions) * M;
M = at(i, motions, size) * M;
M = M.inv();
}
return M;
}
} // namespace videostab
} // namespace cv
Mat getMotion(int from, int to, const vector<Mat> &motions)
{
return getMotion(from, to, &motions[0], (int)motions.size());
}
} // namespace videostab
} // namespace cv

@ -45,8 +45,6 @@
#include "opencv2/videostab/inpainting.hpp"
#include "opencv2/videostab/global_motion.hpp"
#include "opencv2/videostab/fast_marching.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
#include "opencv2/opencv_modules.hpp"
using namespace std;
@ -71,14 +69,6 @@ void InpaintingPipeline::setFrames(const vector<Mat> &val)
}
// Propagates the motion model to every inpainter in the chain, then records
// it on the pipeline itself via the base-class setter.
void InpaintingPipeline::setMotionModel(MotionModel val)
{
for (size_t idx = 0, count = inpainters_.size(); idx < count; ++idx)
inpainters_[idx]->setMotionModel(val);
InpainterBase::setMotionModel(val);
}
void InpaintingPipeline::setMotions(const vector<Mat> &val)
{
for (size_t i = 0; i < inpainters_.size(); ++i)
@ -103,6 +93,14 @@ void InpaintingPipeline::setStabilizationMotions(const vector<Mat> &val)
}
void InpaintingPipeline::update()
{
for (size_t i = 0; i < inpainters_.size(); ++i)
inpainters_[i]->update();
InpainterBase::update();
}
void InpaintingPipeline::inpaint(int idx, Mat &frame, Mat &mask)
{
for (size_t i = 0; i < inpainters_.size(); ++i)
@ -130,9 +128,9 @@ void ConsistentMosaicInpainter::inpaint(int idx, Mat &frame, Mat &mask)
CV_Assert(mask.size() == frame.size() && mask.type() == CV_8U);
Mat invS = at(idx, *stabilizationMotions_).inv();
vector<Mat_<float> > vmotions(2*radius_ + 1);
vector<Mat_<float> > _motions(2*radius_ + 1);
for (int i = -radius_; i <= radius_; ++i)
vmotions[radius_ + i] = getMotion(idx, idx + i, *motions_) * invS;
_motions[radius_ + i] = getMotion(idx, idx + i, *motions_) * invS;
int n;
float mean, var;
@ -154,7 +152,7 @@ void ConsistentMosaicInpainter::inpaint(int idx, Mat &frame, Mat &mask)
for (int i = -radius_; i <= radius_; ++i)
{
const Mat_<Point3_<uchar> > &framei = at(idx + i, *frames_);
const Mat_<float> &Mi = vmotions[radius_ + i];
const Mat_<float> &Mi = _motions[radius_ + i];
int xi = cvRound(Mi(0,0)*x + Mi(0,1)*y + Mi(0,2));
int yi = cvRound(Mi(1,0)*x + Mi(1,1)*y + Mi(1,2));
if (xi >= 0 && xi < framei.cols && yi >= 0 && yi < framei.rows)
@ -339,12 +337,12 @@ MotionInpainter::MotionInpainter()
void MotionInpainter::inpaint(int idx, Mat &frame, Mat &mask)
{
priority_queue<pair<float,int> > neighbors;
vector<Mat> vmotions(2*radius_ + 1);
vector<Mat> _motions(2*radius_ + 1);
for (int i = -radius_; i <= radius_; ++i)
{
Mat motion0to1 = getMotion(idx, idx + i, *motions_) * at(idx, *stabilizationMotions_).inv();
vmotions[radius_ + i] = motion0to1;
_motions[radius_ + i] = motion0to1;
if (i != 0)
{
@ -370,36 +368,19 @@ void MotionInpainter::inpaint(int idx, Mat &frame, Mat &mask)
int neighbor = neighbors.top().second;
neighbors.pop();
Mat motion1to0 = vmotions[radius_ + neighbor - idx].inv();
// warp frame
Mat motion1to0 = _motions[radius_ + neighbor - idx].inv();
frame1_ = at(neighbor, *frames_);
if (motionModel_ != MM_HOMOGRAPHY)
warpAffine(
frame1_, transformedFrame1_, motion1to0(Rect(0,0,3,2)), frame1_.size(),
INTER_LINEAR, borderMode_);
else
warpPerspective(
frame1_, transformedFrame1_, motion1to0, frame1_.size(), INTER_LINEAR,
borderMode_);
warpAffine(
frame1_, transformedFrame1_, motion1to0(Rect(0,0,3,2)), frame1_.size(),
INTER_LINEAR, borderMode_);
cvtColor(transformedFrame1_, transformedGrayFrame1_, CV_BGR2GRAY);
// warp mask
if (motionModel_ != MM_HOMOGRAPHY)
warpAffine(
mask1_, transformedMask1_, motion1to0(Rect(0,0,3,2)), mask1_.size(),
INTER_NEAREST);
else
warpPerspective(mask1_, transformedMask1_, motion1to0, mask1_.size(), INTER_NEAREST);
warpAffine(
mask1_, transformedMask1_, motion1to0(Rect(0,0,3,2)), mask1_.size(),
INTER_NEAREST);
erode(transformedMask1_, transformedMask1_, Mat());
// update flow
optFlowEstimator_->run(grayFrame_, transformedGrayFrame1_, flowX_, flowY_, flowErrors_);
calcFlowMask(
@ -521,6 +502,7 @@ void completeFrameAccordingToFlow(
Mat_<uchar> flowMask_(flowMask), mask1_(mask1), mask0_(mask0);
Mat_<float> flowX_(flowX), flowY_(flowY);
//int count = 0;
for (int y0 = 0; y0 < frame0.rows; ++y0)
{
for (int x0 = 0; x0 < frame0.cols; ++x0)
@ -535,10 +517,12 @@ void completeFrameAccordingToFlow(
{
frame0.at<Point3_<uchar> >(y0,x0) = frame1.at<Point3_<uchar> >(y1,x1);
mask0_(y0,x0) = 255;
//count++;
}
}
}
}
//cout << count << endl;
}
} // namespace videostab

@ -43,8 +43,6 @@
#include "precomp.hpp"
#include "opencv2/videostab/motion_stabilizing.hpp"
#include "opencv2/videostab/global_motion.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
#include "clp.hpp"
using namespace std;
@ -53,520 +51,37 @@ namespace cv
namespace videostab
{
// Runs each stabilizer in sequence, accumulating their stabilization
// transforms and re-expressing the inter-frame motions in the already
// stabilized coordinate frame before the next stabilizer runs.
// @param size                 number of frames (stabilizationMotions entries)
// @param motions              inter-frame motions (read-only; cloned below)
// @param range                frame index range forwarded to each stabilizer
// @param stabilizationMotions output: accumulated per-frame 3x3 transforms
void MotionStabilizationPipeline::stabilize(
int size, const vector<Mat> &motions, pair<int,int> range, Mat *stabilizationMotions)
{
// Deep-copy the motions so stabilizers can be fed updated values without
// mutating the caller's data.
vector<Mat> updatedMotions(motions.size());
for (size_t i = 0; i < motions.size(); ++i)
updatedMotions[i] = motions[i].clone();
vector<Mat> stabilizationMotions_(size);
// Start from the identity transform for every frame.
for (int i = 0; i < size; ++i)
stabilizationMotions[i] = Mat::eye(3, 3, CV_32F);
for (size_t i = 0; i < stabilizers_.size(); ++i)
{
stabilizers_[i]->stabilize(size, updatedMotions, range, &stabilizationMotions_[0]);
// Compose this stage's transforms onto the accumulated result.
for (int k = 0; k < size; ++k)
stabilizationMotions[k] = stabilizationMotions_[k] * stabilizationMotions[k];
// Rewrite each inter-frame motion in the stabilized coordinate frame:
// M'[j] = S[j+1] * M[j] * S[j]^-1.
for (int j = 0; j + 1 < size; ++j)
{
Mat S0 = stabilizationMotions[j];
Mat S1 = stabilizationMotions[j+1];
at(j, updatedMotions) = S1 * at(j, updatedMotions) * S0.inv();
}
}
}
void MotionFilterBase::stabilize(
int size, const vector<Mat> &motions, pair<int,int> range, Mat *stabilizationMotions)
void MotionFilterBase::stabilize(const Mat *motions, int size, Mat *stabilizationMotions) const
{
for (int i = 0; i < size; ++i)
stabilizationMotions[i] = stabilize(i, motions, range);
stabilizationMotions[i] = stabilize(i, motions, size);
}
void GaussianMotionFilter::setParams(int _radius, float _stdev)
void GaussianMotionFilter::update()
{
radius_ = _radius;
stdev_ = _stdev > 0.f ? _stdev : sqrt(static_cast<float>(_radius));
float sigma = stdev_ > 0.f ? stdev_ : sqrt(static_cast<float>(radius_));
float sum = 0;
weight_.resize(2*radius_ + 1);
for (int i = -radius_; i <= radius_; ++i)
sum += weight_[radius_ + i] = std::exp(-i*i/(stdev_*stdev_));
sum += weight_[radius_ + i] = std::exp(-i*i/(sigma*sigma));
for (int i = -radius_; i <= radius_; ++i)
weight_[radius_ + i] /= sum;
}
Mat GaussianMotionFilter::stabilize(int idx, const vector<Mat> &motions, pair<int,int> range)
Mat GaussianMotionFilter::stabilize(int index, const Mat *motions, int size) const
{
const Mat &cur = at(idx, motions);
const Mat &cur = at(index, motions, size);
Mat res = Mat::zeros(cur.size(), cur.type());
float sum = 0.f;
int iMin = max(idx - radius_, range.first);
int iMax = min(idx + radius_, range.second);
for (int i = iMin; i <= iMax; ++i)
for (int i = std::max(index - radius_, 0); i <= index + radius_; ++i)
{
res += weight_[radius_ + i - idx] * getMotion(idx, i, motions);
sum += weight_[radius_ + i - idx];
res += weight_[radius_ + i - index] * getMotion(index, i, motions, size);
sum += weight_[radius_ + i - index];
}
return sum > 0.f ? res / sum : Mat::eye(cur.size(), cur.type());
}
// Constructs an LP-based motion stabilizer with default parameters.
// Frame size starts at (0,0) and must be set before stabilization; trim ratio
// defaults to 10%. Weights w1..w3 scale successive smoothness terms and w4
// additionally scales the non-translation components (see stabilize()).
LpMotionStabilizer::LpMotionStabilizer(MotionModel model)
{
setMotionModel(model);
setFrameSize(Size(0,0));
setTrimRatio(0.1f);
setWeight1(1);
setWeight2(10);
setWeight3(100);
setWeight4(100);
}
#ifndef HAVE_CLP
// Stub compiled when OpenCV is built without the Clp LP solver:
// LP-based stabilization is unavailable, so always raise an error.
void LpMotionStabilizer::stabilize(int, const vector<Mat>&, pair<int,int>, Mat*)
{
CV_Error(CV_StsError, "The library is built without Clp support");
}
#else
void LpMotionStabilizer::stabilize(
int size, const vector<Mat> &motions, pair<int,int> /*range*/, Mat *stabilizationMotions)
{
CV_Assert(model_ <= MM_AFFINE);
int N = size;
const vector<Mat> &M = motions;
Mat *S = stabilizationMotions;
double w = frameSize_.width, h = frameSize_.height;
double tw = w * trimRatio_, th = h * trimRatio_;
int ncols = 4*N + 6*(N-1) + 6*(N-2) + 6*(N-3);
int nrows = 8*N + 2*6*(N-1) + 2*6*(N-2) + 2*6*(N-3);
rows_.clear();
cols_.clear();
elems_.clear();
obj_.assign(ncols, 0);
collb_.assign(ncols, -INF);
colub_.assign(ncols, INF);
int c = 4*N;
// for each slack variable e[t] (error bound)
for (int t = 0; t < N-1; ++t, c += 6)
{
// e[t](0,0)
obj_[c] = w4_*w1_;
collb_[c] = 0;
// e[t](0,1)
obj_[c+1] = w4_*w1_;
collb_[c+1] = 0;
// e[t](0,2)
obj_[c+2] = w1_;
collb_[c+2] = 0;
// e[t](1,0)
obj_[c+3] = w4_*w1_;
collb_[c+3] = 0;
// e[t](1,1)
obj_[c+4] = w4_*w1_;
collb_[c+4] = 0;
// e[t](1,2)
obj_[c+5] = w1_;
collb_[c+5] = 0;
}
for (int t = 0; t < N-2; ++t, c += 6)
{
// e[t](0,0)
obj_[c] = w4_*w2_;
collb_[c] = 0;
// e[t](0,1)
obj_[c+1] = w4_*w2_;
collb_[c+1] = 0;
// e[t](0,2)
obj_[c+2] = w2_;
collb_[c+2] = 0;
// e[t](1,0)
obj_[c+3] = w4_*w2_;
collb_[c+3] = 0;
// e[t](1,1)
obj_[c+4] = w4_*w2_;
collb_[c+4] = 0;
// e[t](1,2)
obj_[c+5] = w2_;
collb_[c+5] = 0;
}
for (int t = 0; t < N-3; ++t, c += 6)
{
// e[t](0,0)
obj_[c] = w4_*w3_;
collb_[c] = 0;
// e[t](0,1)
obj_[c+1] = w4_*w3_;
collb_[c+1] = 0;
// e[t](0,2)
obj_[c+2] = w3_;
collb_[c+2] = 0;
// e[t](1,0)
obj_[c+3] = w4_*w3_;
collb_[c+3] = 0;
// e[t](1,1)
obj_[c+4] = w4_*w3_;
collb_[c+4] = 0;
// e[t](1,2)
obj_[c+5] = w3_;
collb_[c+5] = 0;
}
elems_.clear();
rowlb_.assign(nrows, -INF);
rowub_.assign(nrows, INF);
int r = 0;
// frame corners
const Point2d pt[] = {Point2d(0,0), Point2d(w,0), Point2d(w,h), Point2d(0,h)};
// for each frame
for (int t = 0; t < N; ++t)
{
c = 4*t;
// for each frame corner
for (int i = 0; i < 4; ++i, r += 2)
{
set(r, c, pt[i].x); set(r, c+1, pt[i].y); set(r, c+2, 1);
set(r+1, c, pt[i].y); set(r+1, c+1, -pt[i].x); set(r+1, c+3, 1);
rowlb_[r] = pt[i].x-tw;
rowub_[r] = pt[i].x+tw;
rowlb_[r+1] = pt[i].y-th;
rowub_[r+1] = pt[i].y+th;
}
}
// for each S[t+1]M[t] - S[t] - e[t] <= 0 condition
for (int t = 0; t < N-1; ++t, r += 6)
{
Mat_<float> M0 = at(t,M);
c = 4*t;
set(r, c, -1);
set(r+1, c+1, -1);
set(r+2, c+2, -1);
set(r+3, c+1, 1);
set(r+4, c, -1);
set(r+5, c+3, -1);
c = 4*(t+1);
set(r, c, M0(0,0)); set(r, c+1, M0(1,0));
set(r+1, c, M0(0,1)); set(r+1, c+1, M0(1,1));
set(r+2, c, M0(0,2)); set(r+2, c+1, M0(1,2)); set(r+2, c+2, 1);
set(r+3, c, M0(1,0)); set(r+3, c+1, -M0(0,0));
set(r+4, c, M0(1,1)); set(r+4, c+1, -M0(0,1));
set(r+5, c, M0(1,2)); set(r+5, c+1, -M0(0,2)); set(r+5, c+3, 1);
c = 4*N + 6*t;
for (int i = 0; i < 6; ++i)
set(r+i, c+i, -1);
rowub_[r] = 0;
rowub_[r+1] = 0;
rowub_[r+2] = 0;
rowub_[r+3] = 0;
rowub_[r+4] = 0;
rowub_[r+5] = 0;
}
// for each 0 <= S[t+1]M[t] - S[t] + e[t] condition
for (int t = 0; t < N-1; ++t, r += 6)
{
Mat_<float> M0 = at(t,M);
c = 4*t;
set(r, c, -1);
set(r+1, c+1, -1);
set(r+2, c+2, -1);
set(r+3, c+1, 1);
set(r+4, c, -1);
set(r+5, c+3, -1);
c = 4*(t+1);
set(r, c, M0(0,0)); set(r, c+1, M0(1,0));
set(r+1, c, M0(0,1)); set(r+1, c+1, M0(1,1));
set(r+2, c, M0(0,2)); set(r+2, c+1, M0(1,2)); set(r+2, c+2, 1);
set(r+3, c, M0(1,0)); set(r+3, c+1, -M0(0,0));
set(r+4, c, M0(1,1)); set(r+4, c+1, -M0(0,1));
set(r+5, c, M0(1,2)); set(r+5, c+1, -M0(0,2)); set(r+5, c+3, 1);
c = 4*N + 6*t;
for (int i = 0; i < 6; ++i)
set(r+i, c+i, 1);
rowlb_[r] = 0;
rowlb_[r+1] = 0;
rowlb_[r+2] = 0;
rowlb_[r+3] = 0;
rowlb_[r+4] = 0;
rowlb_[r+5] = 0;
}
// for each S[t+2]M[t+1] - S[t+1]*(I+M[t]) + S[t] - e[t] <= 0 condition
for (int t = 0; t < N-2; ++t, r += 6)
{
Mat_<float> M0 = at(t,M), M1 = at(t+1,M);
c = 4*t;
set(r, c, 1);
set(r+1, c+1, 1);
set(r+2, c+2, 1);
set(r+3, c+1, -1);
set(r+4, c, 1);
set(r+5, c+3, 1);
c = 4*(t+1);
set(r, c, -M0(0,0)-1); set(r, c+1, -M0(1,0));
set(r+1, c, -M0(0,1)); set(r+1, c+1, -M0(1,1)-1);
set(r+2, c, -M0(0,2)); set(r+2, c+1, -M0(1,2)); set(r+2, c+2, -2);
set(r+3, c, -M0(1,0)); set(r+3, c+1, M0(0,0)+1);
set(r+4, c, -M0(1,1)-1); set(r+4, c+1, M0(0,1));
set(r+5, c, -M0(1,2)); set(r+5, c+1, M0(0,2)); set(r+5, c+3, -2);
c = 4*(t+2);
set(r, c, M1(0,0)); set(r, c+1, M1(1,0));
set(r+1, c, M1(0,1)); set(r+1, c+1, M1(1,1));
set(r+2, c, M1(0,2)); set(r+2, c+1, M1(1,2)); set(r+2, c+2, 1);
set(r+3, c, M1(1,0)); set(r+3, c+1, -M1(0,0));
set(r+4, c, M1(1,1)); set(r+4, c+1, -M1(0,1));
set(r+5, c, M1(1,2)); set(r+5, c+1, -M1(0,2)); set(r+5, c+3, 1);
c = 4*N + 6*(N-1) + 6*t;
for (int i = 0; i < 6; ++i)
set(r+i, c+i, -1);
rowub_[r] = 0;
rowub_[r+1] = 0;
rowub_[r+2] = 0;
rowub_[r+3] = 0;
rowub_[r+4] = 0;
rowub_[r+5] = 0;
}
// for each 0 <= S[t+2]M[t+1]] - S[t+1]*(I+M[t]) + S[t] + e[t] condition
for (int t = 0; t < N-2; ++t, r += 6)
{
Mat_<float> M0 = at(t,M), M1 = at(t+1,M);
c = 4*t;
set(r, c, 1);
set(r+1, c+1, 1);
set(r+2, c+2, 1);
set(r+3, c+1, -1);
set(r+4, c, 1);
set(r+5, c+3, 1);
c = 4*(t+1);
set(r, c, -M0(0,0)-1); set(r, c+1, -M0(1,0));
set(r+1, c, -M0(0,1)); set(r+1, c+1, -M0(1,1)-1);
set(r+2, c, -M0(0,2)); set(r+2, c+1, -M0(1,2)); set(r+2, c+2, -2);
set(r+3, c, -M0(1,0)); set(r+3, c+1, M0(0,0)+1);
set(r+4, c, -M0(1,1)-1); set(r+4, c+1, M0(0,1));
set(r+5, c, -M0(1,2)); set(r+5, c+1, M0(0,2)); set(r+5, c+3, -2);
c = 4*(t+2);
set(r, c, M1(0,0)); set(r, c+1, M1(1,0));
set(r+1, c, M1(0,1)); set(r+1, c+1, M1(1,1));
set(r+2, c, M1(0,2)); set(r+2, c+1, M1(1,2)); set(r+2, c+2, 1);
set(r+3, c, M1(1,0)); set(r+3, c+1, -M1(0,0));
set(r+4, c, M1(1,1)); set(r+4, c+1, -M1(0,1));
set(r+5, c, M1(1,2)); set(r+5, c+1, -M1(0,2)); set(r+5, c+3, 1);
c = 4*N + 6*(N-1) + 6*t;
for (int i = 0; i < 6; ++i)
set(r+i, c+i, 1);
rowlb_[r] = 0;
rowlb_[r+1] = 0;
rowlb_[r+2] = 0;
rowlb_[r+3] = 0;
rowlb_[r+4] = 0;
rowlb_[r+5] = 0;
}
// for each S[t+3]M[t+2] - S[t+2]*(I+2M[t+1]) + S[t+1]*(2*I+M[t]) - S[t] - e[t] <= 0 condition
for (int t = 0; t < N-3; ++t, r += 6)
{
Mat_<float> M0 = at(t,M), M1 = at(t+1,M), M2 = at(t+2,M);
c = 4*t;
set(r, c, -1);
set(r+1, c+1, -1);
set(r+2, c+2, -1);
set(r+3, c+1, 1);
set(r+4, c, -1);
set(r+5, c+3, -1);
c = 4*(t+1);
set(r, c, M0(0,0)+2); set(r, c+1, M0(1,0));
set(r+1, c, M0(0,1)); set(r+1, c+1, M0(1,1)+2);
set(r+2, c, M0(0,2)); set(r+2, c+1, M0(1,2)); set(r+2, c+2, 3);
set(r+3, c, M0(1,0)); set(r+3, c+1, -M0(0,0)-2);
set(r+4, c, M0(1,1)+2); set(r+4, c+1, -M0(0,1));
set(r+5, c, M0(1,2)); set(r+5, c+1, -M0(0,2)); set(r+5, c+3, 3);
c = 4*(t+2);
set(r, c, -2*M1(0,0)-1); set(r, c+1, -2*M1(1,0));
set(r+1, c, -2*M1(0,1)); set(r+1, c+1, -2*M1(1,1)-1);
set(r+2, c, -2*M1(0,2)); set(r+2, c+1, -2*M1(1,2)); set(r+2, c+2, -3);
set(r+3, c, -2*M1(1,0)); set(r+3, c+1, 2*M1(0,0)+1);
set(r+4, c, -2*M1(1,1)-1); set(r+4, c+1, 2*M1(0,1));
set(r+5, c, -2*M1(1,2)); set(r+5, c+1, 2*M1(0,2)); set(r+5, c+3, -3);
c = 4*(t+3);
set(r, c, M2(0,0)); set(r, c+1, M2(1,0));
set(r+1, c, M2(0,1)); set(r+1, c+1, M2(1,1));
set(r+2, c, M2(0,2)); set(r+2, c+1, M2(1,2)); set(r+2, c+2, 1);
set(r+3, c, M2(1,0)); set(r+3, c+1, -M2(0,0));
set(r+4, c, M2(1,1)); set(r+4, c+1, -M2(0,1));
set(r+5, c, M2(1,2)); set(r+5, c+1, -M2(0,2)); set(r+5, c+3, 1);
c = 4*N + 6*(N-1) + 6*(N-2) + 6*t;
for (int i = 0; i < 6; ++i)
set(r+i, c+i, -1);
rowub_[r] = 0;
rowub_[r+1] = 0;
rowub_[r+2] = 0;
rowub_[r+3] = 0;
rowub_[r+4] = 0;
rowub_[r+5] = 0;
}
// for each 0 <= S[t+3]M[t+2] - S[t+2]*(I+2M[t+1]) + S[t+1]*(2*I+M[t]) + e[t] condition
for (int t = 0; t < N-3; ++t, r += 6)
{
Mat_<float> M0 = at(t,M), M1 = at(t+1,M), M2 = at(t+2,M);
c = 4*t;
set(r, c, -1);
set(r+1, c+1, -1);
set(r+2, c+2, -1);
set(r+3, c+1, 1);
set(r+4, c, -1);
set(r+5, c+3, -1);
c = 4*(t+1);
set(r, c, M0(0,0)+2); set(r, c+1, M0(1,0));
set(r+1, c, M0(0,1)); set(r+1, c+1, M0(1,1)+2);
set(r+2, c, M0(0,2)); set(r+2, c+1, M0(1,2)); set(r+2, c+2, 3);
set(r+3, c, M0(1,0)); set(r+3, c+1, -M0(0,0)-2);
set(r+4, c, M0(1,1)+2); set(r+4, c+1, -M0(0,1));
set(r+5, c, M0(1,2)); set(r+5, c+1, -M0(0,2)); set(r+5, c+3, 3);
c = 4*(t+2);
set(r, c, -2*M1(0,0)-1); set(r, c+1, -2*M1(1,0));
set(r+1, c, -2*M1(0,1)); set(r+1, c+1, -2*M1(1,1)-1);
set(r+2, c, -2*M1(0,2)); set(r+2, c+1, -2*M1(1,2)); set(r+2, c+2, -3);
set(r+3, c, -2*M1(1,0)); set(r+3, c+1, 2*M1(0,0)+1);
set(r+4, c, -2*M1(1,1)-1); set(r+4, c+1, 2*M1(0,1));
set(r+5, c, -2*M1(1,2)); set(r+5, c+1, 2*M1(0,2)); set(r+5, c+3, -3);
c = 4*(t+3);
set(r, c, M2(0,0)); set(r, c+1, M2(1,0));
set(r+1, c, M2(0,1)); set(r+1, c+1, M2(1,1));
set(r+2, c, M2(0,2)); set(r+2, c+1, M2(1,2)); set(r+2, c+2, 1);
set(r+3, c, M2(1,0)); set(r+3, c+1, -M2(0,0));
set(r+4, c, M2(1,1)); set(r+4, c+1, -M2(0,1));
set(r+5, c, M2(1,2)); set(r+5, c+1, -M2(0,2)); set(r+5, c+3, 1);
c = 4*N + 6*(N-1) + 6*(N-2) + 6*t;
for (int i = 0; i < 6; ++i)
set(r+i, c+i, 1);
rowlb_[r] = 0;
rowlb_[r+1] = 0;
rowlb_[r+2] = 0;
rowlb_[r+3] = 0;
rowlb_[r+4] = 0;
rowlb_[r+5] = 0;
}
// solve
CoinPackedMatrix A(true, &rows_[0], &cols_[0], &elems_[0], elems_.size());
A.setDimensions(nrows, ncols);
ClpSimplex model(false);
model.loadProblem(A, &collb_[0], &colub_[0], &obj_[0], &rowlb_[0], &rowub_[0]);
ClpDualRowSteepest dualSteep(1);
model.setDualRowPivotAlgorithm(dualSteep);
ClpPrimalColumnSteepest primalSteep(1);
model.setPrimalColumnPivotAlgorithm(primalSteep);
model.scaling(1);
ClpPresolve presolveInfo;
Ptr<ClpSimplex> presolvedModel = presolveInfo.presolvedModel(model);
if (!presolvedModel.empty())
{
presolvedModel->dual();
presolveInfo.postsolve(true);
model.checkSolution();
model.primal(1);
}
else
{
model.dual();
model.checkSolution();
model.primal(1);
}
// save results
const double *sol = model.getColSolution();
c = 0;
for (int t = 0; t < N; ++t, c += 4)
{
Mat_<float> S0 = Mat::eye(3, 3, CV_32F);
S0(1,1) = S0(0,0) = sol[c];
S0(0,1) = sol[c+1];
S0(1,0) = -sol[c+1];
S0(0,2) = sol[c+2];
S0(1,2) = sol[c+3];
S[t] = S0;
}
return res / sum;
}
#endif // #ifndef HAVE_CLP
static inline int areaSign(Point2f a, Point2f b, Point2f c)
@ -604,15 +119,11 @@ static inline bool isGoodMotion(const float M[], float w, float h, float dx, flo
{
Point2f pt[4] = {Point2f(0,0), Point2f(w,0), Point2f(w,h), Point2f(0,h)};
Point2f Mpt[4];
float z;
for (int i = 0; i < 4; ++i)
{
Mpt[i].x = M[0]*pt[i].x + M[1]*pt[i].y + M[2];
Mpt[i].y = M[3]*pt[i].x + M[4]*pt[i].y + M[5];
z = M[6]*pt[i].x + M[7]*pt[i].y + M[8];
Mpt[i].x /= z;
Mpt[i].y /= z;
}
pt[0] = Point2f(dx, dy);
@ -632,9 +143,6 @@ static inline void relaxMotion(const float M[], float t, float res[])
res[3] = M[3]*(1.f-t);
res[4] = M[4]*(1.f-t) + t;
res[5] = M[5]*(1.f-t);
res[6] = M[6]*(1.f-t);
res[7] = M[7]*(1.f-t);
res[8] = M[8]*(1.f-t) + t;
}
@ -646,12 +154,11 @@ Mat ensureInclusionConstraint(const Mat &M, Size size, float trimRatio)
const float h = static_cast<float>(size.height);
const float dx = floor(w * trimRatio);
const float dy = floor(h * trimRatio);
const float srcM[] =
const float srcM[6] =
{M.at<float>(0,0), M.at<float>(0,1), M.at<float>(0,2),
M.at<float>(1,0), M.at<float>(1,1), M.at<float>(1,2),
M.at<float>(2,0), M.at<float>(2,1), M.at<float>(2,2)};
M.at<float>(1,0), M.at<float>(1,1), M.at<float>(1,2)};
float curM[9];
float curM[6];
float t = 0;
relaxMotion(srcM, t, curM);
if (isGoodMotion(curM, w, h, dx, dy))
@ -666,6 +173,8 @@ Mat ensureInclusionConstraint(const Mat &M, Size size, float trimRatio)
r = t;
else
l = t;
t = r;
relaxMotion(srcM, r, curM);
}
return (1 - r) * M + r * Mat::eye(3, 3, CV_32F);
@ -683,15 +192,11 @@ float estimateOptimalTrimRatio(const Mat &M, Size size)
Point2f pt[4] = {Point2f(0,0), Point2f(w,0), Point2f(w,h), Point2f(0,h)};
Point2f Mpt[4];
float z;
for (int i = 0; i < 4; ++i)
{
Mpt[i].x = M_(0,0)*pt[i].x + M_(0,1)*pt[i].y + M_(0,2);
Mpt[i].y = M_(1,0)*pt[i].x + M_(1,1)*pt[i].y + M_(1,2);
z = M_(2,0)*pt[i].x + M_(2,1)*pt[i].y + M_(2,2);
Mpt[i].x /= z;
Mpt[i].y /= z;
}
float l = 0, r = 0.5f;

@ -43,7 +43,6 @@
#include "precomp.hpp"
#include "opencv2/video/video.hpp"
#include "opencv2/videostab/optical_flow.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
using namespace std;
@ -61,53 +60,6 @@ void SparsePyrLkOptFlowEstimator::run(
#ifdef HAVE_OPENCV_GPU
SparsePyrLkOptFlowEstimatorGpu::SparsePyrLkOptFlowEstimatorGpu()
{
    // GPU-backed sparse pyramidal Lucas-Kanade estimator; fail fast at
    // construction if no CUDA-capable device is available.
    CV_Assert(gpu::getCudaEnabledDeviceCount() > 0);
}
// Host-memory overload: uploads inputs to the GPU, delegates to the
// device-side overload, then downloads results back into the host arrays.
void SparsePyrLkOptFlowEstimatorGpu::run(
        InputArray frame0, InputArray frame1, InputArray points0, InputOutputArray points1,
        OutputArray status, OutputArray errors)
{
    frame0_.upload(frame0.getMat());
    frame1_.upload(frame1.getMat());
    points0_.upload(points0.getMat());

    if (errors.needed())
    {
        // Caller asked for per-point tracking errors as well.
        run(frame0_, frame1_, points0_, points1_, status_, errors_);
        errors_.download(errors.getMatRef());
    }
    else
        run(frame0_, frame1_, points0_, points1_, status_);

    points1_.download(points1.getMatRef());
    status_.download(status.getMatRef());
}
// Device-side overload (with error output): forwards the configured window
// size and pyramid depth to the GPU sparse LK implementation.
void SparsePyrLkOptFlowEstimatorGpu::run(
        const gpu::GpuMat &frame0, const gpu::GpuMat &frame1, const gpu::GpuMat &points0,
        gpu::GpuMat &points1, gpu::GpuMat &status, gpu::GpuMat &errors)
{
    optFlowEstimator_.winSize = winSize_;
    optFlowEstimator_.maxLevel = maxLevel_;
    optFlowEstimator_.sparse(frame0, frame1, points0, points1, status, &errors);
}
// Device-side overload without error output.
void SparsePyrLkOptFlowEstimatorGpu::run(
        const gpu::GpuMat &frame0, const gpu::GpuMat &frame1, const gpu::GpuMat &points0,
        gpu::GpuMat &points1, gpu::GpuMat &status)
{
    optFlowEstimator_.winSize = winSize_;
    optFlowEstimator_.maxLevel = maxLevel_;
    optFlowEstimator_.sparse(frame0, frame1, points0, points1, status);
}
DensePyrLkOptFlowEstimatorGpu::DensePyrLkOptFlowEstimatorGpu()
{
CV_Assert(gpu::getCudaEnabledDeviceCount() > 0);
@ -123,7 +75,6 @@ void DensePyrLkOptFlowEstimatorGpu::run(
optFlowEstimator_.winSize = winSize_;
optFlowEstimator_.maxLevel = maxLevel_;
if (errors.needed())
{
optFlowEstimator_.dense(frame0_, frame1_, flowX_, flowY_, &errors_);
@ -135,7 +86,8 @@ void DensePyrLkOptFlowEstimatorGpu::run(
flowX_.download(flowX.getMatRef());
flowY_.download(flowY.getMatRef());
}
#endif // HAVE_OPENCV_GPU
#endif
} // namespace videostab
} // namespace cv

@ -1,201 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "opencv2/core/core.hpp"
#include "opencv2/videostab/outlier_rejection.hpp"
using namespace std;
namespace cv
{
namespace videostab
{
// Pass-through rejector: validates the correspondence arrays and marks
// every match as an inlier (mask elements all set to 1).
void NullOutlierRejector::process(
        Size /*frameSize*/, InputArray points0, InputArray points1, OutputArray mask)
{
    CV_Assert(points0.type() == points1.type());
    CV_Assert(points0.getMat().checkVector(2) == points1.getMat().checkVector(2));

    const int count = points0.getMat().checkVector(2);

    mask.create(1, count, CV_8U);
    mask.getMat().setTo(1);
}
TranslationBasedLocalOutlierRejector::TranslationBasedLocalOutlierRejector()
{
    // Defaults: 50x50 px grid cells, RANSAC parameters tuned for a
    // pure-translation motion model.
    setCellSize(Size(50, 50));
    setRansacParams(RansacParams::default2dMotion(MM_TRANSLATION));
}
// Splits the frame into a grid of cells and, per cell, fits a translation
// model (dx, dy) with RANSAC; matches that disagree with their cell's
// refined translation are marked as outliers (mask element = 0).
void TranslationBasedLocalOutlierRejector::process(
        Size frameSize, InputArray points0, InputArray points1, OutputArray mask)
{
    CV_Assert(points0.type() == points1.type());
    CV_Assert(points0.getMat().checkVector(2) == points1.getMat().checkVector(2));

    int npoints = points0.getMat().checkVector(2);

    const Point2f* points0_ = points0.getMat().ptr<Point2f>();
    const Point2f* points1_ = points1.getMat().ptr<Point2f>();

    mask.create(1, npoints, CV_8U);
    uchar* mask_ = mask.getMat().ptr<uchar>();

    // Number of cells covering the frame; the last row/column may be partial.
    Size ncells((frameSize.width + cellSize_.width - 1) / cellSize_.width,
                (frameSize.height + cellSize_.height - 1) / cellSize_.height);

    int cx, cy;

    // Bucket each point of the first frame into its grid cell.
    // NOTE(review): cvRound (round-to-nearest) is used rather than truncation,
    // so a point past the middle of a cell is binned into the next cell —
    // confirm whether this off-center binning is intentional.
    grid_.assign(ncells.area(), Cell());

    for (int i = 0; i < npoints; ++i)
    {
        cx = std::min(cvRound(points0_[i].x / cellSize_.width), ncells.width - 1);
        cy = std::min(cvRound(points0_[i].y / cellSize_.height), ncells.height - 1);
        grid_[cy * ncells.width + cx].push_back(i);
    }

    // Process each cell independently.
    RNG rng(0); // fixed seed => deterministic output for identical input
    int niters = ransacParams_.niters();
    int ninliers, ninliersMax;
    vector<int> inliers;
    float dx, dy, dxBest, dyBest;
    float x1, y1;
    int idx;

    for (size_t ci = 0; ci < grid_.size(); ++ci)
    {
        // Estimate translation model at the current cell using RANSAC.
        const Cell &cell = grid_[ci];

        ninliersMax = 0;
        dxBest = dyBest = 0.f;

        // Find the best hypothesis: sample one random match per iteration and
        // count how many matches in the cell support its displacement.
        if (!cell.empty())
        {
            for (int iter = 0; iter < niters; ++iter)
            {
                idx = cell[static_cast<unsigned>(rng) % cell.size()];
                dx = points1_[idx].x - points0_[idx].x;
                dy = points1_[idx].y - points0_[idx].y;

                ninliers = 0;
                for (size_t i = 0; i < cell.size(); ++i)
                {
                    x1 = points0_[cell[i]].x + dx;
                    y1 = points0_[cell[i]].y + dy;
                    if (sqr(x1 - points1_[cell[i]].x) + sqr(y1 - points1_[cell[i]].y) <
                        sqr(ransacParams_.thresh))
                    {
                        ninliers++;
                    }
                }

                if (ninliers > ninliersMax)
                {
                    ninliersMax = ninliers;
                    dxBest = dx;
                    dyBest = dy;
                }
            }
        }

        // Collect the inliers of the winning hypothesis.
        ninliers = 0;
        inliers.resize(ninliersMax);

        for (size_t i = 0; i < cell.size(); ++i)
        {
            x1 = points0_[cell[i]].x + dxBest;
            y1 = points0_[cell[i]].y + dyBest;
            if (sqr(x1 - points1_[cell[i]].x) + sqr(y1 - points1_[cell[i]].y) <
                sqr(ransacParams_.thresh))
            {
                inliers[ninliers++] = cell[i];
            }
        }

        // Refine the translation as the mean displacement over those inliers.
        dxBest = dyBest = 0.f;

        for (size_t i = 0; i < inliers.size(); ++i)
        {
            dxBest += points1_[inliers[i]].x - points0_[inliers[i]].x;
            dyBest += points1_[inliers[i]].y - points0_[inliers[i]].y;
        }
        if (!inliers.empty())
        {
            dxBest /= inliers.size();
            dyBest /= inliers.size();
        }

        // Write the final inlier/outlier decision for every point in the cell.
        for (size_t i = 0; i < cell.size(); ++i)
        {
            x1 = points0_[cell[i]].x + dxBest;
            y1 = points0_[cell[i]].y + dyBest;
            if (sqr(x1 - points1_[cell[i]].x) + sqr(y1 - points1_[cell[i]].y) <
                sqr(ransacParams_.thresh))
            {
                mask_[cell[i]] = 1;
            }
            else
            {
                mask_[cell[i]] = 0;
            }
        }
    }
}
} // namespace videostab
} // namespace cv

@ -44,20 +44,15 @@
#define __OPENCV_PRECOMP_HPP__
#ifdef HAVE_CVCONFIG_H
#include "cvconfig.h"
#include "cvconfig.h"
#endif
#include <stdexcept>
#include <iostream>
#include <ctime>
#include <algorithm>
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/video/video.hpp"
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/calib3d/calib3d.hpp"
// some aux. functions
inline float sqr(float x) { return x * x; }
@ -66,5 +61,25 @@ inline float intensity(const cv::Point3_<uchar> &bgr)
return 0.3f*bgr.x + 0.59f*bgr.y + 0.11f*bgr.z;
}
// Cyclic (ring-buffer) indexing into a raw array: out-of-range indices wrap
// around via BORDER_WRAP, so at(-1, items, n) refers to items[n-1].
template <typename T> inline T& at(int index, T *items, int size)
{
    return items[cv::borderInterpolate(index, size, cv::BORDER_WRAP)];
}
// Const overload of the wrap-around array accessor above.
template <typename T> inline const T& at(int index, const T *items, int size)
{
    return items[cv::borderInterpolate(index, size, cv::BORDER_WRAP)];
}
// std::vector convenience wrapper: same cyclic indexing, delegating to the
// raw-pointer overload.
template <typename T> inline T& at(int index, std::vector<T> &items)
{
    return at(index, &items[0], static_cast<int>(items.size()));
}
// Const std::vector convenience wrapper: same cyclic (BORDER_WRAP) indexing.
// Delegates to the const-pointer overload so all four at() variants share a
// single wrap-around implementation (the non-const vector overload already
// delegates; this one previously duplicated the logic inline).
template <typename T> inline const T& at(int index, const std::vector<T> &items)
{
    return at(index, &items[0], static_cast<int>(items.size()));
}
#endif

@ -42,10 +42,6 @@
#include "precomp.hpp"
#include "opencv2/videostab/stabilizer.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
// for debug purposes
#define SAVE_MOTIONS 0
using namespace std;
@ -56,9 +52,9 @@ namespace videostab
StabilizerBase::StabilizerBase()
{
setLog(new LogToStdout());
setLog(new NullLog());
setFrameSource(new NullFrameSource());
setMotionEstimator(new KeypointBasedMotionEstimator(new MotionEstimatorRansacL2()));
setMotionEstimator(new PyrLkRobustMotionEstimator());
setDeblurer(new NullDeblurer());
setInpainter(new NullInpainter());
setRadius(15);
@ -68,47 +64,55 @@ StabilizerBase::StabilizerBase()
}
void StabilizerBase::reset()
void StabilizerBase::setUp(int cacheSize, const Mat &frame)
{
frameSize_ = Size(0, 0);
frameMask_ = Mat();
curPos_ = -1;
curStabilizedPos_ = -1;
doDeblurring_ = false;
preProcessedFrame_ = Mat();
doInpainting_ = false;
inpaintingMask_ = Mat();
frames_.clear();
motions_.clear();
blurrinessRates_.clear();
stabilizedFrames_.clear();
stabilizedMasks_.clear();
stabilizationMotions_.clear();
processingStartTime_ = 0;
InpainterBase *_inpainter = static_cast<InpainterBase*>(inpainter_);
doInpainting_ = dynamic_cast<NullInpainter*>(_inpainter) == 0;
if (doInpainting_)
{
inpainter_->setRadius(radius_);
inpainter_->setFrames(frames_);
inpainter_->setMotions(motions_);
inpainter_->setStabilizedFrames(stabilizedFrames_);
inpainter_->setStabilizationMotions(stabilizationMotions_);
inpainter_->update();
}
DeblurerBase *deblurer = static_cast<DeblurerBase*>(deblurer_);
doDeblurring_ = dynamic_cast<NullDeblurer*>(deblurer) == 0;
if (doDeblurring_)
{
blurrinessRates_.resize(cacheSize);
float blurriness = calcBlurriness(frame);
for (int i = -radius_; i <= 0; ++i)
at(i, blurrinessRates_) = blurriness;
deblurer_->setRadius(radius_);
deblurer_->setFrames(frames_);
deblurer_->setMotions(motions_);
deblurer_->setBlurrinessRates(blurrinessRates_);
deblurer_->update();
}
log_->print("processing frames");
}
Mat StabilizerBase::nextStabilizedFrame()
{
// check if we've processed all frames already
if (curStabilizedPos_ == curPos_ && curStabilizedPos_ != -1)
{
logProcessingTime();
return Mat();
}
return Mat(); // we've processed all frames already
bool processed;
do processed = doOneIteration();
while (processed && curStabilizedPos_ == -1);
// check if the frame source is empty
if (curStabilizedPos_ == -1)
{
logProcessingTime();
return Mat();
}
return Mat(); // frame source is empty
return postProcessFrame(at(curStabilizedPos_, stabilizedFrames_));
const Mat &stabilizedFrame = at(curStabilizedPos_, stabilizedFrames_);
int dx = static_cast<int>(floor(trimRatio_ * stabilizedFrame.cols));
int dy = static_cast<int>(floor(trimRatio_ * stabilizedFrame.rows));
return stabilizedFrame(Rect(dx, dy, stabilizedFrame.cols - 2*dx, stabilizedFrame.rows - 2*dy));
}
@ -126,7 +130,7 @@ bool StabilizerBase::doOneIteration()
if (doDeblurring_)
at(curPos_, blurrinessRates_) = calcBlurriness(frame);
at(curPos_ - 1, motions_) = estimateMotion();
estimateMotion();
if (curPos_ >= radius_)
{
@ -145,7 +149,7 @@ bool StabilizerBase::doOneIteration()
{
curStabilizedPos_++;
at(curStabilizedPos_ + radius_, frames_) = at(curPos_, frames_);
at(curStabilizedPos_ + radius_ - 1, motions_) = Mat::eye(3, 3, CV_32F);
at(curStabilizedPos_ + radius_ - 1, motions_) = at(curPos_ - 1, motions_);
stabilizeFrame();
log_->print(".");
@ -156,44 +160,15 @@ bool StabilizerBase::doOneIteration()
}
void StabilizerBase::setUp(const Mat &firstFrame)
void StabilizerBase::stabilizeFrame(const Mat &stabilizationMotion)
{
InpainterBase *inpaint = static_cast<InpainterBase*>(inpainter_);
doInpainting_ = dynamic_cast<NullInpainter*>(inpaint) == 0;
if (doInpainting_)
{
inpainter_->setMotionModel(motionEstimator_->motionModel());
inpainter_->setFrames(frames_);
inpainter_->setMotions(motions_);
inpainter_->setStabilizedFrames(stabilizedFrames_);
inpainter_->setStabilizationMotions(stabilizationMotions_);
}
DeblurerBase *deblurer = static_cast<DeblurerBase*>(deblurer_);
doDeblurring_ = dynamic_cast<NullDeblurer*>(deblurer) == 0;
if (doDeblurring_)
{
blurrinessRates_.resize(2*radius_ + 1);
float blurriness = calcBlurriness(firstFrame);
for (int i = -radius_; i <= 0; ++i)
at(i, blurrinessRates_) = blurriness;
deblurer_->setFrames(frames_);
deblurer_->setMotions(motions_);
deblurer_->setBlurrinessRates(blurrinessRates_);
}
log_->print("processing frames");
processingStartTime_ = clock();
}
void StabilizerBase::stabilizeFrame()
{
Mat stabilizationMotion = estimateStabilizationMotion();
Mat stabilizationMotion_;
if (doCorrectionForInclusion_)
stabilizationMotion = ensureInclusionConstraint(stabilizationMotion, frameSize_, trimRatio_);
stabilizationMotion_ = ensureInclusionConstraint(stabilizationMotion, frameSize_, trimRatio_);
else
stabilizationMotion_ = stabilizationMotion.clone();
at(curStabilizedPos_, stabilizationMotions_) = stabilizationMotion;
at(curStabilizedPos_, stabilizationMotions_) = stabilizationMotion_;
if (doDeblurring_)
{
@ -204,26 +179,15 @@ void StabilizerBase::stabilizeFrame()
preProcessedFrame_ = at(curStabilizedPos_, frames_);
// apply stabilization transformation
if (motionEstimator_->motionModel() != MM_HOMOGRAPHY)
warpAffine(
preProcessedFrame_, at(curStabilizedPos_, stabilizedFrames_),
stabilizationMotion(Rect(0,0,3,2)), frameSize_, INTER_LINEAR, borderMode_);
else
warpPerspective(
preProcessedFrame_, at(curStabilizedPos_, stabilizedFrames_),
stabilizationMotion, frameSize_, INTER_LINEAR, borderMode_);
warpAffine(
preProcessedFrame_, at(curStabilizedPos_, stabilizedFrames_),
stabilizationMotion_(Rect(0,0,3,2)), frameSize_, INTER_LINEAR, borderMode_);
if (doInpainting_)
{
if (motionEstimator_->motionModel() != MM_HOMOGRAPHY)
warpAffine(
frameMask_, at(curStabilizedPos_, stabilizedMasks_),
stabilizationMotion(Rect(0,0,3,2)), frameSize_, INTER_NEAREST);
else
warpPerspective(
frameMask_, at(curStabilizedPos_, stabilizedMasks_),
stabilizationMotion, frameSize_, INTER_NEAREST);
warpAffine(
frameMask_, at(curStabilizedPos_, stabilizedMasks_),
stabilizationMotion_(Rect(0,0,3,2)), frameSize_, INTER_NEAREST);
erode(at(curStabilizedPos_, stabilizedMasks_), at(curStabilizedPos_, stabilizedMasks_),
Mat());
@ -236,42 +200,34 @@ void StabilizerBase::stabilizeFrame()
}
Mat StabilizerBase::postProcessFrame(const Mat &frame)
{
// trim frame
int dx = static_cast<int>(floor(trimRatio_ * frame.cols));
int dy = static_cast<int>(floor(trimRatio_ * frame.rows));
return frame(Rect(dx, dy, frame.cols - 2*dx, frame.rows - 2*dy));
}
void StabilizerBase::logProcessingTime()
{
clock_t elapsedTime = clock() - processingStartTime_;
log_->print("\nprocessing time: %.3f sec\n", static_cast<double>(elapsedTime) / CLOCKS_PER_SEC);
}
OnePassStabilizer::OnePassStabilizer()
{
setMotionFilter(new GaussianMotionFilter());
reset();
resetImpl();
}
void OnePassStabilizer::reset()
void OnePassStabilizer::resetImpl()
{
StabilizerBase::reset();
curPos_ = -1;
curStabilizedPos_ = -1;
frames_.clear();
motions_.clear();
stabilizedFrames_.clear();
stabilizationMotions_.clear();
doDeblurring_ = false;
doInpainting_ = false;
}
void OnePassStabilizer::setUp(const Mat &firstFrame)
void OnePassStabilizer::setUp(Mat &firstFrame)
{
frameSize_ = firstFrame.size();
frameMask_.create(frameSize_, CV_8U);
frameMask_.setTo(255);
int cacheSize = 2*radius_ + 1;
frames_.resize(cacheSize);
stabilizedFrames_.resize(cacheSize);
stabilizedMasks_.resize(cacheSize);
@ -286,126 +242,79 @@ void OnePassStabilizer::setUp(const Mat &firstFrame)
at(0, frames_) = firstFrame;
StabilizerBase::setUp(firstFrame);
}
motionFilter_->setRadius(radius_);
motionFilter_->update();
Mat OnePassStabilizer::estimateMotion()
{
return motionEstimator_->estimate(at(curPos_ - 1, frames_), at(curPos_, frames_));
StabilizerBase::setUp(cacheSize, firstFrame);
}
Mat OnePassStabilizer::estimateStabilizationMotion()
void OnePassStabilizer::estimateMotion()
{
return motionFilter_->stabilize(curStabilizedPos_, motions_, make_pair(0, curPos_));
at(curPos_ - 1, motions_) = motionEstimator_->estimate(
at(curPos_ - 1, frames_), at(curPos_, frames_));
}
Mat OnePassStabilizer::postProcessFrame(const Mat &frame)
void OnePassStabilizer::stabilizeFrame()
{
return StabilizerBase::postProcessFrame(frame);
Mat stabilizationMotion = motionFilter_->stabilize(curStabilizedPos_, &motions_[0], (int)motions_.size());
StabilizerBase::stabilizeFrame(stabilizationMotion);
}
TwoPassStabilizer::TwoPassStabilizer()
{
setMotionStabilizer(new GaussianMotionFilter());
setWobbleSuppressor(new NullWobbleSuppressor());
setEstimateTrimRatio(false);
reset();
resetImpl();
}
void TwoPassStabilizer::reset()
Mat TwoPassStabilizer::nextFrame()
{
StabilizerBase::reset();
frameCount_ = 0;
isPrePassDone_ = false;
doWobbleSuppression_ = false;
motions2_.clear();
suppressedFrame_ = Mat();
runPrePassIfNecessary();
return StabilizerBase::nextStabilizedFrame();
}
Mat TwoPassStabilizer::nextFrame()
vector<Mat> TwoPassStabilizer::motions() const
{
runPrePassIfNecessary();
return StabilizerBase::nextStabilizedFrame();
if (frameCount_ == 0)
return vector<Mat>();
vector<Mat> res(frameCount_ - 1);
copy(motions_.begin(), motions_.begin() + frameCount_ - 1, res.begin());
return res;
}
#if SAVE_MOTIONS
static void saveMotions(
int frameCount, const vector<Mat> &motions, const vector<Mat> &stabilizationMotions)
void TwoPassStabilizer::resetImpl()
{
ofstream fm("log_motions.csv");
for (int i = 0; i < frameCount - 1; ++i)
{
Mat_<float> M = at(i, motions);
fm << M(0,0) << " " << M(0,1) << " " << M(0,2) << " "
<< M(1,0) << " " << M(1,1) << " " << M(1,2) << " "
<< M(2,0) << " " << M(2,1) << " " << M(2,2) << endl;
}
ofstream fo("log_orig.csv");
for (int i = 0; i < frameCount; ++i)
{
Mat_<float> M = getMotion(0, i, motions);
fo << M(0,0) << " " << M(0,1) << " " << M(0,2) << " "
<< M(1,0) << " " << M(1,1) << " " << M(1,2) << " "
<< M(2,0) << " " << M(2,1) << " " << M(2,2) << endl;
}
ofstream fs("log_stab.csv");
for (int i = 0; i < frameCount; ++i)
{
Mat_<float> M = stabilizationMotions[i] * getMotion(0, i, motions);
fs << M(0,0) << " " << M(0,1) << " " << M(0,2) << " "
<< M(1,0) << " " << M(1,1) << " " << M(1,2) << " "
<< M(2,0) << " " << M(2,1) << " " << M(2,2) << endl;
}
isPrePassDone_ = false;
frameCount_ = 0;
curPos_ = -1;
curStabilizedPos_ = -1;
frames_.clear();
motions_.clear();
stabilizedFrames_.clear();
stabilizationMotions_.clear();
doDeblurring_ = false;
doInpainting_ = false;
}
#endif
void TwoPassStabilizer::runPrePassIfNecessary()
{
if (!isPrePassDone_)
{
// check if we must do wobble suppression
WobbleSuppressorBase *wobble = static_cast<WobbleSuppressorBase*>(wobbleSuppressor_);
doWobbleSuppression_ = dynamic_cast<NullWobbleSuppressor*>(wobble) == 0;
// estimate motions
clock_t startTime = clock();
log_->print("first pass: estimating motions");
Mat prevFrame, frame;
bool ok = true, ok2 = true;
while (!(frame = frameSource_->nextFrame()).empty())
{
if (frameCount_ > 0)
{
motions_.push_back(motionEstimator_->estimate(prevFrame, frame, &ok));
if (doWobbleSuppression_)
{
Mat M = wobbleSuppressor_->motionEstimator()->estimate(prevFrame, frame, &ok2);
if (ok2)
motions2_.push_back(M);
else
motions2_.push_back(motions_.back());
}
if (ok)
{
if (ok2) log_->print(".");
else log_->print("?");
}
else log_->print("x");
}
motions_.push_back(motionEstimator_->estimate(prevFrame, frame));
else
{
frameSize_ = frame.size();
@ -415,30 +324,24 @@ void TwoPassStabilizer::runPrePassIfNecessary()
prevFrame = frame;
frameCount_++;
}
clock_t elapsedTime = clock() - startTime;
log_->print("\nmotion estimation time: %.3f sec\n",
static_cast<double>(elapsedTime) / CLOCKS_PER_SEC);
// add aux. motions
log_->print(".");
}
for (int i = 0; i < radius_; ++i)
motions_.push_back(Mat::eye(3, 3, CV_32F));
log_->print("\n");
// stabilize
startTime = clock();
IMotionStabilizer *_motionStabilizer = static_cast<IMotionStabilizer*>(motionStabilizer_);
MotionFilterBase *motionFilterBase = dynamic_cast<MotionFilterBase*>(_motionStabilizer);
if (motionFilterBase)
{
motionFilterBase->setRadius(radius_);
motionFilterBase->update();
}
stabilizationMotions_.resize(frameCount_);
motionStabilizer_->stabilize(
frameCount_, motions_, make_pair(0, frameCount_ - 1), &stabilizationMotions_[0]);
elapsedTime = clock() - startTime;
log_->print("motion stabilization time: %.3f sec\n",
static_cast<double>(elapsedTime) / CLOCKS_PER_SEC);
// estimate optimal trim ratio if necessary
motionStabilizer_->stabilize(&motions_[0], frameCount_, &stabilizationMotions_[0]);
if (mustEstTrimRatio_)
{
@ -451,19 +354,16 @@ void TwoPassStabilizer::runPrePassIfNecessary()
log_->print("estimated trim ratio: %f\n", static_cast<double>(trimRatio_));
}
#if SAVE_MOTIONS
saveMotions(frameCount_, motions_, stabilizationMotions_);
#endif
isPrePassDone_ = true;
frameSource_->reset();
}
}
void TwoPassStabilizer::setUp(const Mat &firstFrame)
void TwoPassStabilizer::setUp(Mat &firstFrame)
{
int cacheSize = 2*radius_ + 1;
frames_.resize(cacheSize);
stabilizedFrames_.resize(cacheSize);
stabilizedMasks_.resize(cacheSize);
@ -471,36 +371,13 @@ void TwoPassStabilizer::setUp(const Mat &firstFrame)
for (int i = -radius_; i <= 0; ++i)
at(i, frames_) = firstFrame;
WobbleSuppressorBase *wobble = static_cast<WobbleSuppressorBase*>(wobbleSuppressor_);
doWobbleSuppression_ = dynamic_cast<NullWobbleSuppressor*>(wobble) == 0;
if (doWobbleSuppression_)
{
wobbleSuppressor_->setFrameCount(frameCount_);
wobbleSuppressor_->setMotions(motions_);
wobbleSuppressor_->setMotions2(motions2_);
wobbleSuppressor_->setStabilizationMotions(stabilizationMotions_);
}
StabilizerBase::setUp(firstFrame);
}
Mat TwoPassStabilizer::estimateMotion()
{
return motions_[curPos_ - 1].clone();
}
Mat TwoPassStabilizer::estimateStabilizationMotion()
{
return stabilizationMotions_[curStabilizedPos_].clone();
StabilizerBase::setUp(cacheSize, firstFrame);
}
Mat TwoPassStabilizer::postProcessFrame(const Mat &frame)
void TwoPassStabilizer::stabilizeFrame()
{
wobbleSuppressor_->suppress(curStabilizedPos_, frame, suppressedFrame_);
return StabilizerBase::postProcessFrame(suppressedFrame_);
StabilizerBase::stabilizeFrame(stabilizationMotions_[curStabilizedPos_]);
}
} // namespace videostab

@ -1,156 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "opencv2/videostab/wobble_suppression.hpp"
#include "opencv2/videostab/ring_buffer.hpp"
using namespace std;
namespace cv
{
namespace videostab
{
WobbleSuppressorBase::WobbleSuppressorBase() : motions_(0), stabilizationMotions_(0)
{
    // Default motion estimator: keypoint-based RANSAC fitting a full
    // homography (MM_HOMOGRAPHY), as required to model wobble distortions.
    setMotionEstimator(new KeypointBasedMotionEstimator(new MotionEstimatorRansacL2(MM_HOMOGRAPHY)));
}
void NullWobbleSuppressor::suppress(int /*idx*/, const Mat &frame, Mat &result)
{
    // No-op suppressor: result shares the input frame's data (shallow Mat
    // header copy); no pixels are touched.
    result = frame;
}
// Re-warps the frame to reduce wobble: each pixel is mapped through two
// homography chains — one anchored at the previous keyframe k1, one at the
// next keyframe k2 — and the two projections are blended with weights
// proportional to the temporal distance from idx.
void MoreAccurateMotionWobbleSuppressor::suppress(int idx, const Mat &frame, Mat &result)
{
    CV_Assert(motions_ && stabilizationMotions_);

    // Keyframes (every period_-th frame) pass through untouched.
    if (idx % period_ == 0)
    {
        result = frame;
        return;
    }

    // Nearest keyframe at or before idx, and the next one (clamped to the
    // last frame).
    int k1 = idx / period_ * period_;
    int k2 = std::min(k1 + period_, frameCount_ - 1);

    Mat S1 = (*stabilizationMotions_)[idx];

    // Left/right correction homographies in stabilized coordinates
    // (conjugated by S1). motions2_ holds the second, per-keyframe motion
    // chain; motions_ the chain used for stabilization.
    Mat_<float> ML = S1 * getMotion(k1, idx, *motions2_) * getMotion(k1, idx, *motions_).inv() * S1.inv();
    Mat_<float> MR = S1 * getMotion(idx, k2, *motions2_).inv() * getMotion(idx, k2, *motions_) * S1.inv();

    mapx_.create(frame.size());
    mapy_.create(frame.size());

    float xl, yl, zl, wl;
    float xr, yr, zr, wr;

    for (int y = 0; y < frame.rows; ++y)
    {
        for (int x = 0; x < frame.cols; ++x)
        {
            // Project (x, y) through the left homography (perspective divide).
            xl = ML(0,0)*x + ML(0,1)*y + ML(0,2);
            yl = ML(1,0)*x + ML(1,1)*y + ML(1,2);
            zl = ML(2,0)*x + ML(2,1)*y + ML(2,2);
            xl /= zl; yl /= zl;
            wl = float(idx - k1);

            // Project through the right homography.
            xr = MR(0,0)*x + MR(0,1)*y + MR(0,2);
            yr = MR(1,0)*x + MR(1,1)*y + MR(1,2);
            zr = MR(2,0)*x + MR(2,1)*y + MR(2,2);
            xr /= zr; yr /= zr;
            wr = float(k2 - idx);

            // Inverse-distance blend: the closer keyframe dominates.
            mapx_(y,x) = (wr * xl + wl * xr) / (wl + wr);
            mapy_(y,x) = (wr * yl + wl * yr) / (wl + wr);
        }
    }

    // remap cannot work in place; allocate a fresh destination if the caller
    // aliased result with frame.
    if (result.data == frame.data)
        result = Mat(frame.size(), frame.type());

    remap(frame, result, mapx_, mapy_, INTER_LINEAR, BORDER_REPLICATE);
}
#ifdef HAVE_OPENCV_GPU
// GPU counterpart of MoreAccurateMotionWobbleSuppressor::suppress: same
// keyframe-anchored homography blend, with the per-pixel map computation
// offloaded to gpu::calcWobbleSuppressionMaps.
void MoreAccurateMotionWobbleSuppressorGpu::suppress(int idx, const gpu::GpuMat &frame, gpu::GpuMat &result)
{
    CV_Assert(motions_ && stabilizationMotions_);

    // Keyframes pass through untouched.
    if (idx % period_ == 0)
    {
        result = frame;
        return;
    }

    // Nearest keyframes on each side of idx.
    int k1 = idx / period_ * period_;
    int k2 = std::min(k1 + period_, frameCount_ - 1);

    Mat S1 = (*stabilizationMotions_)[idx];

    // Left/right correction homographies in stabilized coordinates.
    Mat ML = S1 * getMotion(k1, idx, *motions2_) * getMotion(k1, idx, *motions_).inv() * S1.inv();
    Mat MR = S1 * getMotion(idx, k2, *motions2_).inv() * getMotion(idx, k2, *motions_) * S1.inv();

    gpu::calcWobbleSuppressionMaps(k1, idx, k2, frame.size(), ML, MR, mapx_, mapy_);

    // remap cannot work in place; allocate a fresh destination when aliased.
    if (result.data == frame.data)
        result = gpu::GpuMat(frame.size(), frame.type());

    gpu::remap(frame, result, mapx_, mapy_, INTER_LINEAR, BORDER_REPLICATE);
}
// Host-memory overload: uploads the frame, runs the GPU suppressor, and
// downloads the corrected frame back into result.
void MoreAccurateMotionWobbleSuppressorGpu::suppress(int idx, const Mat &frame, Mat &result)
{
    frameDevice_.upload(frame);
    suppress(idx, frameDevice_, resultDevice_);
    resultDevice_.download(result);
}
#endif
} // namespace videostab
} // namespace cv

@ -8,18 +8,12 @@
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/videostab/videostab.hpp"
#include "opencv2/opencv_modules.hpp"
#define arg(name) cmd.get<string>(name)
#define argb(name) cmd.get<bool>(name)
#define argi(name) cmd.get<int>(name)
#define argf(name) cmd.get<float>(name)
#define argd(name) cmd.get<double>(name)
using namespace std;
using namespace cv;
using namespace cv::videostab;
Ptr<IFrameSource> stabilizedFrames;
string saveMotionsPath;
double outputFps;
@ -29,40 +23,96 @@ bool quietMode;
void run();
void saveMotionsIfNecessary();
void printHelp();
MotionModel motionModel(const string &str);
// IGlobalMotionEstimator implementation that replays 3x3 motions previously
// saved to a text file instead of estimating them from the frames.
// Expected file format: motion count first, then 9 floats per motion
// (row-major 3x3).
class GlobalMotionReader : public IGlobalMotionEstimator
{
public:
    // Loads all motions up front; throws runtime_error if the file cannot
    // be opened.
    GlobalMotionReader(string path)
    {
        ifstream f(path.c_str());
        if (!f.is_open())
            throw runtime_error("can't open motions file: " + path);
        int size; f >> size;
        motions_.resize(size);
        for (int i = 0; i < size; ++i)
        {
            Mat_<float> M(3, 3);
            for (int l = 0; l < 3; ++l)
                for (int s = 0; s < 3; ++s)
                    f >> M(l,s);
            motions_[i] = M;
        }
        pos_ = 0;
    }

    // The frame arguments are ignored; each call hands out the next stored
    // motion, throwing once the recorded sequence is exhausted.
    virtual Mat estimate(const Mat &/*frame0*/, const Mat &/*frame1*/)
    {
        if (pos_ >= motions_.size())
        {
            stringstream text;
            text << "can't load motion between frames " << pos_ << " and " << pos_+1;
            throw runtime_error(text.str());
        }
        return motions_[pos_++];
    }

private:
    vector<Mat> motions_; // preloaded inter-frame motions
    size_t pos_;          // index of the next motion to hand out
};
void run()
{
VideoWriter writer;
Mat stabilizedFrame;
int nframes = 0;
// for each stabilized frame
while (!(stabilizedFrame = stabilizedFrames->nextFrame()).empty())
{
nframes++;
// init writer (once) and save stabilized frame
if (!saveMotionsPath.empty())
saveMotionsIfNecessary();
if (!outputPath.empty())
{
if (!writer.isOpened())
writer.open(outputPath, CV_FOURCC('X','V','I','D'),
outputFps, stabilizedFrame.size());
writer.open(outputPath, CV_FOURCC('X','V','I','D'), outputFps, stabilizedFrame.size());
writer << stabilizedFrame;
}
// show stabilized frame
if (!quietMode)
{
imshow("stabilizedFrame", stabilizedFrame);
char key = static_cast<char>(waitKey(3));
if (key == 27) { cout << endl; break; }
if (key == 27)
break;
}
}
cout << "processed frames: " << nframes << endl
<< "finished\n";
cout << "\nfinished\n";
}
// Writes the motions estimated during the stabilizer's first pass to
// saveMotionsPath, at most once per program run (guarded by a function-local
// flag). Output format: motion count, then one row-major 3x3 matrix per line.
void saveMotionsIfNecessary()
{
    static bool areMotionsSaved = false;

    if (!areMotionsSaved)
    {
        // Motions are only available when a two-pass stabilizer is in use.
        IFrameSource *frameSource = static_cast<IFrameSource*>(stabilizedFrames);
        TwoPassStabilizer *twoPassStabilizer = dynamic_cast<TwoPassStabilizer*>(frameSource);

        if (twoPassStabilizer)
        {
            ofstream f(saveMotionsPath.c_str());
            const vector<Mat> &motions = twoPassStabilizer->motions();
            f << motions.size() << endl;

            for (size_t i = 0; i < motions.size(); ++i)
            {
                Mat_<float> M = motions[i];
                for (int l = 0, k = 0; l < 3; ++l)
                    for (int s = 0; s < 3; ++s, ++k)
                        f << M(l,s) << " ";
                f << endl;
            }
        }

        // Mark as done even if no two-pass stabilizer was found, so we do
        // not retry on every frame.
        areMotionsSaved = true;
        cout << "motions are saved";
    }
}
// Prints command-line usage to stdout.
//
// NOTE(review): the line below is an embedded diff hunk header, and the
// help text that follows interleaves TWO revisions of the option list
// (e.g. two --model lines, two --fps lines, and a doubled statement
// terminator near the end). This block will not compile as-is; one
// revision must be chosen before building.
@ -71,210 +121,57 @@ void printHelp()
cout << "OpenCV video stabilizer.\n"
"Usage: videostab <file_path> [arguments]\n\n"
"Arguments:\n"
// NOTE(review): duplicated -m/--model description -- two revisions follow.
" -m, --model=(transl|transl_and_scale|rigid|similarity|affine|homography)\n"
" -m, --model=(transl|transl_and_scale|linear_sim|affine)\n"
" Set motion model. The default is affine.\n"
" -lp, --lin-prog-motion-est=(yes|no)\n"
" Turn on/off LP based motion estimation. The default is no.\n"
" --subset=(<int_number>|auto)\n"
" Number of random samples per one motion hypothesis. The default is auto.\n"
" --thresh=(<float_number>|auto)\n"
" Maximum error to classify match as inlier. The default is auto.\n"
" --outlier-ratio=<float_number>\n"
" Motion estimation outlier ratio hypothesis. The default is 0.5.\n"
" Outliers ratio in motion estimation. The default is 0.5.\n"
" --min-inlier-ratio=<float_number>\n"
" Minimum inlier ratio to decide if estimated motion is OK. The default is 0.1.\n"
" --nkps=<int_number>\n"
" Number of keypoints to find in each frame. The default is 1000.\n"
" --local-outlier-rejection=(yes|no)\n"
" Perform local outlier rejection. The default is no.\n\n"
" -sm, --save-motions=(<file_path>|no)\n"
" Save estimated motions into file. The default is no.\n"
" -lm, --load-motions=(<file_path>|no)\n"
" Load motions from file. The default is no.\n\n"
" Minimum inlier ratio to decide if estimated motion is OK. The default is 0.1,\n"
" but you may want to increase it.\n\n"
" --save-motions=<file_path>\n"
" Save estimated motions into file.\n"
" --load-motions=<file_path>\n"
" Load motions from file.\n\n"
" -r, --radius=<int_number>\n"
" Set sliding window radius. The default is 15.\n"
" --stdev=(<float_number>|auto)\n"
" Set smoothing weights standard deviation. The default is auto\n"
" (i.e. sqrt(radius)).\n"
" -lps, --lin-prog-stab=(yes|no)\n"
" Turn on/off linear programming based stabilization method.\n"
" --lps-trim-ratio=(<float_number>|auto)\n"
" Trimming ratio used in linear programming based method.\n"
" --lps-w1=(<float_number>|1)\n"
" 1st derivative weight. The default is 1.\n"
" --lps-w2=(<float_number>|10)\n"
" 2nd derivative weight. The default is 10.\n"
" --lps-w3=(<float_number>|100)\n"
" 3rd derivative weight. The default is 100.\n"
" --lps-w4=(<float_number>|100)\n"
" Non-translation motion components weight. The default is 100.\n\n"
" Set smoothing radius. The default is 15.\n"
" --stdev=<float_number>\n"
" Set smoothing weights standard deviation. The default is sqrt(radius).\n\n"
" --deblur=(yes|no)\n"
" Do deblurring.\n"
" --deblur-sens=<float_number>\n"
" Set deblurring sensitivity (from 0 to +inf). The default is 0.1.\n\n"
" -t, --trim-ratio=<float_number>\n"
" Set trimming ratio (from 0 to 0.5). The default is 0.1.\n"
" -et, --est-trim=(yes|no)\n"
" Estimate trim ratio automatically. The default is yes.\n"
" -ic, --incl-constr=(yes|no)\n"
" Set trimming ratio (from 0 to 0.5). The default is 0.0.\n"
" --est-trim=(yes|no)\n"
" Estimate trim ratio automatically. The default is yes (that leads to two passes,\n"
" you can turn it off if you want to use one pass only).\n"
" --incl-constr=(yes|no)\n"
" Ensure the inclusion constraint is always satisfied. The default is no.\n\n"
" -bm, --border-mode=(replicate|reflect|const)\n"
" --border-mode=(replicate|reflect|const)\n"
" Set border extrapolation mode. The default is replicate.\n\n"
" --mosaic=(yes|no)\n"
" Do consistent mosaicing. The default is no.\n"
" --mosaic-stdev=<float_number>\n"
" Consistent mosaicing stdev threshold. The default is 10.0.\n\n"
" -mi, --motion-inpaint=(yes|no)\n"
" --motion-inpaint=(yes|no)\n"
" Do motion inpainting (requires GPU support). The default is no.\n"
" --mi-dist-thresh=<float_number>\n"
" --dist-thresh=<float_number>\n"
" Estimated flow distance threshold for motion inpainting. The default is 5.0.\n\n"
" -ci, --color-inpaint=(no|average|ns|telea)\n"
" --color-inpaint=(no|average|ns|telea)\n"
// NOTE(review): "defailt" typo in the runtime string below -- left
// untouched in this documentation-only pass.
" Do color inpainting. The defailt is no.\n"
" --ci-radius=<float_number>\n"
" Set color inpainting radius (for ns and telea options only).\n"
" The default is 2.0\n\n"
" -ws, --wobble-suppress=(yes|no)\n"
" Perform wobble suppression. The default is no.\n"
" --ws-lp=(yes|no)\n"
" Turn on/off LP based motion estimation. The default is no.\n"
" --ws-period=<int_number>\n"
" Set wobble suppression period. The default is 30.\n"
" --ws-model=(transl|transl_and_scale|rigid|similarity|affine|homography)\n"
" Set wobble suppression motion model (must have more DOF than motion \n"
" estimation model). The default is homography.\n"
" --ws-subset=(<int_number>|auto)\n"
" Number of random samples per one motion hypothesis. The default is auto.\n"
" --ws-thresh=(<float_number>|auto)\n"
" Maximum error to classify match as inlier. The default is auto.\n"
" --ws-outlier-ratio=<float_number>\n"
" Motion estimation outlier ratio hypothesis. The default is 0.5.\n"
" --ws-min-inlier-ratio=<float_number>\n"
" Minimum inlier ratio to decide if estimated motion is OK. The default is 0.1.\n"
" --ws-nkps=<int_number>\n"
" Number of keypoints to find in each frame. The default is 1000.\n"
" --ws-local-outlier-rejection=(yes|no)\n"
" Perform local outlier rejection. The default is no.\n\n"
" -sm2, --save-motions2=(<file_path>|no)\n"
" Save motions estimated for wobble suppression. The default is no.\n"
" -lm2, --load-motions2=(<file_path>|no)\n"
" Load motions for wobble suppression from file. The default is no.\n\n"
" -gpu=(yes|no)\n"
" Use GPU optimization whenever possible. The default is no.\n\n"
" --color-inpaint-radius=<float_number>\n"
" Set color inpainting radius (for ns and telea options only).\n\n"
" -o, --output=(no|<file_path>)\n"
" Set output file path explicitely. The default is stabilized.avi.\n"
// NOTE(review): duplicated --fps description -- two revisions follow.
" --fps=(<float_number>|auto)\n"
" Set output video FPS explicitely. By default the source FPS is used (auto).\n"
" --fps=<int_number>\n"
" Set output video FPS explicitely. By default the source FPS is used.\n"
" -q, --quiet\n"
" Don't show output video frames.\n\n"
" -h, --help\n"
" Print help.\n\n"
// NOTE(review): the statement is terminated twice below (";" on the next
// line AND again after two more literals) -- a clear sign of an unmarked
// merge of two revisions; this cannot compile as written.
"Note: some argument configurations lead to two passes, some to single pass.\n\n";
" Print help.\n"
"\n";
}
// motion estimator builders are for concise creation of motion estimators
// Abstract base for the builder helpers below: each concrete builder
// assembles a fully configured motion estimator from the parsed
// command-line arguments held in `cmd`.
class IMotionEstimatorBuilder
{
public:
    virtual ~IMotionEstimatorBuilder() {}

    // Constructs the estimator described by the command-line options.
    virtual Ptr<ImageMotionEstimatorBase> build() = 0;

protected:
    IMotionEstimatorBuilder(CommandLineParser &cmdline) : cmd(cmdline) {}

    CommandLineParser cmd;
};
// Builds a RANSAC-based L2 motion estimator wrapped in a keypoint-based
// estimator (GPU variant when available and requested). All option names
// are looked up with a common prefix so the same builder serves both the
// main estimator ("") and the wobble-suppression one ("ws-").
class MotionEstimatorRansacL2Builder : public IMotionEstimatorBuilder
{
public:
    MotionEstimatorRansacL2Builder(CommandLineParser &command, bool use_gpu, const string &_prefix = "")
        : IMotionEstimatorBuilder(command), gpu(use_gpu), prefix(_prefix) {}

    virtual Ptr<ImageMotionEstimatorBase> build()
    {
        MotionEstimatorRansacL2 *core = new MotionEstimatorRansacL2(motionModel(arg(prefix + "model")));

        // Start from the model's defaults; override only what was given.
        RansacParams params = core->ransacParams();
        if (arg(prefix + "subset") != "auto")
            params.size = argi(prefix + "subset");
        if (arg(prefix + "thresh") != "auto")
            params.thresh = argf(prefix + "thresh");
        params.eps = argf(prefix + "outlier-ratio");
        core->setRansacParams(params);
        core->setMinInlierRatio(argf(prefix + "min-inlier-ratio"));

        // Local outlier rejection is a no-op unless explicitly enabled.
        Ptr<IOutlierRejector> rejector = new NullOutlierRejector();
        if (arg(prefix + "local-outlier-rejection") == "yes")
        {
            TranslationBasedLocalOutlierRejector *local = new TranslationBasedLocalOutlierRejector();
            RansacParams localParams = local->ransacParams();
            if (arg(prefix + "thresh") != "auto")
                localParams.thresh = argf(prefix + "thresh");
            local->setRansacParams(localParams);
            rejector = local;
        }

#ifdef HAVE_OPENCV_GPU
        if (gpu)
        {
            KeypointBasedMotionEstimatorGpu *gpuEstimator = new KeypointBasedMotionEstimatorGpu(core);
            gpuEstimator->setOutlierRejector(rejector);
            return gpuEstimator;
        }
#endif

        KeypointBasedMotionEstimator *cpuEstimator = new KeypointBasedMotionEstimator(core);
        cpuEstimator->setDetector(new GoodFeaturesToTrackDetector(argi(prefix + "nkps")));
        cpuEstimator->setOutlierRejector(rejector);
        return cpuEstimator;
    }

private:
    bool gpu;
    string prefix;
};
// Builds an L1 (linear-programming) motion estimator wrapped in a
// keypoint-based estimator (GPU variant when available and requested).
// Mirrors MotionEstimatorRansacL2Builder, sharing the same prefixed
// option names for the optional local outlier rejection.
class MotionEstimatorL1Builder : public IMotionEstimatorBuilder
{
public:
    MotionEstimatorL1Builder(CommandLineParser &command, bool use_gpu, const string &_prefix = "")
        : IMotionEstimatorBuilder(command), gpu(use_gpu), prefix(_prefix) {}

    virtual Ptr<ImageMotionEstimatorBase> build()
    {
        MotionEstimatorL1 *core = new MotionEstimatorL1(motionModel(arg(prefix + "model")));

        // Local outlier rejection is a no-op unless explicitly enabled.
        Ptr<IOutlierRejector> rejector = new NullOutlierRejector();
        if (arg(prefix + "local-outlier-rejection") == "yes")
        {
            TranslationBasedLocalOutlierRejector *local = new TranslationBasedLocalOutlierRejector();
            RansacParams localParams = local->ransacParams();
            if (arg(prefix + "thresh") != "auto")
                localParams.thresh = argf(prefix + "thresh");
            local->setRansacParams(localParams);
            rejector = local;
        }

#ifdef HAVE_OPENCV_GPU
        if (gpu)
        {
            KeypointBasedMotionEstimatorGpu *gpuEstimator = new KeypointBasedMotionEstimatorGpu(core);
            gpuEstimator->setOutlierRejector(rejector);
            return gpuEstimator;
        }
#endif

        KeypointBasedMotionEstimator *cpuEstimator = new KeypointBasedMotionEstimator(core);
        cpuEstimator->setDetector(new GoodFeaturesToTrackDetector(argi(prefix + "nkps")));
        cpuEstimator->setOutlierRejector(rejector);
        return cpuEstimator;
    }

private:
    bool gpu;
    string prefix;
};
// Entry point: parses command-line options, wires together the
// stabilization pipeline (motion estimator, deblurer, inpainters,
// one-pass or two-pass stabilizer), then calls run().
//
// NOTE(review): this whole function is two revisions interleaved
// without diff markers -- embedded hunk headers, TWO complete `keys`
// option tables, a stray closing brace after the help check, and
// contradictory duplicated setup statements throughout. It cannot
// compile as-is; one revision must be reconstructed before building.
int main(int argc, const char **argv)
{
@ -282,254 +179,182 @@ int main(int argc, const char **argv)
{
// NOTE(review): the `keys` table below contains BOTH revisions of the
// option list concatenated (duplicate "model", "fps", etc. entries).
const char *keys =
"{ 1 | | | | }"
"{ m | model | affine | }"
"{ lp | lin-prog-motion-est | no | }"
"{ | subset | auto | }"
"{ | thresh | auto | }"
"{ | outlier-ratio | 0.5 | }"
"{ | min-inlier-ratio | 0.1 | }"
"{ | nkps | 1000 | }"
"{ | extra-kps | 0 | }"
"{ | local-outlier-rejection | no | }"
"{ sm | save-motions | no | }"
"{ lm | load-motions | no | }"
"{ r | radius | 15 | }"
"{ | stdev | auto | }"
"{ lps | lin-prog-stab | no | }"
"{ | lps-trim-ratio | auto | }"
"{ | lps-w1 | 1 | }"
"{ | lps-w2 | 10 | }"
"{ | lps-w3 | 100 | }"
"{ | lps-w4 | 100 | }"
"{ | deblur | no | }"
"{ | deblur-sens | 0.1 | }"
"{ et | est-trim | yes | }"
"{ t | trim-ratio | 0.1 | }"
"{ ic | incl-constr | no | }"
"{ bm | border-mode | replicate | }"
"{ | mosaic | no | }"
"{ ms | mosaic-stdev | 10.0 | }"
"{ mi | motion-inpaint | no | }"
"{ | mi-dist-thresh | 5.0 | }"
"{ ci | color-inpaint | no | }"
"{ | ci-radius | 2 | }"
"{ ws | wobble-suppress | no | }"
"{ | ws-period | 30 | }"
"{ | ws-model | homography | }"
"{ | ws-subset | auto | }"
"{ | ws-thresh | auto | }"
"{ | ws-outlier-ratio | 0.5 | }"
"{ | ws-min-inlier-ratio | 0.1 | }"
"{ | ws-nkps | 1000 | }"
"{ | ws-extra-kps | 0 | }"
"{ | ws-local-outlier-rejection | no | }"
"{ | ws-lp | no | }"
"{ sm2 | save-motions2 | no | }"
"{ lm2 | load-motions2 | no | }"
"{ gpu | | no }"
// NOTE(review): second revision of the option table begins here.
"{ m | model | | }"
"{ | min-inlier-ratio | | }"
"{ | outlier-ratio | | }"
"{ | save-motions | | }"
"{ | load-motions | | }"
"{ r | radius | | }"
"{ | stdev | | }"
"{ | deblur | | }"
"{ | deblur-sens | | }"
"{ | est-trim | yes | }"
"{ t | trim-ratio | | }"
"{ | incl-constr | | }"
"{ | border-mode | | }"
"{ | mosaic | | }"
"{ | mosaic-stdev | | }"
"{ | motion-inpaint | | }"
"{ | dist-thresh | | }"
"{ | color-inpaint | no | }"
"{ | color-inpaint-radius | | }"
"{ o | output | stabilized.avi | }"
"{ | fps | auto | }"
"{ | fps | | }"
"{ q | quiet | false | }"
"{ h | help | false | }";
CommandLineParser cmd(argc, argv, keys);
// parse command arguments
// NOTE(review): duplicated condition -- argb("help") vs
// cmd.get<bool>("help") are the two revisions of the same check.
if (argb("help"))
if (cmd.get<bool>("help"))
{
printHelp();
return 0;
}
// NOTE(review): stray closing brace from the diff merge.
}
StabilizerBase *stabilizer;
GaussianMotionFilter *motionFilter = 0;
#ifdef HAVE_OPENCV_GPU
// NOTE(review): GPU warm-up (old revision) fused with stdev-based
// motion-filter setup (new revision) under one #ifdef.
if (arg("gpu") == "yes")
if (!cmd.get<string>("stdev").empty())
{
cout << "initializing GPU..."; cout.flush();
Mat hostTmp = Mat::zeros(1, 1, CV_32F);
gpu::GpuMat deviceTmp;
deviceTmp.upload(hostTmp);
cout << endl;
motionFilter = new GaussianMotionFilter();
motionFilter->setStdev(cmd.get<float>("stdev"));
}
#endif
// NOTE(review): `stabilizer` is declared a second time here.
StabilizerBase *stabilizer = 0;
// check if source video is specified
string inputPath = arg("1");
if (inputPath.empty())
throw runtime_error("specify video file path");
// get source video parameters
if (!cmd.get<string>("save-motions").empty())
saveMotionsPath = cmd.get<string>("save-motions");
VideoFileSource *source = new VideoFileSource(inputPath);
cout << "frame count (rough): " << source->count() << endl;
if (arg("fps") == "auto")
outputFps = source->fps();
else
outputFps = argd("fps");
// prepare motion estimation builders
Ptr<IMotionEstimatorBuilder> motionEstBuilder;
if (arg("lin-prog-motion-est") == "yes")
motionEstBuilder = new MotionEstimatorL1Builder(cmd, arg("gpu") == "yes");
else
motionEstBuilder = new MotionEstimatorRansacL2Builder(cmd, arg("gpu") == "yes");
Ptr<IMotionEstimatorBuilder> wsMotionEstBuilder;
if (arg("ws-lp") == "yes")
wsMotionEstBuilder = new MotionEstimatorL1Builder(cmd, arg("gpu") == "yes", "ws-");
else
wsMotionEstBuilder = new MotionEstimatorRansacL2Builder(cmd, arg("gpu") == "yes", "ws-");
// determine whether we must use one pass or two pass stabilizer
// NOTE(review): both revisions of the isTwoPass expression are present,
// producing a malformed statement.
bool isTwoPass =
arg("est-trim") == "yes" || arg("wobble-suppress") == "yes" || arg("lin-prog-stab") == "yes";
cmd.get<string>("est-trim") == "yes" ||
!cmd.get<string>("save-motions").empty();
if (isTwoPass)
{
// we must use two pass stabilizer
TwoPassStabilizer *twoPassStabilizer = new TwoPassStabilizer();
if (!cmd.get<string>("est-trim").empty())
twoPassStabilizer->setEstimateTrimRatio(cmd.get<string>("est-trim") == "yes");
if (motionFilter)
twoPassStabilizer->setMotionStabilizer(motionFilter);
stabilizer = twoPassStabilizer;
twoPassStabilizer->setEstimateTrimRatio(arg("est-trim") == "yes");
// determine stabilization technique
if (arg("lin-prog-stab") == "yes")
{
LpMotionStabilizer *stab = new LpMotionStabilizer();
stab->setFrameSize(Size(source->width(), source->height()));
stab->setTrimRatio(arg("lps-trim-ratio") == "auto" ? argf("trim-ratio") : argf("lps-trim-ratio"));
stab->setWeight1(argf("lps-w1"));
stab->setWeight2(argf("lps-w2"));
stab->setWeight3(argf("lps-w3"));
stab->setWeight4(argf("lps-w4"));
twoPassStabilizer->setMotionStabilizer(stab);
}
else if (arg("stdev") == "auto")
twoPassStabilizer->setMotionStabilizer(new GaussianMotionFilter(argi("radius")));
else
twoPassStabilizer->setMotionStabilizer(new GaussianMotionFilter(argi("radius"), argf("stdev")));
// init wobble suppressor if necessary
if (arg("wobble-suppress") == "yes")
{
MoreAccurateMotionWobbleSuppressorBase *ws = new MoreAccurateMotionWobbleSuppressor();
if (arg("gpu") == "yes")
#ifdef HAVE_OPENCV_GPU
ws = new MoreAccurateMotionWobbleSuppressorGpu();
#else
throw runtime_error("OpenCV is built without GPU support");
#endif
ws->setMotionEstimator(wsMotionEstBuilder->build());
ws->setPeriod(argi("ws-period"));
twoPassStabilizer->setWobbleSuppressor(ws);
MotionModel model = ws->motionEstimator()->motionModel();
if (arg("load-motions2") != "no")
{
ws->setMotionEstimator(new FromFileMotionReader(arg("load-motions2")));
ws->motionEstimator()->setMotionModel(model);
}
if (arg("save-motions2") != "no")
{
ws->setMotionEstimator(new ToFileMotionWriter(arg("save-motions2"), ws->motionEstimator()));
ws->motionEstimator()->setMotionModel(model);
}
}
}
else
{
// we must use one pass stabilizer
// NOTE(review): duplicated declaration -- two revisions of the same line.
OnePassStabilizer *onePassStabilizer = new OnePassStabilizer();
OnePassStabilizer *onePassStabilizer= new OnePassStabilizer();
if (motionFilter)
onePassStabilizer->setMotionFilter(motionFilter);
stabilizer = onePassStabilizer;
if (arg("stdev") == "auto")
onePassStabilizer->setMotionFilter(new GaussianMotionFilter(argi("radius")));
else
onePassStabilizer->setMotionFilter(new GaussianMotionFilter(argi("radius"), argf("stdev")));
}
stabilizer->setFrameSource(source);
stabilizer->setMotionEstimator(motionEstBuilder->build());
// cast stabilizer to simple frame source interface to read stabilized frames
stabilizedFrames = dynamic_cast<IFrameSource*>(stabilizer);
// NOTE(review): `inputPath` is declared a second time here (old revision).
string inputPath = cmd.get<string>("1");
if (inputPath.empty())
throw runtime_error("specify video file path");
MotionModel model = stabilizer->motionEstimator()->motionModel();
if (arg("load-motions") != "no")
{
stabilizer->setMotionEstimator(new FromFileMotionReader(arg("load-motions")));
stabilizer->motionEstimator()->setMotionModel(model);
}
if (arg("save-motions") != "no")
VideoFileSource *frameSource = new VideoFileSource(inputPath);
outputFps = frameSource->fps();
stabilizer->setFrameSource(frameSource);
cout << "frame count: " << frameSource->frameCount() << endl;
PyrLkRobustMotionEstimator *motionEstimator = new PyrLkRobustMotionEstimator();
if (cmd.get<string>("model") == "transl")
motionEstimator->setMotionModel(TRANSLATION);
else if (cmd.get<string>("model") == "transl_and_scale")
motionEstimator->setMotionModel(TRANSLATION_AND_SCALE);
else if (cmd.get<string>("model") == "linear_sim")
motionEstimator->setMotionModel(LINEAR_SIMILARITY);
else if (cmd.get<string>("model") == "affine")
motionEstimator->setMotionModel(AFFINE);
else if (!cmd.get<string>("model").empty())
throw runtime_error("unknow motion mode: " + cmd.get<string>("model"));
if (!cmd.get<string>("outlier-ratio").empty())
{
stabilizer->setMotionEstimator(new ToFileMotionWriter(arg("save-motions"), stabilizer->motionEstimator()));
stabilizer->motionEstimator()->setMotionModel(model);
RansacParams ransacParams = motionEstimator->ransacParams();
ransacParams.eps = cmd.get<float>("outlier-ratio");
motionEstimator->setRansacParams(ransacParams);
}
if (!cmd.get<string>("min-inlier-ratio").empty())
motionEstimator->setMinInlierRatio(cmd.get<float>("min-inlier-ratio"));
stabilizer->setMotionEstimator(motionEstimator);
if (!cmd.get<string>("load-motions").empty())
stabilizer->setMotionEstimator(new GlobalMotionReader(cmd.get<string>("load-motions")));
stabilizer->setRadius(argi("radius"));
if (!cmd.get<string>("radius").empty())
stabilizer->setRadius(cmd.get<int>("radius"));
// init deblurer
if (arg("deblur") == "yes")
if (cmd.get<string>("deblur") == "yes")
{
WeightingDeblurer *deblurer = new WeightingDeblurer();
deblurer->setRadius(argi("radius"));
deblurer->setSensitivity(argf("deblur-sens"));
if (!cmd.get<string>("deblur-sens").empty())
deblurer->setSensitivity(cmd.get<float>("deblur-sens"));
stabilizer->setDeblurer(deblurer);
}
// set up trimming paramters
stabilizer->setTrimRatio(argf("trim-ratio"));
stabilizer->setCorrectionForInclusion(arg("incl-constr") == "yes");
if (!cmd.get<string>("trim-ratio").empty())
stabilizer->setTrimRatio(cmd.get<float>("trim-ratio"));
if (!cmd.get<string>("incl-constr").empty())
stabilizer->setCorrectionForInclusion(cmd.get<string>("incl-constr") == "yes");
if (arg("border-mode") == "reflect")
if (cmd.get<string>("border-mode") == "reflect")
stabilizer->setBorderMode(BORDER_REFLECT);
else if (arg("border-mode") == "replicate")
else if (cmd.get<string>("border-mode") == "replicate")
stabilizer->setBorderMode(BORDER_REPLICATE);
else if (arg("border-mode") == "const")
else if (cmd.get<string>("border-mode") == "const")
stabilizer->setBorderMode(BORDER_CONSTANT);
else
throw runtime_error("unknown border extrapolation mode: "
+ cmd.get<string>("border-mode"));
else if (!cmd.get<string>("border-mode").empty())
throw runtime_error("unknown border extrapolation mode: " + cmd.get<string>("border-mode"));
// init inpainter
InpaintingPipeline *inpainters = new InpaintingPipeline();
Ptr<InpainterBase> inpainters_(inpainters);
if (arg("mosaic") == "yes")
if (cmd.get<string>("mosaic") == "yes")
{
ConsistentMosaicInpainter *inp = new ConsistentMosaicInpainter();
inp->setStdevThresh(argf("mosaic-stdev"));
inpainters->pushBack(inp);
ConsistentMosaicInpainter *inpainter = new ConsistentMosaicInpainter();
if (!cmd.get<string>("mosaic-stdev").empty())
inpainter->setStdevThresh(cmd.get<float>("mosaic-stdev"));
inpainters->pushBack(inpainter);
}
if (arg("motion-inpaint") == "yes")
if (cmd.get<string>("motion-inpaint") == "yes")
{
MotionInpainter *inp = new MotionInpainter();
inp->setDistThreshold(argf("mi-dist-thresh"));
inpainters->pushBack(inp);
MotionInpainter *inpainter = new MotionInpainter();
if (!cmd.get<string>("dist-thresh").empty())
inpainter->setDistThreshold(cmd.get<float>("dist-thresh"));
inpainters->pushBack(inpainter);
}
// NOTE(review): two revisions of the color-inpainting setup follow.
if (arg("color-inpaint") == "average")
inpainters->pushBack(new ColorAverageInpainter());
else if (arg("color-inpaint") == "ns")
inpainters->pushBack(new ColorInpainter(INPAINT_NS, argd("ci-radius")));
else if (arg("color-inpaint") == "telea")
inpainters->pushBack(new ColorInpainter(INPAINT_TELEA, argd("ci-radius")));
else if (arg("color-inpaint") != "no")
throw runtime_error("unknown color inpainting method: " + arg("color-inpaint"));
if (!inpainters->empty())
if (!cmd.get<string>("color-inpaint").empty())
{
inpainters->setRadius(argi("radius"));
stabilizer->setInpainter(inpainters_);
}
if (cmd.get<string>("color-inpaint") == "average")
inpainters->pushBack(new ColorAverageInpainter());
else if (!cmd.get<string>("color-inpaint-radius").empty())
{
float radius = cmd.get<float>("color-inpaint-radius");
if (cmd.get<string>("color-inpaint") == "ns")
inpainters->pushBack(new ColorInpainter(INPAINT_NS, radius));
else if (cmd.get<string>("color-inpaint") == "telea")
inpainters->pushBack(new ColorInpainter(INPAINT_TELEA, radius));
else if (cmd.get<string>("color-inpaint") != "no")
throw runtime_error("unknown color inpainting method: " + cmd.get<string>("color-inpaint"));
}
else
{
if (cmd.get<string>("color-inpaint") == "ns")
inpainters->pushBack(new ColorInpainter(INPAINT_NS));
else if (cmd.get<string>("color-inpaint") == "telea")
inpainters->pushBack(new ColorInpainter(INPAINT_TELEA));
else if (cmd.get<string>("color-inpaint") != "no")
throw runtime_error("unknown color inpainting method: " + cmd.get<string>("color-inpaint"));
}
}
if (!inpainters->empty())
stabilizer->setInpainter(inpainters)
;
stabilizer->setLog(new LogToStdout());
if (arg("output") != "no")
outputPath = arg("output");
outputPath = cmd.get<string>("output") != "no" ? cmd.get<string>("output") : "";
quietMode = argb("quiet");
if (!cmd.get<string>("fps").empty())
outputFps = cmd.get<double>("fps");
quietMode = cmd.get<bool>("quiet");
stabilizedFrames = dynamic_cast<IFrameSource*>(stabilizer);
run();
}
@ -542,21 +367,3 @@ int main(int argc, const char **argv)
stabilizedFrames.release();
return 0;
}
// Maps a command-line model name to the corresponding videostab
// MotionModel enum value. Throws runtime_error for an unknown name.
MotionModel motionModel(const string &str)
{
    static const struct { const char *name; MotionModel model; } table[] =
    {
        { "transl",           MM_TRANSLATION },
        { "transl_and_scale", MM_TRANSLATION_AND_SCALE },
        { "rigid",            MM_RIGID },
        { "similarity",       MM_SIMILARITY },
        { "affine",           MM_AFFINE },
        { "homography",       MM_HOMOGRAPHY }
    };
    for (size_t i = 0; i < sizeof(table) / sizeof(table[0]); ++i)
        if (str == table[i].name)
            return table[i].model;
    throw runtime_error("unknown motion model: " + str);
}

Loading…
Cancel
Save