From baf372ad3df3ac3d2b29d71dccdb1ae8a7c94b02 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Mon, 17 Jun 2019 20:47:41 +0000 Subject: [PATCH 1/4] videoio(test): add extra videoio tests --- modules/videoio/test/test_gstreamer.cpp | 18 +++ modules/videoio/test/test_video_io.cpp | 181 ++++++++++++++++-------- 2 files changed, 143 insertions(+), 56 deletions(-) diff --git a/modules/videoio/test/test_gstreamer.cpp b/modules/videoio/test/test_gstreamer.cpp index 55accb52c0..497da13984 100644 --- a/modules/videoio/test/test_gstreamer.cpp +++ b/modules/videoio/test/test_gstreamer.cpp @@ -71,6 +71,24 @@ Param test_data[] = { INSTANTIATE_TEST_CASE_P(videoio, Videoio_Gstreamer_Test, testing::ValuesIn(test_data)); +TEST(Videoio_GStreamer, unsupported_pipeline) +{ + VideoCaptureAPIs apiPref = CAP_GSTREAMER; + if (!isBackendAvailable(apiPref, cv::videoio_registry::getStreamBackends())) + throw SkipTestException(cv::String("Backend is not available/disabled: ") + cv::videoio_registry::getBackendName(apiPref)); + + // could not link videoconvert0 to matroskamux0, matroskamux0 can't handle caps video/x-raw, format=(string)RGBA + std::string pipeline = "appsrc ! videoconvert ! video/x-raw, format=(string)RGBA ! matroskamux ! filesink location=test.mkv"; + Size frame_size(640, 480); + + VideoWriter writer; + EXPECT_NO_THROW(writer.open(pipeline, apiPref, 0/*fourcc*/, 30/*fps*/, frame_size, true)); + EXPECT_FALSE(writer.isOpened()); + // no frames + EXPECT_NO_THROW(writer.release()); + +} + } // namespace #endif diff --git a/modules/videoio/test/test_video_io.cpp b/modules/videoio/test/test_video_io.cpp index 98b293e09a..913418db64 100644 --- a/modules/videoio/test/test_video_io.cpp +++ b/modules/videoio/test/test_video_io.cpp @@ -225,8 +225,8 @@ public: struct Ext_Fourcc_PSNR { - string ext; - string fourcc; + const char* ext; + const char* fourcc; float PSNR; VideoCaptureAPIs api; }; @@ -359,18 +359,6 @@ INSTANTIATE_TEST_CASE_P(videoio, Videoio_Bunny, testing::ValuesIn(backend_params))); -//================================================================================================== - -inline Ext_Fourcc_PSNR makeParam(const char * ext, const char * fourcc, float psnr, VideoCaptureAPIs apipref) -{ - Ext_Fourcc_PSNR res; - res.ext = ext; - res.fourcc = fourcc; - res.PSNR = psnr; - res.api = apipref; - return res; -} - inline static std::ostream &operator<<(std::ostream &out, const Ext_Fourcc_PSNR &p) { out << "FOURCC(" << p.fourcc << "), ." << p.ext << ", " << p.api << ", " << p.PSNR << "dB"; return out; @@ -380,69 +368,68 @@ static Ext_Fourcc_PSNR synthetic_params[] = { #ifdef HAVE_MSMF #if !defined(_M_ARM) - makeParam("wmv", "WMV1", 30.f, CAP_MSMF), - makeParam("wmv", "WMV2", 30.f, CAP_MSMF), + {"wmv", "WMV1", 30.f, CAP_MSMF}, + {"wmv", "WMV2", 30.f, CAP_MSMF}, #endif - makeParam("wmv", "WMV3", 30.f, CAP_MSMF), - makeParam("wmv", "WVC1", 30.f, CAP_MSMF), - makeParam("mov", "H264", 30.f, CAP_MSMF), + {"wmv", "WMV3", 30.f, CAP_MSMF}, + {"wmv", "WVC1", 30.f, CAP_MSMF}, + {"mov", "H264", 30.f, CAP_MSMF}, #endif // TODO: Broken? 
//#ifdef HAVE_VFW //#if !defined(_M_ARM) -// makeParam("wmv", "WMV1", 30.f, CAP_VFW), -// makeParam("wmv", "WMV2", 30.f, CAP_VFW), +// {"wmv", "WMV1", 30.f, CAP_VFW}, +// {"wmv", "WMV2", 30.f, CAP_VFW}, //#endif -// makeParam("wmv", "WMV3", 30.f, CAP_VFW), -// makeParam("wmv", "WVC1", 30.f, CAP_VFW), -// makeParam("avi", "H264", 30.f, CAP_VFW), -// makeParam("avi", "MJPG", 30.f, CAP_VFW), +// {"wmv", "WMV3", 30.f, CAP_VFW}, +// {"wmv", "WVC1", 30.f, CAP_VFW}, +// {"avi", "H264", 30.f, CAP_VFW}, +// {"avi", "MJPG", 30.f, CAP_VFW}, //#endif #ifdef HAVE_QUICKTIME - makeParam("mov", "mp4v", 30.f, CAP_QT), - makeParam("avi", "XVID", 30.f, CAP_QT), - makeParam("avi", "MPEG", 30.f, CAP_QT), - makeParam("avi", "IYUV", 30.f, CAP_QT), - makeParam("avi", "MJPG", 30.f, CAP_QT), - - makeParam("mkv", "XVID", 30.f, CAP_QT), - makeParam("mkv", "MPEG", 30.f, CAP_QT), - makeParam("mkv", "MJPG", 30.f, CAP_QT), + {"mov", "mp4v", 30.f, CAP_QT}, + {"avi", "XVID", 30.f, CAP_QT}, + {"avi", "MPEG", 30.f, CAP_QT}, + {"avi", "IYUV", 30.f, CAP_QT}, + {"avi", "MJPG", 30.f, CAP_QT}, + + {"mkv", "XVID", 30.f, CAP_QT}, + {"mkv", "MPEG", 30.f, CAP_QT}, + {"mkv", "MJPG", 30.f, CAP_QT}, #endif #ifdef HAVE_AVFOUNDATION - makeParam("mov", "H264", 30.f, CAP_AVFOUNDATION), - makeParam("mov", "MJPG", 30.f, CAP_AVFOUNDATION), - makeParam("mp4", "H264", 30.f, CAP_AVFOUNDATION), - makeParam("mp4", "MJPG", 30.f, CAP_AVFOUNDATION), - makeParam("m4v", "H264", 30.f, CAP_AVFOUNDATION), - makeParam("m4v", "MJPG", 30.f, CAP_AVFOUNDATION), + {"mov", "H264", 30.f, CAP_AVFOUNDATION}, + {"mov", "MJPG", 30.f, CAP_AVFOUNDATION}, + {"mp4", "H264", 30.f, CAP_AVFOUNDATION}, + {"mp4", "MJPG", 30.f, CAP_AVFOUNDATION}, + {"m4v", "H264", 30.f, CAP_AVFOUNDATION}, + {"m4v", "MJPG", 30.f, CAP_AVFOUNDATION}, #endif #ifdef HAVE_FFMPEG - makeParam("avi", "XVID", 30.f, CAP_FFMPEG), - makeParam("avi", "MPEG", 30.f, CAP_FFMPEG), - makeParam("avi", "IYUV", 30.f, CAP_FFMPEG), - makeParam("avi", "MJPG", 30.f, CAP_FFMPEG), - - makeParam("mkv", "XVID", 30.f, CAP_FFMPEG), - makeParam("mkv", "MPEG", 30.f, CAP_FFMPEG), - makeParam("mkv", "MJPG", 30.f, CAP_FFMPEG), + {"avi", "XVID", 30.f, CAP_FFMPEG}, + {"avi", "MPEG", 30.f, CAP_FFMPEG}, + {"avi", "IYUV", 30.f, CAP_FFMPEG}, + {"avi", "MJPG", 30.f, CAP_FFMPEG}, + + {"mkv", "XVID", 30.f, CAP_FFMPEG}, + {"mkv", "MPEG", 30.f, CAP_FFMPEG}, + {"mkv", "MJPG", 30.f, CAP_FFMPEG}, #endif #ifdef HAVE_GSTREAMER - makeParam("avi", "MPEG", 30.f, CAP_GSTREAMER), - makeParam("avi", "MJPG", 30.f, CAP_GSTREAMER), - makeParam("avi", "H264", 30.f, CAP_GSTREAMER), - - makeParam("mkv", "MPEG", 30.f, CAP_GSTREAMER), - makeParam("mkv", "MJPG", 30.f, CAP_GSTREAMER), - makeParam("mkv", "H264", 30.f, CAP_GSTREAMER), + {"avi", "MPEG", 30.f, CAP_GSTREAMER}, + {"avi", "MJPG", 30.f, CAP_GSTREAMER}, + {"avi", "H264", 30.f, CAP_GSTREAMER}, + {"mkv", "MPEG", 30.f, CAP_GSTREAMER}, + {"mkv", "MJPG", 30.f, CAP_GSTREAMER}, + {"mkv", "H264", 30.f, CAP_GSTREAMER}, #endif - makeParam("avi", "MJPG", 30.f, CAP_OPENCV_MJPEG), + {"avi", "MJPG", 30.f, CAP_OPENCV_MJPEG}, }; @@ -458,4 +445,86 @@ INSTANTIATE_TEST_CASE_P(videoio, Videoio_Synthetic, testing::ValuesIn(all_sizes), testing::ValuesIn(synthetic_params))); +struct Ext_Fourcc_API +{ + const char* ext; + const char* fourcc; + VideoCaptureAPIs api; +}; + +inline static std::ostream &operator<<(std::ostream &out, const Ext_Fourcc_API &p) +{ + out << "(FOURCC(" << p.fourcc << "), \"" << p.ext << "\", " << p.api << ")"; return out; +} + + +class Videoio_Writer : public Videoio_Test_Base, public 
testing::TestWithParam +{ +protected: + Size frame_size; + int fourcc; + double fps; +public: + Videoio_Writer() + { + frame_size = Size(640, 480); + const Ext_Fourcc_API p = GetParam(); + ext = p.ext; + fourcc = fourccFromString(p.fourcc); + if (ext.size() == 3) + video_file = cv::tempfile((fourccToString(fourcc) + "." + ext).c_str()); + else + video_file = ext; + fps = 25.; + apiPref = p.api; + } + void SetUp() + { + } + void TearDown() + { + if (ext.size() == 3) + (void)remove(video_file.c_str()); + } +}; + +TEST_P(Videoio_Writer, write_nothing) +{ + if (!isBackendAvailable(apiPref, cv::videoio_registry::getStreamBackends())) + throw SkipTestException(cv::String("Backend is not available/disabled: ") + cv::videoio_registry::getBackendName(apiPref)); + + VideoWriter writer; + EXPECT_NO_THROW(writer.open(video_file, apiPref, fourcc, fps, frame_size, true)); + ASSERT_TRUE(writer.isOpened()); +#if 0 // no frames + cv::Mat m(frame_size, CV_8UC3, Scalar::all(127)); + writer << m; +#endif + EXPECT_NO_THROW(writer.release()); +} + +static vector generate_Ext_Fourcc_API() +{ + const size_t N = sizeof(synthetic_params)/sizeof(synthetic_params[0]); + vector result; result.reserve(N); + for (size_t i = 0; i < N; i++) + { + const Ext_Fourcc_PSNR& src = synthetic_params[i]; + Ext_Fourcc_API e = { src.ext, src.fourcc, src.api }; + result.push_back(e); + } + + { + Ext_Fourcc_API e = { "appsrc ! videoconvert ! video/x-raw, format=(string)NV12 ! filesink location=test.nv12", "\0\0\0\0", CAP_GSTREAMER }; + result.push_back(e); + } + { + Ext_Fourcc_API e = { "appsrc ! videoconvert ! video/x-raw, format=(string)I420 ! matroskamux ! filesink location=test.mkv", "\0\0\0\0", CAP_GSTREAMER }; + result.push_back(e); + } + return result; +} + +INSTANTIATE_TEST_CASE_P(videoio, Videoio_Writer, testing::ValuesIn(generate_Ext_Fourcc_API())); + } // namespace From 681e0323f286813b11eafe336f1eb0485dd8d9a0 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Tue, 18 Jun 2019 15:35:16 +0300 Subject: [PATCH 2/4] core: backport toLowerCase()/toUpperCase() --- modules/core/include/opencv2/core/cvstd.hpp | 34 +++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/modules/core/include/opencv2/core/cvstd.hpp b/modules/core/include/opencv2/core/cvstd.hpp index 8af1162bfa..fbf6d317e1 100644 --- a/modules/core/include/opencv2/core/cvstd.hpp +++ b/modules/core/include/opencv2/core/cvstd.hpp @@ -1026,6 +1026,40 @@ static inline bool operator>= (const String& lhs, const String& rhs) { return lh static inline bool operator>= (const char* lhs, const String& rhs) { return rhs.compare(lhs) <= 0; } static inline bool operator>= (const String& lhs, const char* rhs) { return lhs.compare(rhs) >= 0; } + +#ifndef OPENCV_DISABLE_STRING_LOWER_UPPER_CONVERSIONS + +//! @cond IGNORED +namespace details { +// std::tolower is int->int +static inline char char_tolower(char ch) +{ + return (char)std::tolower((int)ch); +} +// std::toupper is int->int +static inline char char_toupper(char ch) +{ + return (char)std::toupper((int)ch); +} +} // namespace details +//! @endcond + +static inline std::string toLowerCase(const std::string& str) +{ + std::string result(str); + std::transform(result.begin(), result.end(), result.begin(), details::char_tolower); + return result; +} + +static inline std::string toUpperCase(const std::string& str) +{ + std::string result(str); + std::transform(result.begin(), result.end(), result.begin(), details::char_toupper); + return result; +} + +#endif // OPENCV_DISABLE_STRING_LOWER_UPPER_CONVERSIONS + //! 
@} relates cv::String } // cv From ab24325df3e87b3f3838756b305113a3d3a9a2e5 Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Tue, 18 Jun 2019 17:25:43 +0300 Subject: [PATCH 3/4] videoio(test): avoid raw ptr in ffmpeg tests causes leaks if FFmpeg is disabled through OPENCV_VIDEOIO_PRIORITY_FFMPEG=0 --- modules/videoio/test/test_ffmpeg.cpp | 31 +++++++++++++--------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/modules/videoio/test/test_ffmpeg.cpp b/modules/videoio/test/test_ffmpeg.cpp index e9e5e9aca1..7e508e7782 100644 --- a/modules/videoio/test/test_ffmpeg.cpp +++ b/modules/videoio/test/test_ffmpeg.cpp @@ -227,7 +227,7 @@ public: const static Size FrameSize; static std::string TmpDirectory; - CreateVideoWriterInvoker(std::vector& _writers, std::vector& _files) : + CreateVideoWriterInvoker(std::vector< cv::Ptr >& _writers, std::vector& _files) : writers(_writers), files(_files) { } @@ -241,14 +241,14 @@ public: std::string fileName = tempfile(stream.str().c_str()); files[i] = fileName; - writers[i] = new VideoWriter(fileName, CAP_FFMPEG, VideoWriter::fourcc('X','V','I','D'), 25.0f, FrameSize); + writers[i] = makePtr(fileName, CAP_FFMPEG, VideoWriter::fourcc('X','V','I','D'), 25.0f, FrameSize); CV_Assert(writers[i]->isOpened()); } } private: - std::vector& writers; + std::vector< cv::Ptr >& writers; std::vector& files; }; @@ -264,7 +264,7 @@ public: static const Scalar ObjectColor; static const Point Center; - WriteVideo_Invoker(const std::vector& _writers) : + WriteVideo_Invoker(const std::vector< cv::Ptr >& _writers) : ParallelLoopBody(), writers(&_writers) { } @@ -304,7 +304,7 @@ protected: } private: - const std::vector* writers; + const std::vector< cv::Ptr >* writers; }; const Scalar WriteVideo_Invoker::ObjectColor(Scalar::all(0)); @@ -315,7 +315,7 @@ class CreateVideoCaptureInvoker : public ParallelLoopBody { public: - CreateVideoCaptureInvoker(std::vector& _readers, const std::vector& _files) : + CreateVideoCaptureInvoker(std::vector< cv::Ptr >& _readers, const std::vector& _files) : ParallelLoopBody(), readers(&_readers), files(&_files) { } @@ -324,12 +324,12 @@ public: { for (int i = range.start; i != range.end; ++i) { - readers->operator[](i) = new VideoCapture(files->operator[](i), CAP_FFMPEG); + readers->operator[](i) = makePtr(files->operator[](i), CAP_FFMPEG); CV_Assert(readers->operator[](i)->isOpened()); } } private: - std::vector* readers; + std::vector< cv::Ptr >* readers; const std::vector* files; }; @@ -337,7 +337,7 @@ class ReadImageAndTest : public ParallelLoopBody { public: - ReadImageAndTest(const std::vector& _readers, cvtest::TS* _ts) : + ReadImageAndTest(const std::vector< cv::Ptr >& _readers, cvtest::TS* _ts) : ParallelLoopBody(), readers(&_readers), ts(_ts) { } @@ -346,7 +346,7 @@ public: { for (int j = range.start; j < range.end; ++j) { - VideoCapture* capture = readers->operator[](j); + VideoCapture* capture = readers->operator[](j).get(); CV_Assert(capture != NULL); CV_Assert(capture->isOpened()); @@ -394,7 +394,7 @@ public: static bool next; private: - const std::vector* readers; + const std::vector< cv::Ptr >* readers; cvtest::TS* ts; }; @@ -406,7 +406,7 @@ TEST(Videoio_Video_parallel_writers_and_readers, accuracy) cvtest::TS* ts = cvtest::TS::ptr(); // creating VideoWriters - std::vector writers(threadsCount); + std::vector< cv::Ptr > writers(threadsCount); Range range(0, threadsCount); std::vector files(threadsCount); CreateVideoWriterInvoker invoker1(writers, files); @@ -416,11 +416,9 @@ 
TEST(Videoio_Video_parallel_writers_and_readers, accuracy) parallel_for_(range, WriteVideo_Invoker(writers)); // deleting the writers - for (std::vector::iterator i = writers.begin(), end = writers.end(); i != end; ++i) - delete *i; writers.clear(); - std::vector readers(threadsCount); + std::vector > readers(threadsCount); CreateVideoCaptureInvoker invoker2(readers, files); parallel_for_(range, invoker2); @@ -437,8 +435,7 @@ TEST(Videoio_Video_parallel_writers_and_readers, accuracy) } // delete the readers - for (std::vector::iterator i = readers.begin(), end = readers.end(); i != end; ++i) - delete *i; + readers.clear(); } #endif From 8ab6efb8c0e4f71c1161d817c7670e2865ff732e Mon Sep 17 00:00:00 2001 From: Alexander Alekhin Date: Mon, 17 Jun 2019 19:33:02 +0300 Subject: [PATCH 4/4] videoio: refactor GStreamer backend - exception safe smart pointers, fixed many memory leaks - fixed errors handling - fixed strings handling - fixed file existence check - fixed toFraction() implementation - add 'drop=true' for v4l2 input - avoid overriding custom pipelines sink's 'max-buffers' value - updated valgrind supression rules - added support for GStreamer native leaks tracking (via gst_deinit() call) - repaired capturing through GStreamer0.10 (fixed CV_8UC1) --- modules/videoio/src/cap_gstreamer.cpp | 1130 ++++++++++++---------- modules/videoio/src/precomp.hpp | 3 + platforms/scripts/valgrind.supp | 37 + platforms/scripts/valgrind_3rdparty.supp | 90 +- 4 files changed, 745 insertions(+), 515 deletions(-) diff --git a/modules/videoio/src/cap_gstreamer.cpp b/modules/videoio/src/cap_gstreamer.cpp index 95ff159cc3..af947a8e16 100644 --- a/modules/videoio/src/cap_gstreamer.cpp +++ b/modules/videoio/src/cap_gstreamer.cpp @@ -48,12 +48,13 @@ * \brief Use GStreamer to read/write video */ #include "precomp.hpp" + +#include +#include + #include -using namespace std; -#ifndef _MSC_VER -#include -#endif #include + #include #include #include @@ -71,11 +72,7 @@ using namespace std; #endif -#ifdef NDEBUG -#define CV_WARN(message) -#else -#define CV_WARN(message) fprintf(stderr, "OpenCV | GStreamer warning: %s (%s:%d)\n", message, __FILE__, __LINE__) -#endif +#define CV_WARN(...) 
CV_LOG_WARNING(NULL, "OpenCV | GStreamer warning: " << __VA_ARGS__) #if GST_VERSION_MAJOR == 0 #define COLOR_ELEM "ffmpegcolorspace" @@ -85,26 +82,107 @@ using namespace std; #define COLOR_ELEM_NAME COLOR_ELEM #endif -#if defined(_WIN32) || defined(_WIN64) -#if defined(__MINGW32__) -inline char *realpath(const char *path, char *resolved_path) +#if GST_VERSION_MAJOR == 0 +#define CV_GST_FORMAT(format) &(format) +#else +#define CV_GST_FORMAT(format) (format) +#endif + + +namespace cv { + +static void toFraction(double decimal, CV_OUT int& numerator, CV_OUT int& denominator); +static void handleMessage(GstElement * pipeline); + + +namespace { + +template static inline void GSafePtr_addref(T* ptr) { - return _fullpath(resolved_path,path,PATH_MAX); + if (ptr) + g_object_ref_sink(ptr); } + +template static inline void GSafePtr_release(T** pPtr); + +template<> inline void GSafePtr_release(GError** pPtr) { g_clear_error(pPtr); } +template<> inline void GSafePtr_release(GstElement** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstElementFactory** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstPad** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstCaps** pPtr) { if (pPtr) { gst_caps_unref(*pPtr); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstBuffer** pPtr) { if (pPtr) { gst_buffer_unref(*pPtr); *pPtr = NULL; } } +#if GST_VERSION_MAJOR > 0 +template<> inline void GSafePtr_release(GstSample** pPtr) { if (pPtr) { gst_sample_unref(*pPtr); *pPtr = NULL; } } #endif -#define snprintf _snprintf -#define vsnprintf _vsnprintf -#define strcasecmp _stricmp -#define strncasecmp _strnicmp -#include +template<> inline void GSafePtr_release(GstBus** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstMessage** pPtr) { if (pPtr) { gst_message_unref(*pPtr); *pPtr = NULL; } } + +#if FULL_GST_VERSION >= VERSION_NUM(0,10,32) +template<> inline void GSafePtr_release(GstEncodingVideoProfile** pPtr) { if (pPtr) { gst_encoding_profile_unref(*pPtr); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstEncodingContainerProfile** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } #endif -void toFraction(double decimal, double &numerator, double &denominator); -void handleMessage(GstElement * pipeline); +template<> inline void GSafePtr_addref(char* pPtr); // declaration only. not defined. 
should not be used +template<> inline void GSafePtr_release(char** pPtr) { if (pPtr) { g_free(*pPtr); *pPtr = NULL; } } -using namespace cv; +template +class GSafePtr +{ +protected: + T* ptr; +public: + inline GSafePtr() CV_NOEXCEPT : ptr(NULL) { } + inline ~GSafePtr() CV_NOEXCEPT { release(); } + inline void release() CV_NOEXCEPT + { +#if 0 + printf("release: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, ptr); + if (ptr) { + printf(" refcount: %d\n", (int)GST_OBJECT_REFCOUNT_VALUE(ptr)); \ + } +#endif + if (ptr) + GSafePtr_release(&ptr); + } + + inline operator T* () CV_NOEXCEPT { return ptr; } + inline operator /*const*/ T* () const CV_NOEXCEPT { return (T*)ptr; } // there is no const correctness in Gst C API + + inline T* get() CV_NOEXCEPT { return ptr; } + inline /*const*/ T* get() const CV_NOEXCEPT { CV_Assert(ptr); return (T*)ptr; } // there is no const correctness in Gst C API + + inline const T* operator -> () const { CV_Assert(ptr); return ptr; } + inline operator bool () const CV_NOEXCEPT { return ptr != NULL; } + inline bool operator ! () const CV_NOEXCEPT { return ptr == NULL; } -static cv::Mutex gst_initializer_mutex; + inline T** getRef() { CV_Assert(ptr == NULL); return &ptr; } + + inline GSafePtr& reset(T* p) CV_NOEXCEPT // pass result of functions with "transfer floating" ownership + { + //printf("reset: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, p); + release(); + if (p) + { + GSafePtr_addref(p); + ptr = p; + } + return *this; + } + + inline GSafePtr& attach(T* p) CV_NOEXCEPT // pass result of functions with "transfer full" ownership + { + //printf("attach: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, p); + release(); ptr = p; return *this; + } + inline T* detach() CV_NOEXCEPT { T* p = ptr; ptr = NULL; return p; } + + inline void swap(GSafePtr& o) CV_NOEXCEPT { std::swap(ptr, o.ptr); } +private: + GSafePtr(const GSafePtr&); // = disabled + GSafePtr& operator=(const T*); // = disabled +}; + +} // namespace /*! 
* \brief The gst_initializer class @@ -113,29 +191,50 @@ class gst_initializer { public: - static void init() + static gst_initializer& init() { - gst_initializer_mutex.lock(); - static gst_initializer init; - gst_initializer_mutex.unlock(); + static gst_initializer g_init; + if (g_init.isFailed) + CV_Error(Error::StsError, "Can't initialize GStreamer"); + return g_init; } private: - gst_initializer() + bool isFailed; + bool call_deinit; + gst_initializer() : + isFailed(false) { - gst_init(NULL, NULL); + call_deinit = utils::getConfigurationParameterBool("OPENCV_VIDEOIO_GSTREAMER_CALL_DEINIT", false); + + GSafePtr<GError> err; + gst_init_check(NULL, NULL, err.getRef()); + if (err) + { + CV_WARN("Can't initialize GStreamer: " << err->message); + isFailed = true; + return; + } guint major, minor, micro, nano; gst_version(&major, &minor, &micro, &nano); if (GST_VERSION_MAJOR != major) { - CV_WARN("incompatible gstreamer version"); + CV_WARN("incompatible GStreamer version"); + isFailed = true; + return; + } + } + ~gst_initializer() + { + if (call_deinit) + { + // Debug leaks: GST_LEAKS_TRACER_STACK_TRACE=1 GST_DEBUG="GST_TRACER:7" GST_TRACERS="leaks" + gst_deinit(); } -// gst_debug_set_active(1); -// gst_debug_set_colored(1); -// gst_debug_set_default_threshold(GST_LEVEL_INFO); } }; -inline static string get_gst_propname(int propId) +inline static +std::string get_gst_propname(int propId) { switch (propId) { @@ -143,17 +242,15 @@ inline static string get_gst_propname(int propId) case CV_CAP_PROP_CONTRAST: return "contrast"; case CV_CAP_PROP_SATURATION: return "saturation"; case CV_CAP_PROP_HUE: return "hue"; - default: return string(); + default: return std::string(); } } -inline static bool is_gst_element_exists(const std::string & name) +inline static +bool is_gst_element_exists(const std::string& name) { - GstElementFactory * testfac = gst_element_factory_find(name.c_str()); - if (!testfac) - return false; - g_object_unref(G_OBJECT(testfac)); - return true; + GSafePtr<GstElementFactory> testfac; testfac.attach(gst_element_factory_find(name.c_str())); + return (bool)testfac; } //================================================================================================== @@ -161,41 +258,39 @@ inline static bool is_gst_element_exists(const std::string & name) class GStreamerCapture CV_FINAL : public IVideoCapture { private: - GstElement* pipeline; - GstElement* v4l2src; - GstElement* sink; -#if GST_VERSION_MAJOR > 0 - GstSample* sample; + GSafePtr<GstElement> pipeline; + GSafePtr<GstElement> v4l2src; + GSafePtr<GstElement> sink; +#if GST_VERSION_MAJOR == 0 + GSafePtr<GstBuffer> buffer; #else - void * sample; // unused - GstBuffer* buffer; + GSafePtr<GstSample> sample; #endif - GstCaps* caps; + GSafePtr<GstCaps> caps; + gint64 duration; gint width; gint height; - gint channels; double fps; bool isPosFramesSupported; bool isPosFramesEmulated; gint64 emulatedFrameNumber; - bool isOutputByteBuffer; public: GStreamerCapture(); - ~GStreamerCapture(); + virtual ~GStreamerCapture() CV_OVERRIDE; virtual bool grabFrame() CV_OVERRIDE; virtual bool retrieveFrame(int /*unused*/, OutputArray dst) CV_OVERRIDE; virtual double getProperty(int propId) const CV_OVERRIDE; virtual bool setProperty(int propId, double value) CV_OVERRIDE; - virtual bool isOpened() const CV_OVERRIDE; + virtual bool isOpened() const CV_OVERRIDE { return (bool)pipeline; } virtual int getCaptureDomain() CV_OVERRIDE { return cv::CAP_GSTREAMER; } bool open(int id); bool open(const String &filename_); static void newPad(GstElement * /*elem*/, GstPad *pad, gpointer data); protected: - bool 
determineFrameDims(Size & sz); + bool determineFrameDims(CV_OUT Size& sz, CV_OUT gint& channels, CV_OUT bool& isOutputByteBuffer); bool isPipelinePlaying(); void startPipeline(); void stopPipeline(); @@ -204,21 +299,11 @@ protected: void removeFilter(const char *filter); }; -/*! - * \brief CvCapture_GStreamer::init - * inits the class - */ GStreamerCapture::GStreamerCapture() : - pipeline(NULL), v4l2src(NULL), sink(NULL), sample(NULL), -#if GST_VERSION_MAJOR == 0 - buffer(NULL), -#endif - caps(NULL), - duration(-1), width(-1), height(-1), channels(0), fps(-1), + duration(-1), width(-1), height(-1), fps(-1), isPosFramesSupported(false), isPosFramesEmulated(false), - emulatedFrameNumber(-1), - isOutputByteBuffer(false) + emulatedFrameNumber(-1) { } @@ -230,10 +315,10 @@ GStreamerCapture::~GStreamerCapture() { if (isPipelinePlaying()) stopPipeline(); - if (pipeline && GST_IS_ELEMENT(pipeline)) + if (pipeline && GST_IS_ELEMENT(pipeline.get())) { - gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL); - gst_object_unref(GST_OBJECT(pipeline)); + gst_element_set_state(pipeline, GST_STATE_NULL); + pipeline.release(); } } @@ -245,28 +330,24 @@ GStreamerCapture::~GStreamerCapture() */ bool GStreamerCapture::grabFrame() { - if(!pipeline) + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) return false; // start the pipeline if it was not in playing state yet - if(!this->isPipelinePlaying()) + if (!this->isPipelinePlaying()) this->startPipeline(); // bail out if EOS - if(gst_app_sink_is_eos(GST_APP_SINK(sink))) + if (gst_app_sink_is_eos(GST_APP_SINK(sink.get()))) return false; #if GST_VERSION_MAJOR == 0 - if(buffer) - gst_buffer_unref(buffer); - buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink)); - if(!buffer) + buffer.attach(gst_app_sink_pull_buffer(GST_APP_SINK(sink.get()))); + if (!buffer) return false; #else - if(sample) - gst_sample_unref(sample); - sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); - if(!sample) + sample.attach(gst_app_sink_pull_sample(GST_APP_SINK(sink.get()))); + if (!sample) return false; #endif @@ -287,30 +368,33 @@ bool GStreamerCapture::retrieveFrame(int, OutputArray dst) if (!buffer) return false; #else - if(!sample) + if (!sample) return false; #endif Size sz; - if (!determineFrameDims(sz)) + gint channels = 0; + bool isOutputByteBuffer = false; + if (!determineFrameDims(sz, channels, isOutputByteBuffer)) return false; // gstreamer expects us to handle the memory at this point // so we can just wrap the raw buffer and be done with it #if GST_VERSION_MAJOR == 0 - Mat src(sz, CV_8UC1, (uchar*)GST_BUFFER_DATA(buffer)); + Mat src(sz, CV_MAKETYPE(CV_8U, channels), (uchar*)GST_BUFFER_DATA(buffer.get())); src.copyTo(dst); #else - GstBuffer * buf = gst_sample_get_buffer(sample); + GstBuffer* buf = gst_sample_get_buffer(sample); // no lifetime transfer if (!buf) return false; - GstMapInfo info; + GstMapInfo info = {}; if (!gst_buffer_map(buf, &info, GST_MAP_READ)) { //something weird went wrong here. abort. abort. - CV_WARN("Failed to map GStreamerbuffer to system memory"); + CV_WARN("Failed to map GStreamer buffer to system memory"); return false; } + try { Mat src; if (isOutputByteBuffer) @@ -320,37 +404,45 @@ bool GStreamerCapture::retrieveFrame(int, OutputArray dst) CV_Assert(src.isContinuous()); src.copyTo(dst); } + catch (...) 
+ { + gst_buffer_unmap(buf, &info); + throw; + } gst_buffer_unmap(buf, &info); #endif return true; } -bool GStreamerCapture::determineFrameDims(Size &sz) +bool GStreamerCapture::determineFrameDims(Size &sz, gint& channels, bool& isOutputByteBuffer) { #if GST_VERSION_MAJOR == 0 - GstCaps * frame_caps = gst_buffer_get_caps(buffer); + GstCaps * frame_caps = gst_buffer_get_caps(buffer); // no lifetime transfer #else - GstCaps * frame_caps = gst_sample_get_caps(sample); + GstCaps * frame_caps = gst_sample_get_caps(sample); // no lifetime transfer #endif // bail out in no caps if (!GST_CAPS_IS_SIMPLE(frame_caps)) return false; - GstStructure* structure = gst_caps_get_structure(frame_caps, 0); + GstStructure* structure = gst_caps_get_structure(frame_caps, 0); // no lifetime transfer // bail out if width or height are 0 if (!gst_structure_get_int(structure, "width", &width) || !gst_structure_get_int(structure, "height", &height)) + { + CV_WARN("Can't query frame size from GStreeamer buffer"); return false; + } sz = Size(width, height); #if GST_VERSION_MAJOR > 0 - const gchar* name = gst_structure_get_name(structure); - - if (!name) + const gchar* name_ = gst_structure_get_name(structure); + if (!name_) return false; + std::string name = toLowerCase(std::string(name_)); // we support 11 types of data: // video/x-raw, format=BGR -> 8bit, 3 channels @@ -366,58 +458,70 @@ bool GStreamerCapture::determineFrameDims(Size &sz) // image/jpeg -> 8bit, mjpeg: buffer_size x 1 x 1 // bayer data is never decoded, the user is responsible for that // everything is 8 bit, so we just test the caps for bit depth - if (strcasecmp(name, "video/x-raw") == 0) + if (name == "video/x-raw") { - const gchar* format = gst_structure_get_string(structure, "format"); - if (!format) + const gchar* format_ = gst_structure_get_string(structure, "format"); + if (!format_) return false; - if (strcasecmp(format, "BGR") == 0) + std::string format = toUpperCase(std::string(format_)); + + if (format == "BGR") { channels = 3; } - else if( (strcasecmp(format, "UYVY") == 0) || (strcasecmp(format, "YUY2") == 0) || (strcasecmp(format, "YVYU") == 0) ) + else if (format == "UYVY" || format == "YUY2" || format == "YVYU") { channels = 2; } - else if( (strcasecmp(format, "NV12") == 0) || (strcasecmp(format, "NV21") == 0) || (strcasecmp(format, "YV12") == 0) || (strcasecmp(format, "I420") == 0) ) + else if (format == "NV12" || format == "NV21" || format == "YV12" || format == "I420") { channels = 1; sz.height = sz.height * 3 / 2; } - else if(strcasecmp(format, "GRAY8") == 0) + else if (format == "GRAY8") { channels = 1; } + else + { + CV_Error_(Error::StsNotImplemented, ("Unsupported GStreamer format: %s", format.c_str())); + } } - else if (strcasecmp(name, "video/x-bayer") == 0) + else if (name == "video/x-bayer") { channels = 1; } - else if(strcasecmp(name, "image/jpeg") == 0) + else if (name == "image/jpeg") { // the correct size will be set once the first frame arrives channels = 1; isOutputByteBuffer = true; } + else + { + CV_Error_(Error::StsNotImplemented, ("Unsupported GStreamer layer type: %s", name.c_str())); + } #else + CV_UNUSED(isOutputByteBuffer); // we support only video/x-raw, format=BGR -> 8bit, 3 channels channels = 3; #endif return true; } -/*! - * \brief CvCapture_GStreamer::isPipelinePlaying - * \return if the pipeline is currently playing. 
- */ bool GStreamerCapture::isPipelinePlaying() { + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) + { + CV_WARN("GStreamer: pipeline have not been created"); + return false; + } GstState current, pending; GstClockTime timeout = 5*GST_SECOND; GstStateChangeReturn ret = gst_element_get_state(pipeline, &current, &pending, timeout); if (!ret) { - CV_WARN("GStreamer: unable to query pipeline state"); + CV_WARN("unable to query pipeline state"); return false; } return current == GST_STATE_PLAYING; @@ -429,8 +533,12 @@ bool GStreamerCapture::isPipelinePlaying() */ void GStreamerCapture::startPipeline() { - //fprintf(stderr, "relinked, pausing\n"); - GstStateChangeReturn status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING); + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) + { + CV_WARN("GStreamer: pipeline have not been created"); + return; + } + GstStateChangeReturn status = gst_element_set_state(pipeline, GST_STATE_PLAYING); if (status == GST_STATE_CHANGE_ASYNC) { // wait for status update @@ -439,31 +547,28 @@ void GStreamerCapture::startPipeline() if (status == GST_STATE_CHANGE_FAILURE) { handleMessage(pipeline); - gst_object_unref(pipeline); - pipeline = NULL; - CV_WARN("GStreamer: unable to start pipeline"); + pipeline.release(); + CV_WARN("unable to start pipeline"); return; } if (isPosFramesEmulated) emulatedFrameNumber = 0; - //printf("state now playing\n"); handleMessage(pipeline); } -/*! - * \brief CvCapture_GStreamer::stopPipeline - * Stop the pipeline by setting it to NULL - */ void GStreamerCapture::stopPipeline() { - //fprintf(stderr, "restarting pipeline, going to ready\n"); - if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE) + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) + { + CV_WARN("GStreamer: pipeline have not been created"); + return; + } + if (gst_element_set_state(pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE) { - CV_WARN("GStreamer: unable to stop pipeline"); - gst_object_unref(pipeline); - pipeline = NULL; + CV_WARN("unable to stop pipeline"); + pipeline.release(); } } @@ -489,45 +594,47 @@ void GStreamerCapture::restartPipeline() */ void GStreamerCapture::setFilter(const char *prop, int type, int v1, int v2) { - //printf("GStreamer: setFilter \n"); - if(!caps || !( GST_IS_CAPS (caps) )) + if (!caps || !(GST_IS_CAPS(caps.get()))) { - if(type == G_TYPE_INT) + if (type == G_TYPE_INT) { #if GST_VERSION_MAJOR == 0 - caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL); + caps.attach(gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, NULL)); #else - caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL); + caps.attach(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGR", prop, type, v1, NULL)); #endif } else { #if GST_VERSION_MAJOR == 0 - caps = gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL); + caps.attach(gst_caps_new_simple("video/x-raw-rgb", prop, type, v1, v2, NULL)); #else - caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL); + caps.attach(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGR", prop, type, v1, v2, NULL)); #endif } } else { #if GST_VERSION_MAJOR > 0 - if (! 
gst_caps_is_writable(caps)) - caps = gst_caps_make_writable (caps); + if (!gst_caps_is_writable(caps.get())) + caps.attach(gst_caps_make_writable(caps.detach())); #endif - if(type == G_TYPE_INT){ + if (type == G_TYPE_INT) + { gst_caps_set_simple(caps, prop, type, v1, NULL); - }else{ + } + else + { gst_caps_set_simple(caps, prop, type, v1, v2, NULL); } } #if GST_VERSION_MAJOR > 0 - caps = gst_caps_fixate(caps); + caps.attach(gst_caps_fixate(caps.detach())); #endif - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); - //printf("filtering with %s\n", gst_caps_to_string(caps)); + gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps); + GST_LOG("filtering with caps: %" GST_PTR_FORMAT, caps.get()); } /*! @@ -541,14 +648,18 @@ void GStreamerCapture::removeFilter(const char *filter) return; #if GST_VERSION_MAJOR > 0 - if (! gst_caps_is_writable(caps)) - caps = gst_caps_make_writable (caps); + if (!gst_caps_is_writable(caps.get())) + caps.attach(gst_caps_make_writable(caps.detach())); #endif - GstStructure *s = gst_caps_get_structure(caps, 0); + GstStructure *s = gst_caps_get_structure(caps, 0); // no lifetime transfer gst_structure_remove_field(s, filter); - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); +#if GST_VERSION_MAJOR > 0 + caps.attach(gst_caps_fixate(caps.detach())); +#endif + + gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps); } /*! @@ -560,31 +671,24 @@ void GStreamerCapture::removeFilter(const char *filter) */ void GStreamerCapture::newPad(GstElement *, GstPad *pad, gpointer data) { - GstPad *sinkpad; - GstElement *color = (GstElement *) data; + GSafePtr sinkpad; + GstElement* color = (GstElement*)data; - sinkpad = gst_element_get_static_pad (color, "sink"); - if (!sinkpad){ - //fprintf(stderr, "Gstreamer: no pad named sink\n"); + sinkpad.attach(gst_element_get_static_pad(color, "sink")); + if (!sinkpad) { + CV_WARN("no pad named sink"); return; } - gst_pad_link (pad, sinkpad); - gst_object_unref (sinkpad); -} - -bool GStreamerCapture::isOpened() const -{ - return pipeline != NULL; + gst_pad_link(pad, sinkpad.get()); } /*! - * \brief CvCapture_GStreamer::open Open the given file with gstreamer - * \param type CvCapture type. One of CV_CAP_GSTREAMER_* + * \brief Create GStreamer pipeline * \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE * \return boolean. Specifies if opening was successful. * - * In case of CV_CAP_GSTREAMER_V4L(2), a pipelin is constructed as follows: + * In case of camera 'index', a pipeline is constructed as follows: * v4l2src ! autoconvert ! appsink * * @@ -598,16 +702,12 @@ bool GStreamerCapture::isOpened() const * e.g. videotestsrc ! videoconvert ! appsink * the appsink name should be either 'appsink0' (the default) or 'opencvsink' * - * When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval - * larger than the framerate period. (Unlike the uri or manual pipeline description, which assume - * a live source) + * GStreamer will not drop frames if the grabbing interval larger than the framerate period. + * To support dropping for live streams add appsink 'drop' parameter into your custom pipeline. * * The pipeline will only be started whenever the first frame is grabbed. Setting pipeline properties * is really slow if we need to restart the pipeline over and over again. * - * TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used. - * I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)? 
- * */ bool GStreamerCapture::open(int id) { @@ -618,7 +718,7 @@ bool GStreamerCapture::open(int id) std::ostringstream desc; desc << "v4l2src device=/dev/video" << id << " ! " << COLOR_ELEM - << " ! appsink"; + << " ! appsink drop=true"; return open(desc.str()); } @@ -626,14 +726,13 @@ bool GStreamerCapture::open(const String &filename_) { gst_initializer::init(); - const gchar * filename = filename_.c_str(); + const gchar* filename = filename_.c_str(); bool file = false; - //bool stream = false; bool manualpipeline = false; - char *uri = NULL; - GstElement* uridecodebin = NULL; - GstElement* color = NULL; + GSafePtr uri; + GSafePtr uridecodebin; + GSafePtr color; GstStateChangeReturn status; // test if we have a valid uri. If so, open it with an uridecodebin @@ -642,78 +741,64 @@ bool GStreamerCapture::open(const String &filename_) // ordinary file path. if (!gst_uri_is_valid(filename)) { -#ifdef _MSC_VER - uri = new char[2048]; - DWORD pathSize = GetFullPathName(filename, 2048, uri, NULL); - struct stat buf; - if (pathSize == 0 || stat(uri, &buf) != 0) - { - delete[] uri; - uri = NULL; - } -#else - uri = realpath(filename, NULL); -#endif - //stream = false; - if(uri) + if (utils::fs::exists(filename_)) { - uri = g_filename_to_uri(uri, NULL, NULL); - if(uri) + uri.attach(g_filename_to_uri(filename, NULL, NULL)); + if (uri) { file = true; } else { - CV_WARN("GStreamer: Error opening file\n"); - CV_WARN(filename); - CV_WARN(uri); + CV_WARN("Error opening file: " << filename << " (" << uri.get() << ")"); return false; } } else { - GError *err = NULL; - uridecodebin = gst_parse_launch(filename, &err); - if(!uridecodebin) + GSafePtr err; + uridecodebin.attach(gst_parse_launch(filename, err.getRef())); + if (err) { - fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message); + CV_WARN("Error opening bin: " << err->message); return false; } - //stream = true; manualpipeline = true; } } else { - //stream = true; - uri = g_strdup(filename); + uri.attach(g_strdup(filename)); } bool element_from_uri = false; - if(!uridecodebin) + if (!uridecodebin) { // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation. // This means that we cannot use an uridecodebin when dealing with v4l2, since setting // capture properties will not work. // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2. 
- gchar * protocol = gst_uri_get_protocol(uri); - if (!strcasecmp(protocol , "v4l2")) + GSafePtr protocol_; protocol_.attach(gst_uri_get_protocol(uri)); + CV_Assert(protocol_); + std::string protocol = toLowerCase(std::string(protocol_.get())); + if (protocol == "v4l2") { #if GST_VERSION_MAJOR == 0 - uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src"); + uridecodebin.reset(gst_element_make_from_uri(GST_URI_SRC, uri.get(), "src")); #else - uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL); + uridecodebin.reset(gst_element_make_from_uri(GST_URI_SRC, uri.get(), "src", NULL)); #endif + CV_Assert(uridecodebin); element_from_uri = true; } else { - uridecodebin = gst_element_factory_make("uridecodebin", NULL); - g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL); + uridecodebin.reset(gst_element_factory_make("uridecodebin", NULL)); + CV_Assert(uridecodebin); + g_object_set(G_OBJECT(uridecodebin.get()), "uri", uri.get(), NULL); } - g_free(protocol); - if(!uridecodebin) + if (!uridecodebin) { CV_WARN("Can not parse GStreamer URI bin"); return false; @@ -722,17 +807,17 @@ bool GStreamerCapture::open(const String &filename_) if (manualpipeline) { - GstIterator *it = gst_bin_iterate_elements(GST_BIN(uridecodebin)); + GstIterator *it = gst_bin_iterate_elements(GST_BIN(uridecodebin.get())); - GstElement *element = NULL; gboolean done = false; - gchar* name = NULL; #if GST_VERSION_MAJOR > 0 GValue value = G_VALUE_INIT; #endif while (!done) { + GstElement *element = NULL; + GSafePtr name; #if GST_VERSION_MAJOR > 0 switch (gst_iterator_next (it, &value)) { @@ -743,29 +828,28 @@ bool GStreamerCapture::open(const String &filename_) { case GST_ITERATOR_OK: #endif - name = gst_element_get_name(element); + name.attach(gst_element_get_name(element)); if (name) { if (strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) { - sink = GST_ELEMENT ( gst_object_ref (element) ); + sink.attach(GST_ELEMENT(gst_object_ref(element))); } else if (strstr(name, COLOR_ELEM_NAME) != NULL) { - color = GST_ELEMENT ( gst_object_ref (element) ); + color.attach(GST_ELEMENT(gst_object_ref(element))); } else if (strstr(name, "v4l") != NULL) { - v4l2src = GST_ELEMENT ( gst_object_ref (element) ); + v4l2src.attach(GST_ELEMENT(gst_object_ref(element))); } - g_free(name); + name.release(); done = sink && color && v4l2src; } #if GST_VERSION_MAJOR > 0 g_value_unset (&value); #endif - break; case GST_ITERATOR_RESYNC: gst_iterator_resync (it); @@ -780,83 +864,90 @@ bool GStreamerCapture::open(const String &filename_) if (!sink) { - CV_WARN("GStreamer: cannot find appsink in manual pipeline\n"); + CV_WARN("cannot find appsink in manual pipeline"); return false; } - pipeline = uridecodebin; + pipeline.swap(uridecodebin); } else { - pipeline = gst_pipeline_new(NULL); + pipeline.reset(gst_pipeline_new(NULL)); + CV_Assert(pipeline); + // videoconvert (in 0.10: ffmpegcolorspace, in 1.x autovideoconvert) //automatically selects the correct colorspace conversion based on caps. 
- color = gst_element_factory_make(COLOR_ELEM, NULL); - sink = gst_element_factory_make("appsink", NULL); + color.reset(gst_element_factory_make(COLOR_ELEM, NULL)); + CV_Assert(color); + + sink.reset(gst_element_factory_make("appsink", NULL)); + CV_Assert(sink); - gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL); + gst_bin_add_many(GST_BIN(pipeline.get()), uridecodebin.get(), color.get(), sink.get(), NULL); - if(element_from_uri) + if (element_from_uri) { - if(!gst_element_link(uridecodebin, color)) + if(!gst_element_link(uridecodebin, color.get())) { CV_WARN("cannot link color -> sink"); - gst_object_unref(pipeline); - pipeline = NULL; + pipeline.release(); return false; } } else { - g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color); + g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color.get()); } - if(!gst_element_link(color, sink)) + if (!gst_element_link(color.get(), sink.get())) { - CV_WARN("GStreamer: cannot link color -> sink\n"); - gst_object_unref(pipeline); - pipeline = NULL; + CV_WARN("GStreamer: cannot link color -> sink"); + pipeline.release(); return false; } } - //TODO: is 1 single buffer really high enough? - gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1); -// gst_app_sink_set_drop (GST_APP_SINK(sink), stream); + if (!manualpipeline || strstr(filename, " max-buffers=") == NULL) + { + //TODO: is 1 single buffer really high enough? + gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), 1); + } + //do not emit signals: all calls will be synchronous and blocking - gst_app_sink_set_emit_signals (GST_APP_SINK(sink), FALSE); -// gst_base_sink_set_sync(GST_BASE_SINK(sink), FALSE); + gst_app_sink_set_emit_signals (GST_APP_SINK(sink.get()), FALSE); + #if GST_VERSION_MAJOR == 0 - caps = gst_caps_new_simple("video/x-raw-rgb", - "bpp", G_TYPE_INT, 24, - "red_mask", G_TYPE_INT, 0x0000FF, - "green_mask", G_TYPE_INT, 0x00FF00, - "blue_mask", G_TYPE_INT, 0xFF0000, - NULL); + caps.attach(gst_caps_new_simple("video/x-raw-rgb", + "bpp", G_TYPE_INT, 24, + "red_mask", G_TYPE_INT, 0x0000FF, + "green_mask", G_TYPE_INT, 0x00FF00, + "blue_mask", G_TYPE_INT, 0xFF0000, + NULL)); #else - caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg"); + caps.attach(gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg")); - if(manualpipeline){ - GstPad* sink_pad = gst_element_get_static_pad(sink, "sink"); - GstCaps* peer_caps = gst_pad_peer_query_caps(sink_pad,NULL); + if (manualpipeline) + { + GSafePtr peer_caps; + GSafePtr sink_pad; + sink_pad.attach(gst_element_get_static_pad(sink, "sink")); + peer_caps.attach(gst_pad_peer_query_caps(sink_pad, NULL)); if (!gst_caps_can_intersect(caps, peer_caps)) { - gst_caps_unref(caps); - caps = gst_caps_from_string("video/x-raw, format=(string){UYVY,YUY2,YVYU,NV12,NV21,YV12,I420}"); + caps.attach(gst_caps_from_string("video/x-raw, format=(string){UYVY,YUY2,YVYU,NV12,NV21,YV12,I420}")); + CV_Assert(caps); } - gst_object_unref(sink_pad); - gst_caps_unref(peer_caps); } #endif - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); - gst_caps_unref(caps); + gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps); + caps.release(); { - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init"); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init"); - status = 
gst_element_set_state(GST_ELEMENT(pipeline), + status = gst_element_set_state(GST_ELEMENT(pipeline.get()), file ? GST_STATE_PAUSED : GST_STATE_PLAYING); if (status == GST_STATE_CHANGE_ASYNC) { @@ -865,11 +956,10 @@ bool GStreamerCapture::open(const String &filename_) } if (status == GST_STATE_CHANGE_FAILURE) { - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error"); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error"); handleMessage(pipeline); - gst_object_unref(pipeline); - pipeline = NULL; - CV_WARN("GStreamer: unable to start pipeline\n"); + pipeline.release(); + CV_WARN("unable to start pipeline"); return false; } @@ -883,34 +973,32 @@ bool GStreamerCapture::open(const String &filename_) #endif { handleMessage(pipeline); - CV_WARN("GStreamer: unable to query duration of stream"); + CV_WARN("unable to query duration of stream"); duration = -1; } handleMessage(pipeline); - GstPad* pad = gst_element_get_static_pad(sink, "sink"); + GSafePtr pad; + pad.attach(gst_element_get_static_pad(sink, "sink")); + + GSafePtr buffer_caps; #if GST_VERSION_MAJOR == 0 - GstCaps* buffer_caps = gst_pad_get_caps(pad); + buffer_caps.attach(gst_pad_get_caps(pad)); #else - GstCaps* buffer_caps = gst_pad_get_current_caps(pad); + buffer_caps.attach(gst_pad_get_current_caps(pad)); #endif - const GstStructure *structure = gst_caps_get_structure (buffer_caps, 0); - - if (!gst_structure_get_int (structure, "width", &width)) - { - CV_WARN("Cannot query video width\n"); - } - - if (!gst_structure_get_int (structure, "height", &height)) + const GstStructure *structure = gst_caps_get_structure(buffer_caps, 0); // no lifetime transfer + if (!gst_structure_get_int (structure, "width", &width) || + !gst_structure_get_int (structure, "height", &height)) { - CV_WARN("Cannot query video height\n"); + CV_WARN("cannot query video width/height"); } gint num = 0, denom=1; - if(!gst_structure_get_fraction(structure, "framerate", &num, &denom)) + if (!gst_structure_get_fraction(structure, "framerate", &num, &denom)) { - CV_WARN("Cannot query video fps\n"); + CV_WARN("cannot query video fps"); } fps = (double)num/(double)denom; @@ -921,17 +1009,11 @@ bool GStreamerCapture::open(const String &filename_) gboolean status_; format_ = GST_FORMAT_DEFAULT; -#if GST_VERSION_MAJOR == 0 -#define FORMAT &format_ -#else -#define FORMAT format_ -#endif - status_ = gst_element_query_position(sink, FORMAT, &value_); -#undef FORMAT + + status_ = gst_element_query_position(sink, CV_GST_FORMAT(format_), &value_); if (!status_ || value_ != 0 || duration < 0) { - CV_WARN(cv::format("Cannot query video position: status=%d value=%lld duration=%lld\n", - (int)status_, (long long int)value_, (long long int)duration).c_str()); + CV_WARN("Cannot query video position: status=" << status_ << ", value=" << value_ << ", duration=" << duration); isPosFramesSupported = false; isPosFramesEmulated = true; emulatedFrameNumber = 0; @@ -940,7 +1022,7 @@ bool GStreamerCapture::open(const String &filename_) isPosFramesSupported = true; } - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline"); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline"); } return true; @@ -961,21 +1043,16 @@ double GStreamerCapture::getProperty(int propId) const gint64 value; gboolean status; -#if GST_VERSION_MAJOR == 0 -#define FORMAT &format -#else -#define FORMAT format -#endif - if(!pipeline) { CV_WARN("GStreamer: no pipeline"); return 0; } - 
switch(propId) { + switch(propId) + { case CV_CAP_PROP_POS_MSEC: format = GST_FORMAT_TIME; - status = gst_element_query_position(sink, FORMAT, &value); + status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -990,7 +1067,7 @@ double GStreamerCapture::getProperty(int propId) const return 0; // TODO getProperty() "unsupported" value should be changed } format = GST_FORMAT_DEFAULT; - status = gst_element_query_position(sink, FORMAT, &value); + status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -999,7 +1076,7 @@ double GStreamerCapture::getProperty(int propId) const return value; case CV_CAP_PROP_POS_AVI_RATIO: format = GST_FORMAT_PERCENT; - status = gst_element_query_position(sink, FORMAT, &value); + status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -1020,11 +1097,11 @@ double GStreamerCapture::getProperty(int propId) const case CV_CAP_PROP_HUE: if (v4l2src) { - string propName = get_gst_propname(propId); + std::string propName = get_gst_propname(propId); if (!propName.empty()) { gint32 val = 0; - g_object_get(G_OBJECT(v4l2src), propName.c_str(), &val, NULL); + g_object_get(G_OBJECT(v4l2src.get()), propName.c_str(), &val, NULL); return static_cast(val); } } @@ -1035,14 +1112,12 @@ double GStreamerCapture::getProperty(int propId) const CV_WARN("there is no sink yet"); return 0; } - return gst_app_sink_get_max_buffers(GST_APP_SINK(sink)); + return gst_app_sink_get_max_buffers(GST_APP_SINK(sink.get())); default: - CV_WARN("GStreamer: unhandled property"); + CV_WARN("unhandled property: " << propId); break; } -#undef FORMAT - return 0; } @@ -1071,7 +1146,7 @@ bool GStreamerCapture::setProperty(int propId, double value) switch(propId) { case CV_CAP_PROP_POS_MSEC: - if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_TIME, + if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_TIME, flags, (gint64) (value * GST_MSECOND))) { handleMessage(pipeline); CV_WARN("GStreamer: unable to seek"); @@ -1108,7 +1183,7 @@ bool GStreamerCapture::setProperty(int propId, double value) return false; CV_WARN("unable to seek"); } - if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_DEFAULT, + if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_DEFAULT, flags, (gint64) value)) { handleMessage(pipeline); CV_WARN("GStreamer: unable to seek"); @@ -1119,7 +1194,7 @@ bool GStreamerCapture::setProperty(int propId, double value) return true; } case CV_CAP_PROP_POS_AVI_RATIO: - if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_PERCENT, + if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_PERCENT, flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) { handleMessage(pipeline); CV_WARN("GStreamer: unable to seek"); @@ -1154,8 +1229,8 @@ bool GStreamerCapture::setProperty(int propId, double value) break; case CV_CAP_PROP_FPS: if(value > 0) { - double num=0, denom = 1; - toFraction(value, num, denom); + int num = 0, denom = 1; + toFraction(value, num, denom); setFilter("framerate", GST_TYPE_FRACTION, value, denom); } else removeFilter("framerate"); @@ -1166,11 +1241,11 @@ bool GStreamerCapture::setProperty(int propId, double value) case CV_CAP_PROP_HUE: if (v4l2src) { - string 
propName = get_gst_propname(propId); + std::string propName = get_gst_propname(propId); if (!propName.empty()) { gint32 val = cv::saturate_cast(value); - g_object_set(G_OBJECT(v4l2src), propName.c_str(), &val, NULL); + g_object_set(G_OBJECT(v4l2src.get()), propName.c_str(), &val, NULL); return true; } } @@ -1185,7 +1260,7 @@ bool GStreamerCapture::setProperty(int propId, double value) CV_WARN("there is no sink yet"); return false; } - gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value); + gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), (guint) value); return true; } default: @@ -1199,7 +1274,7 @@ bool GStreamerCapture::setProperty(int propId, double value) } -Ptr cv::createGStreamerCapture(const String& filename) +Ptr createGStreamerCapture(const String& filename) { Ptr cap = makePtr(); if (cap && cap->open(filename)) @@ -1207,7 +1282,7 @@ Ptr cv::createGStreamerCapture(const String& filename) return Ptr(); } -Ptr cv::createGStreamerCapture(int index) +Ptr createGStreamerCapture(int index) { Ptr cap = makePtr(); if (cap && cap->open(index)) @@ -1219,13 +1294,13 @@ Ptr cv::createGStreamerCapture(int index) /*! * \brief The CvVideoWriter_GStreamer class - * Use Gstreamer to write video + * Use GStreamer to write video */ class CvVideoWriter_GStreamer : public CvVideoWriter { public: CvVideoWriter_GStreamer() - : pipeline(0), source(0), encodebin(0), file(0), buffer(0), input_pix_fmt(0), + : input_pix_fmt(0), num_frames(0), framerate(0) { } @@ -1239,15 +1314,14 @@ public: virtual bool writeFrame( const IplImage* image ) CV_OVERRIDE; protected: const char* filenameToMimetype(const char* filename); - GstElement* pipeline; - GstElement* source; - GstElement* encodebin; - GstElement* file; + GSafePtr pipeline; + GSafePtr source; - GstBuffer* buffer; int input_pix_fmt; int num_frames; double framerate; + + void close_(); }; /*! @@ -1255,36 +1329,36 @@ protected: * ends the pipeline by sending EOS and destroys the pipeline and all * elements afterwards */ -void CvVideoWriter_GStreamer::close() +void CvVideoWriter_GStreamer::close_() { GstStateChangeReturn status; if (pipeline) { handleMessage(pipeline); - if (gst_app_src_end_of_stream(GST_APP_SRC(source)) != GST_FLOW_OK) + if (gst_app_src_end_of_stream(GST_APP_SRC(source.get())) != GST_FLOW_OK) { - CV_WARN("Cannot send EOS to GStreamer pipeline\n"); - return; + CV_WARN("Cannot send EOS to GStreamer pipeline"); } - - //wait for EOS to trickle down the pipeline. This will let all elements finish properly - GstBus* bus = gst_element_get_bus(pipeline); - GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS)); - if (!msg || GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) + else { - CV_WARN("Error during VideoWriter finalization\n"); - if(msg != NULL) + //wait for EOS to trickle down the pipeline. 
This will let all elements finish properly + GSafePtr bus; bus.attach(gst_element_get_bus(pipeline)); + if (bus) { - gst_message_unref(msg); - g_object_unref(G_OBJECT(bus)); + GSafePtr msg; msg.attach(gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS))); + if (!msg || GST_MESSAGE_TYPE(msg.get()) == GST_MESSAGE_ERROR) + { + CV_WARN("Error during VideoWriter finalization"); + handleMessage(pipeline); + } + } + else + { + CV_WARN("can't get GstBus"); } - return; } - gst_message_unref(msg); - g_object_unref(G_OBJECT(bus)); - status = gst_element_set_state (pipeline, GST_STATE_NULL); if (status == GST_STATE_CHANGE_ASYNC) { @@ -1296,61 +1370,65 @@ void CvVideoWriter_GStreamer::close() if (status == GST_STATE_CHANGE_FAILURE) { handleMessage (pipeline); - gst_object_unref (GST_OBJECT (pipeline)); - pipeline = NULL; - CV_WARN("Unable to stop gstreamer pipeline\n"); - return; + CV_WARN("Unable to stop writer pipeline"); } - - gst_object_unref (GST_OBJECT (pipeline)); - pipeline = NULL; } } +void CvVideoWriter_GStreamer::close() +{ + close_(); + source.release(); + pipeline.release(); +} /*! - * \brief CvVideoWriter_GStreamer::filenameToMimetype + * \brief filenameToMimetype * \param filename * \return mimetype - * Resturns a container mime type for a given filename by looking at it's extension + * Returns a container mime type for a given filename by looking at it's extension */ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename) { //get extension - const char *ext = strrchr(filename, '.'); - if(!ext || ext == filename) return NULL; - ext += 1; //exclude the dot + const char *ext_ = strrchr(filename, '.'); + if (!ext_ || ext_ == filename) + return NULL; + ext_ += 1; //exclude the dot + + std::string ext(ext_); + ext = toLowerCase(ext); // return a container mime based on the given extension. // gstreamer's function returns too much possibilities, which is not useful to us //return the appropriate mime - if (strncasecmp(ext,"avi", 3) == 0) - return (const char*)"video/x-msvideo"; + if (ext == "avi") + return "video/x-msvideo"; - if (strncasecmp(ext,"mkv", 3) == 0 || strncasecmp(ext,"mk3d",4) == 0 || strncasecmp(ext,"webm",4) == 0 ) - return (const char*)"video/x-matroska"; + if (ext == "mkv" || ext == "mk3d" || ext == "webm") + return "video/x-matroska"; - if (strncasecmp(ext,"wmv", 3) == 0) - return (const char*)"video/x-ms-asf"; + if (ext == "wmv") + return "video/x-ms-asf"; - if (strncasecmp(ext,"mov", 3) == 0) - return (const char*)"video/x-quicktime"; + if (ext == "mov") + return "video/x-quicktime"; - if (strncasecmp(ext,"ogg", 3) == 0 || strncasecmp(ext,"ogv", 3) == 0) - return (const char*)"application/ogg"; + if (ext == "ogg" || ext == "ogv") + return "application/ogg"; - if (strncasecmp(ext,"rm", 3) == 0) - return (const char*)"vnd.rn-realmedia"; + if (ext == "rm") + return "vnd.rn-realmedia"; - if (strncasecmp(ext,"swf", 3) == 0) - return (const char*)"application/x-shockwave-flash"; + if (ext == "swf") + return "application/x-shockwave-flash"; - if (strncasecmp(ext,"mp4", 3) == 0) - return (const char*)"video/x-quicktime, variant=(string)iso"; + if (ext == "mp4") + return "video/x-quicktime, variant=(string)iso"; //default to avi - return (const char*)"video/x-msvideo"; + return "video/x-msvideo"; } /*! @@ -1363,7 +1441,7 @@ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename) * \return success * * We support 2 modes of operation. 
 
 /*!
@@ -1363,7 +1441,7 @@ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
  * \return success
  *
  * We support 2 modes of operation. Either the user enters a filename and a fourcc
- * code, or enters a manual pipeline description like in CvVideoCapture_Gstreamer.
+ * code, or enters a manual pipeline description like in CvVideoCapture_GStreamer.
  * In the latter case, we just push frames on the appsink with appropriate caps.
  * In the former case, we try to deduce the correct container from the filename,
  * and the correct encoder from the fourcc profile.
@@ -1375,33 +1453,26 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                     double fps, CvSize frameSize, bool is_color )
 {
     // check arguments
-    assert (filename);
-    assert (fps > 0);
-    assert (frameSize.width > 0 && frameSize.height > 0);
+    CV_Assert(filename);
+    CV_Assert(fps > 0);
+    CV_Assert(frameSize.width > 0 && frameSize.height > 0);
 
     // init gstreamer
     gst_initializer::init();
 
     // init vars
+    GSafePtr<GstElement> file;
+    GSafePtr<GstElement> encodebin;
+
     bool manualpipeline = true;
     int bufsize = 0;
-    GError *err = NULL;
-    const char* mime = NULL;
+    GSafePtr<GError> err;
     GstStateChangeReturn stateret;
-    GstCaps* caps = NULL;
-    GstCaps* videocaps = NULL;
-
-#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
-    GstCaps* containercaps = NULL;
-    GstEncodingContainerProfile* containerprofile = NULL;
-    GstEncodingVideoProfile* videoprofile = NULL;
-#endif
+    GSafePtr<GstCaps> caps;
 
     GstIterator* it = NULL;
     gboolean done = FALSE;
-    GstElement *element = NULL;
-    gchar* name = NULL;
 
 #if GST_VERSION_MAJOR == 0
     GstElement* splitter = NULL;
@@ -1411,34 +1482,49 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
 
     // we first try to construct a pipeline from the given string.
     // if that fails, we assume it is an ordinary filename
-    encodebin = gst_parse_launch(filename, &err);
-    manualpipeline = (encodebin != NULL);
+    encodebin.attach(gst_parse_launch(filename, err.getRef()));
+    manualpipeline = (bool)encodebin;
 
-    if(manualpipeline)
+    if (manualpipeline)
     {
+        if (err)
+        {
+            CV_WARN("error opening writer pipeline: " << err->message);
+            if (encodebin)
+            {
+                gst_element_set_state(encodebin, GST_STATE_NULL);
+            }
+            handleMessage(encodebin);
+            encodebin.release();
+            return false;
+        }
 #if GST_VERSION_MAJOR == 0
-        it = gst_bin_iterate_sources(GST_BIN(encodebin));
-        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
+        it = gst_bin_iterate_sources(GST_BIN(encodebin.get()));
+        if (gst_iterator_next(it, (gpointer *)source.getRef()) != GST_ITERATOR_OK) {
            CV_WARN("GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
 #else
-        it = gst_bin_iterate_sources (GST_BIN(encodebin));
-        GValue value = G_VALUE_INIT;
+        it = gst_bin_iterate_sources (GST_BIN(encodebin.get()));
 
-        while (!done) {
+        while (!done)
+        {
+            GValue value = G_VALUE_INIT;
+            GSafePtr<gchar> name;
+            GstElement* element = NULL;
            switch (gst_iterator_next (it, &value)) {
                case GST_ITERATOR_OK:
-                    element = GST_ELEMENT (g_value_get_object (&value));
-                    name = gst_element_get_name(element);
-                    if (name){
-                        if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
-                            source = GST_ELEMENT ( gst_object_ref (element) );
+                    element = GST_ELEMENT (g_value_get_object (&value));  // no lifetime transfer
+                    name.attach(gst_element_get_name(element));
+                    if (name)
+                    {
+                        if (strstr(name.get(), "opencvsrc") != NULL || strstr(name.get(), "appsrc") != NULL)
+                        {
+                            source.attach(GST_ELEMENT(gst_object_ref(element)));
                            done = TRUE;
                        }
-                        g_free(name);
                    }
-                    g_value_unset (&value);
+                    g_value_unset(&value);
                    break;
                case GST_ITERATOR_RESYNC:
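From the application side, the manual-pipeline branch above corresponds to handing cv::VideoWriter a full gst-launch style description together with CAP_GSTREAMER; the parsed pipeline must contain an appsrc for the backend to feed. A usage sketch (the element chain is only an example and needs the matching GStreamer plugins installed):

    // Hypothetical usage sketch of the manual-pipeline mode of the GStreamer writer.
    #include <opencv2/core.hpp>
    #include <opencv2/videoio.hpp>
    #include <string>

    int main()
    {
        const std::string pipeline =
            "appsrc ! videoconvert ! x264enc ! matroskamux ! filesink location=out.mkv";
        cv::VideoWriter writer;
        // With CAP_GSTREAMER the string is parsed by gst_parse_launch(); the backend
        // then searches the bin for an appsrc element to push frames into.
        // The fourcc argument is typically passed as 0 for a manual pipeline.
        if (!writer.open(pipeline, cv::CAP_GSTREAMER, 0 /*fourcc*/, 30.0,
                         cv::Size(640, 480), true /*isColor*/))
            return 1;
        cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 255, 0));  // solid green test frame
        for (int i = 0; i < 30; i++)
            writer.write(frame);
        writer.release();  // sends EOS and finalizes the container
        return 0;
    }

If gst_parse_launch() fails, the string is treated as an ordinary filename and the fourcc-based branch below is used instead.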
@@ -1457,15 +1543,15 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
             return false;
         }
 #endif
-        pipeline = encodebin;
+        pipeline.swap(encodebin);
     }
     else
     {
-        pipeline = gst_pipeline_new (NULL);
+        err.release();
+        pipeline.reset(gst_pipeline_new(NULL));
 
         // we just got a filename and a fourcc code.
         // first, try to guess the container from the filename
-        //encodebin = gst_element_factory_make("encodebin", NULL);
 
         //proxy old non existing fourcc ids. These were used in previous opencv versions,
         //but do not even exist in gstreamer any more
@@ -1475,75 +1561,83 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
 
         //create encoder caps from fourcc
-
-        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
-        if (!videocaps){
-            CV_WARN("Gstreamer Opencv backend does not support this codec.");
+        GSafePtr<GstCaps> videocaps;
+        videocaps.attach(gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL));
+        if (!videocaps)
+        {
+            CV_WARN("OpenCV backend does not support passed FOURCC value");
             return false;
         }
 
         //create container caps from file extension
-        mime = filenameToMimetype(filename);
-        if (!mime) {
-            CV_WARN("Gstreamer Opencv backend does not support this file type.");
+        const char* mime = filenameToMimetype(filename);
+        if (!mime)
+        {
+            CV_WARN("OpenCV backend does not support this file type (extension): " << filename);
             return false;
         }
 
+        //create pipeline elements
+        encodebin.reset(gst_element_factory_make("encodebin", NULL));
+
 #if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
-        containercaps = gst_caps_from_string(mime);
+        GSafePtr<GstCaps> containercaps;
+        GSafePtr<GstEncodingContainerProfile> containerprofile;
+        GSafePtr<GstEncodingVideoProfile> videoprofile;
 
-        //create encodebin profile
-        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
-        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
-        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
-#endif
+        containercaps.attach(gst_caps_from_string(mime));
 
-        //create pipeline elements
-        encodebin = gst_element_factory_make("encodebin", NULL);
+        //create encodebin profile
+        containerprofile.attach(gst_encoding_container_profile_new("container", "container", containercaps.get(), NULL));
+        videoprofile.reset(gst_encoding_video_profile_new(videocaps.get(), NULL, NULL, 1));
+        gst_encoding_container_profile_add_profile(containerprofile.get(), (GstEncodingProfile*)videoprofile.get());
 
-#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
-        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
+        g_object_set(G_OBJECT(encodebin.get()), "profile", containerprofile.get(), NULL);
 #endif
 
-        source = gst_element_factory_make("appsrc", NULL);
-        file = gst_element_factory_make("filesink", NULL);
-        g_object_set(G_OBJECT(file), "location", filename, NULL);
+
+        source.reset(gst_element_factory_make("appsrc", NULL));
+        file.reset(gst_element_factory_make("filesink", NULL));
+        g_object_set(G_OBJECT(file.get()), "location", (const char*)filename, NULL);
     }
 
+    int fps_num = 0, fps_denom = 1;
+    toFraction(fps, fps_num, fps_denom);
+
     if (fourcc == CV_FOURCC('M','J','P','G') && frameSize.height == 1)
     {
 #if GST_VERSION_MAJOR > 0
         input_pix_fmt = GST_VIDEO_FORMAT_ENCODED;
-        caps = gst_caps_new_simple("image/jpeg",
-                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
-                                   NULL);
-        caps = gst_caps_fixate(caps);
+        caps.attach(gst_caps_new_simple("image/jpeg",
+                                        "framerate", GST_TYPE_FRACTION, int(fps_num), int(fps_denom),
+                                        NULL));
+        caps.attach(gst_caps_fixate(caps.detach()));
 #else
-        CV_WARN("Gstreamer 0.10 Opencv backend does not support writing encoded MJPEG data.");
data."); + CV_WARN("GStreamer 0.10 OpenCV backend does not support writing encoded MJPEG data."); return false; #endif } - else if(is_color) + else if (is_color) { input_pix_fmt = GST_VIDEO_FORMAT_BGR; bufsize = frameSize.width * frameSize.height * 3; #if GST_VERSION_MAJOR == 0 - caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR, - frameSize.width, - frameSize.height, - int(fps), 1, - 1, 1); + caps.attach(gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR, + frameSize.width, + frameSize.height, + gint(fps_num), gint(fps_denom), + 1, 1)); #else - caps = gst_caps_new_simple("video/x-raw", - "format", G_TYPE_STRING, "BGR", - "width", G_TYPE_INT, frameSize.width, - "height", G_TYPE_INT, frameSize.height, - "framerate", GST_TYPE_FRACTION, int(fps), 1, - NULL); - caps = gst_caps_fixate(caps); - + caps.attach(gst_caps_new_simple("video/x-raw", + "format", G_TYPE_STRING, "BGR", + "width", G_TYPE_INT, frameSize.width, + "height", G_TYPE_INT, frameSize.height, + "framerate", GST_TYPE_FRACTION, gint(fps_num), gint(fps_denom), + NULL)); + CV_Assert(caps); + caps.attach(gst_caps_fixate(caps.detach())); #endif - + CV_Assert(caps); } else { @@ -1552,41 +1646,43 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc, bufsize = frameSize.width * frameSize.height; #if GST_VERSION_MAJOR == 0 - caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8, - frameSize.width, - frameSize.height, - int(fps), 1, - 1, 1); + caps.attach(gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8, + frameSize.width, + frameSize.height, + gint(fps_num), gint(fps_denom), + 1, 1)); #else - caps = gst_caps_new_simple("video/x-raw", - "format", G_TYPE_STRING, "GRAY8", - "width", G_TYPE_INT, frameSize.width, - "height", G_TYPE_INT, frameSize.height, - "framerate", GST_TYPE_FRACTION, int(fps), 1, - NULL); - caps = gst_caps_fixate(caps); + caps.attach(gst_caps_new_simple("video/x-raw", + "format", G_TYPE_STRING, "GRAY8", + "width", G_TYPE_INT, frameSize.width, + "height", G_TYPE_INT, frameSize.height, + "framerate", GST_TYPE_FRACTION, gint(fps_num), gint(fps_denom), + NULL)); + caps.attach(gst_caps_fixate(caps.detach())); #endif #else CV_Error(Error::StsError, - "Gstreamer 0.10.29 or newer is required for grayscale input"); + "GStreamer 0.10.29 or newer is required for grayscale input"); #endif } - gst_app_src_set_caps(GST_APP_SRC(source), caps); - gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM); - gst_app_src_set_size (GST_APP_SRC(source), -1); + gst_app_src_set_caps(GST_APP_SRC(source.get()), caps); + gst_app_src_set_stream_type(GST_APP_SRC(source.get()), GST_APP_STREAM_TYPE_STREAM); + gst_app_src_set_size (GST_APP_SRC(source.get()), -1); - g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL); - g_object_set(G_OBJECT(source), "block", 1, NULL); - g_object_set(G_OBJECT(source), "is-live", 0, NULL); + g_object_set(G_OBJECT(source.get()), "format", GST_FORMAT_TIME, NULL); + g_object_set(G_OBJECT(source.get()), "block", 1, NULL); + g_object_set(G_OBJECT(source.get()), "is-live", 0, NULL); - if(!manualpipeline) + if (!manualpipeline) { - g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL); - gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL); - if(!gst_element_link_many(source, encodebin, file, NULL)) { - CV_WARN("GStreamer: cannot link elements\n"); + g_object_set(G_OBJECT(file.get()), "buffer-size", bufsize, NULL); + gst_bin_add_many(GST_BIN(pipeline.get()), source.get(), encodebin.get(), file.get(), NULL); + if (!gst_element_link_many(source.get(), 
@@ -1596,11 +1692,13 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
     // encodebin pipeline to prevent early EOF event handling
     // We always fetch BGR or gray-scale frames, so combiner->spliter
     // endge in graph is useless.
-    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
+    it = gst_bin_iterate_recurse (GST_BIN(encodebin.get()));
     while (!done) {
+        GSafePtr<gchar> name;
+        GstElement* element = NULL;
         switch (gst_iterator_next (it, (void**)&element)) {
         case GST_ITERATOR_OK:
-            name = gst_element_get_name(element);
+            name.attach(gst_element_get_name(element));
             if (strstr(name, "streamsplitter"))
                 splitter = element;
             else if (strstr(name, "streamcombiner"))
@@ -1648,12 +1746,14 @@ bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
     }
 #endif
 
-    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline");
+    GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline");
 
-    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
-    if(stateret == GST_STATE_CHANGE_FAILURE) {
+    stateret = gst_element_set_state(GST_ELEMENT(pipeline.get()), GST_STATE_PLAYING);
+    if (stateret == GST_STATE_CHANGE_FAILURE)
+    {
         handleMessage(pipeline);
         CV_WARN("GStreamer: cannot put pipeline to play\n");
+        pipeline.release();
         return false;
     }
 
@@ -1716,7 +1816,7 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
 
     //gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy
 #if GST_VERSION_MAJOR == 0
-    buffer = gst_buffer_try_new_and_alloc (size);
+    GstBuffer *buffer = gst_buffer_try_new_and_alloc (size);
     if (!buffer)
     {
         CV_WARN("Cannot create GStreamer buffer");
@@ -1726,7 +1826,7 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
     GST_BUFFER_DURATION(buffer) = duration;
     GST_BUFFER_TIMESTAMP(buffer) = timestamp;
 #else
-    buffer = gst_buffer_new_allocate (NULL, size, NULL);
+    GstBuffer *buffer = gst_buffer_new_allocate(NULL, size, NULL);
     GstMapInfo info;
     gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ);
     memcpy(info.data, (guint8*)image->imageData, size);
@@ -1736,10 +1836,11 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
     GST_BUFFER_DTS(buffer) = timestamp;
 #endif
     //set the current number in the frame
-    GST_BUFFER_OFFSET(buffer) =  num_frames;
+    GST_BUFFER_OFFSET(buffer) = num_frames;
 
-    ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
-    if (ret != GST_FLOW_OK) {
+    ret = gst_app_src_push_buffer(GST_APP_SRC(source.get()), buffer);
+    if (ret != GST_FLOW_OK)
+    {
         CV_WARN("Error pushing buffer to GStreamer pipeline");
         return false;
     }
@@ -1751,46 +1852,51 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
     return true;
 }
 
-/*!
- * \brief cvCreateVideoWriter_GStreamer
- * \param filename
- * \param fourcc
- * \param fps
- * \param frameSize
- * \param isColor
- * \return
- * Constructor
- */
 CvVideoWriter* cvCreateVideoWriter_GStreamer(const char* filename, int fourcc, double fps, CvSize frameSize, int isColor )
 {
     CvVideoWriter_GStreamer* wrt = new CvVideoWriter_GStreamer;
-    if( wrt->open(filename, fourcc, fps,frameSize, isColor))
-        return wrt;
+    try
+    {
+        if (wrt->open(filename, fourcc, fps, frameSize, isColor))
+            return wrt;
+        delete wrt;
+    }
+    catch (...)
+    {
+        delete wrt;
+        throw;
+    }
-    delete wrt;
     return 0;
 }
 
 // utility functions
 
-/*!
- * \brief toFraction
- * \param decimal
- * \param numerator
- * \param denominator
- * Split a floating point value into numerator and denominator
- */
-void toFraction(double decimal, double &numerator, double &denominator)
+void toFraction(const double decimal, int &numerator_i, int &denominator_i)
 {
-    double dummy;
-    double whole;
-    decimal = modf (decimal, &whole);
-    for (denominator = 1; denominator<=100; denominator++){
-        if (modf(denominator * decimal, &dummy) < 0.001f)
+    double err = 1.0;
+    int denominator = 1;
+    double numerator = 0;
+    for (int check_denominator = 1; ; check_denominator++)
+    {
+        double check_numerator = (double)check_denominator * decimal;
+        double dummy;
+        double check_err = modf(check_numerator, &dummy);
+        if (check_err < err)
+        {
+            err = check_err;
+            denominator = check_denominator;
+            numerator = check_numerator;
+            if (err < FLT_EPSILON)
+                break;
+        }
+        if (check_denominator == 100)  // limit
             break;
     }
-    numerator = denominator * decimal;
+    numerator_i = cvRound(numerator);
+    denominator_i = denominator;
+    //printf("%g: %d/%d    (err=%g)\n", decimal, numerator_i, denominator_i, err);
 }
@@ -1800,27 +1906,24 @@ void toFraction(double decimal, double &numerator, double &denominator)
  */
 void handleMessage(GstElement * pipeline)
 {
-    GError *err = NULL;
-    gchar *debug = NULL;
-    GstBus* bus = NULL;
+    GSafePtr<GstBus> bus;
     GstStreamStatusType tp;
     GstElement * elem = NULL;
-    GstMessage* msg = NULL;
 
-    bus = gst_element_get_bus(pipeline);
+    bus.attach(gst_element_get_bus(pipeline));
 
-    while(gst_bus_have_pending(bus)) {
-        msg = gst_bus_pop(bus);
-        if (!msg || !GST_IS_MESSAGE(msg))
-        {
+    while (gst_bus_have_pending(bus))
+    {
+        GSafePtr<GstMessage> msg;
+        msg.attach(gst_bus_pop(bus));
+        if (!msg || !GST_IS_MESSAGE(msg.get()))
             continue;
-        }
 
         //printf("\t\tGot %s message\n", GST_MESSAGE_TYPE_NAME(msg));
 
-        if(gst_is_missing_plugin_message(msg))
+        if (gst_is_missing_plugin_message(msg))
         {
-            CV_WARN("your gstreamer installation is missing a required plugin\n");
+            CV_WARN("your GStreamer installation is missing a required plugin");
         }
         else
        {
@@ -1834,15 +1937,18 @@ void handleMessage(GstElement * pipeline)
             //            gst_element_state_get_name(newstate), gst_element_state_get_name(pendstate));
                 break;
             case GST_MESSAGE_ERROR:
-                gst_message_parse_error(msg, &err, &debug);
-                //fprintf(stderr, "\t\tGStreamer Plugin: Embedded video playback halted; module %s reported: %s\n",
-                //    gst_element_get_name(GST_MESSAGE_SRC (msg)), err->message);
-
-                g_error_free(err);
-                g_free(debug);
+            {
+                GSafePtr<GError> err;
+                GSafePtr<gchar> debug;
+                gst_message_parse_error(msg, err.getRef(), debug.getRef());
+                GSafePtr<gchar> name; name.attach(gst_element_get_name(GST_MESSAGE_SRC (msg)));
+                CV_WARN("Embedded video playback halted; module " << name.get() <<
+                        " reported: " << err->message);
+                CV_LOG_DEBUG(NULL, "GStreamer debug: " << debug.get());
 
                 gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL);
                 break;
+            }
             case GST_MESSAGE_EOS:
                 //fprintf(stderr, "\t\treached the end of the stream.");
                 break;
@@ -1855,8 +1961,8 @@ void handleMessage(GstElement * pipeline)
                 break;
            }
        }
-        gst_message_unref(msg);
    }
-
-    gst_object_unref(GST_OBJECT(bus));
 }
+
+
+} // namespace cv
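The new toFraction() gives GStreamer a proper fps_num/fps_denom pair instead of truncating fps to an integer. Below is a self-contained demo of the same bounded-denominator search; approximateFraction is a local stand-in for illustration, not an OpenCV function:

    // Demo of approximating a decimal frame rate as a fraction with denominator <= 100.
    #include <cfloat>
    #include <cmath>
    #include <cstdio>

    void approximateFraction(double decimal, int& num, int& denom)
    {
        double best_err = 1.0;
        int best_denom = 1;
        double best_num = 0.0;
        for (int d = 1; d <= 100; d++)          // denominators are capped at 100
        {
            double n = d * decimal;
            double dummy;
            double err = std::modf(n, &dummy);  // fractional part = distance below an integer
            if (err < best_err)
            {
                best_err = err;
                best_denom = d;
                best_num = n;
                if (err < FLT_EPSILON)
                    break;
            }
        }
        num = (int)(best_num + 0.5);
        denom = best_denom;
    }

    int main()
    {
        int n = 0, d = 1;
        approximateFraction(29.97, n, d);
        std::printf("29.97 ~= %d/%d\n", n, d);  // expected: 2997/100
        approximateFraction(30.0, n, d);
        std::printf("30.00 ~= %d/%d\n", n, d);  // expected: 30/1
        return 0;
    }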
diff --git a/modules/videoio/src/precomp.hpp b/modules/videoio/src/precomp.hpp
index ed5fcd61a9..09b582791e 100644
--- a/modules/videoio/src/precomp.hpp
+++ b/modules/videoio/src/precomp.hpp
@@ -154,8 +154,11 @@ CvVideoWriter* cvCreateVideoWriter_AVFoundation( const char* filename, int fourc
 CvCapture * cvCreateCameraCapture_Unicap  (const int index);
 CvCapture * cvCreateCameraCapture_PvAPI  (const int index);
+
+namespace cv {
 CvVideoWriter* cvCreateVideoWriter_GStreamer( const char* filename, int fourcc,
                                               double fps, CvSize frameSize, int is_color );
+}
 
 namespace cv
diff --git a/platforms/scripts/valgrind.supp b/platforms/scripts/valgrind.supp
index cf3e0a98cb..aa9d24d45c 100644
--- a/platforms/scripts/valgrind.supp
+++ b/platforms/scripts/valgrind.supp
@@ -218,6 +218,14 @@
    fun:__itt_*create*
 }
 
+{
+   OpenCV-gtk_init
+   Memcheck:Leak
+   ...
+   fun:gtk_init
+   fun:cvInitSystem
+}
+
 {
    OpenCV-FFmpeg-swsscale
    Memcheck:Addr16
@@ -227,6 +235,35 @@
    fun:cvWriteFrame_FFMPEG
 }
 
+{
+   OpenCV-GStreamer-gst_init
+   Memcheck:Leak
+   ...
+   fun:gst_init
+}
+
+{
+   OpenCV-GStreamer-gst_deinit
+   Memcheck:Leak
+   ...
+   fun:gst_deinit
+}
+
+{
+   OpenCV-GStreamer-gst_init_check
+   Memcheck:Leak
+   ...
+   fun:gst_init_check
+}
+
+{
+   OpenCV-GStreamer-gst_parse_launch_full-reachable
+   Memcheck:Leak
+   match-leak-kinds: reachable
+   ...
+   fun:gst_parse_launch_full
+}
+
 {
    OpenCV-OpenEXR-ThreadPool
    Memcheck:Leak
diff --git a/platforms/scripts/valgrind_3rdparty.supp b/platforms/scripts/valgrind_3rdparty.supp
index 50811d112d..d94d43d06a 100644
--- a/platforms/scripts/valgrind_3rdparty.supp
+++ b/platforms/scripts/valgrind_3rdparty.supp
@@ -18,6 +18,15 @@
    fun:_ZN7testing8internal11CmpHelperLEIddEENS_15AssertionResultEPKcS4_RKT_RKT0_
 }
 
+{
+   GTest-RegisterTests
+   Memcheck:Leak
+   ...
+   fun:RegisterTests
+   ...
+   fun:_ZN7testing14InitGoogleTestEPiPPc
+}
+
 {
    OpenCL
    Memcheck:Cond
@@ -55,10 +64,10 @@
 }
 
 {
-   glib
+   GTK-css
    Memcheck:Leak
-   fun:*alloc
-   obj:*/libglib*
+   ...
+   fun:gtk_css_provider*
 }
 
 {
@@ -120,3 +129,78 @@
    ...
    fun:cvWriteFrame_FFMPEG
 }
+
+{
+   GStreamer-orc_program_compile_full
+   Memcheck:Leak
+   match-leak-kinds: reachable
+   ...
+   fun:orc_program_compile_full
+   ...
+   fun:clone
+}
+
+{
+   GStreamer-orc_program_new_from_static_bytecode
+   Memcheck:Leak
+   match-leak-kinds: reachable
+   ...
+   fun:orc_program_new_from_static_bytecode
+   ...
+   fun:clone
+}
+
+{
+   GStreamer-matroska-other
+   Memcheck:Leak
+   ...
+   fun:gst*
+   obj:*gstmatroska*
+   ...
+   obj:*glib*
+   fun:start_thread
+   fun:clone
+}
+
+{
+   GStreamer-matroska-gst_riff_create_video_caps
+   Memcheck:Leak
+   ...
+   fun:gst_riff_create_video_caps
+   obj:*gstmatroska*
+   ...
+   fun:clone
+}
+
+
+{
+   GStreamer-tls
+   Memcheck:Leak
+   match-leak-kinds: possible
+   fun:calloc
+   fun:allocate_dtv
+   fun:_dl_allocate_tls
+}
+
+{
+   GStreamer-registry
+   Memcheck:Leak
+   ...
+   fun:gst_update_registry
+}
+
+{
+   GStreamer-plugin_load
+   Memcheck:Leak
+   ...
+   fun:gst_plugin_load_by_name
+}
+
+{
+   GStreamer-separate-threads
+   Memcheck:Leak
+   ...
+   obj:*/libglib*
+   fun:start_thread
+   fun:clone
+}