diff --git a/modules/ts/misc/run_long.py b/modules/ts/misc/run_long.py index 8b3d1e71ca..c23157d18f 100644 --- a/modules/ts/misc/run_long.py +++ b/modules/ts/misc/run_long.py @@ -43,7 +43,8 @@ LONG_TESTS_DEBUG_VALGRIND = [ ('tracking', 'UKF.br_mean_squared_error', 5228.27), ('tracking', '*DistanceAndOverlap*/1', 1000.0), # dudek ('tracking', '*DistanceAndOverlap*/2', 1000.0), # faceocc2 - ('videoio', 'Videoio_Video.ffmpeg_writebig', 1000), + ('videoio', 'videoio/videoio_ffmpeg.write_big*', 1000), + ('videoio', 'videoio_ffmpeg.parallel', 1000), ('xfeatures2d', 'Features2d_RotationInvariance_Descriptor_BoostDesc_LBGM.regression', 1124.51), ('xfeatures2d', 'Features2d_RotationInvariance_Descriptor_VGG120.regression', 2198.1), ('xfeatures2d', 'Features2d_RotationInvariance_Descriptor_VGG48.regression', 1958.52), diff --git a/modules/videoio/src/cap_gstreamer.cpp b/modules/videoio/src/cap_gstreamer.cpp index 3c5383d543..7b3236f1e9 100644 --- a/modules/videoio/src/cap_gstreamer.cpp +++ b/modules/videoio/src/cap_gstreamer.cpp @@ -48,12 +48,13 @@ * \brief Use GStreamer to read/write video */ #include "precomp.hpp" + +#include +#include + #include -using namespace std; -#ifndef _MSC_VER -#include -#endif #include + #include #include #include @@ -68,35 +69,104 @@ using namespace std; #include //#include -#ifdef NDEBUG -#define CV_WARN(message) -#else -#define CV_WARN(message) CV_LOG_WARNING(0, message) -#endif +#define CV_WARN(...) CV_LOG_WARNING(NULL, "OpenCV | GStreamer warning: " << __VA_ARGS__) #define COLOR_ELEM "videoconvert" #define COLOR_ELEM_NAME COLOR_ELEM -#if defined(_WIN32) || defined(_WIN64) -#if defined(__MINGW32__) -inline char *realpath(const char *path, char *resolved_path) +#define CV_GST_FORMAT(format) (format) + + +namespace cv { + +static void toFraction(double decimal, CV_OUT int& numerator, CV_OUT int& denominator); +static void handleMessage(GstElement * pipeline); + + +namespace { + +template static inline void GSafePtr_addref(T* ptr) { - return _fullpath(resolved_path,path,PATH_MAX); + if (ptr) + g_object_ref_sink(ptr); } + +template static inline void GSafePtr_release(T** pPtr); + +template<> inline void GSafePtr_release(GError** pPtr) { g_clear_error(pPtr); } +template<> inline void GSafePtr_release(GstElement** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstElementFactory** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstPad** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstCaps** pPtr) { if (pPtr) { gst_caps_unref(*pPtr); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstBuffer** pPtr) { if (pPtr) { gst_buffer_unref(*pPtr); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstSample** pPtr) { if (pPtr) { gst_sample_unref(*pPtr); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstBus** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstMessage** pPtr) { if (pPtr) { gst_message_unref(*pPtr); *pPtr = NULL; } } + +template<> inline void GSafePtr_release(GstEncodingVideoProfile** pPtr) { if (pPtr) { gst_encoding_profile_unref(*pPtr); *pPtr = NULL; } } +template<> inline void GSafePtr_release(GstEncodingContainerProfile** pPtr) { if (pPtr) { gst_object_unref(G_OBJECT(*pPtr)); *pPtr = NULL; } } + +template<> inline void GSafePtr_addref(char* pPtr); // declaration only. 
not defined. should not be used +template<> inline void GSafePtr_release(char** pPtr) { if (pPtr) { g_free(*pPtr); *pPtr = NULL; } } + +template +class GSafePtr +{ +protected: + T* ptr; +public: + inline GSafePtr() CV_NOEXCEPT : ptr(NULL) { } + inline ~GSafePtr() CV_NOEXCEPT { release(); } + inline void release() CV_NOEXCEPT + { +#if 0 + printf("release: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, ptr); + if (ptr) { + printf(" refcount: %d\n", (int)GST_OBJECT_REFCOUNT_VALUE(ptr)); \ + } #endif -#define snprintf _snprintf -#define vsnprintf _vsnprintf -#define strcasecmp _stricmp -#define strncasecmp _strnicmp -#include -#endif + if (ptr) + GSafePtr_release(&ptr); + } -void toFraction(double decimal, double &numerator, double &denominator); -void handleMessage(GstElement * pipeline); + inline operator T* () CV_NOEXCEPT { return ptr; } + inline operator /*const*/ T* () const CV_NOEXCEPT { return (T*)ptr; } // there is no const correctness in Gst C API -using namespace cv; + inline T* get() CV_NOEXCEPT { return ptr; } + inline /*const*/ T* get() const CV_NOEXCEPT { CV_Assert(ptr); return (T*)ptr; } // there is no const correctness in Gst C API -static cv::Mutex gst_initializer_mutex; + inline const T* operator -> () const { CV_Assert(ptr); return ptr; } + inline operator bool () const CV_NOEXCEPT { return ptr != NULL; } + inline bool operator ! () const CV_NOEXCEPT { return ptr == NULL; } + + inline T** getRef() { CV_Assert(ptr == NULL); return &ptr; } + + inline GSafePtr& reset(T* p) CV_NOEXCEPT // pass result of functions with "transfer floating" ownership + { + //printf("reset: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, p); + release(); + if (p) + { + GSafePtr_addref(p); + ptr = p; + } + return *this; + } + + inline GSafePtr& attach(T* p) CV_NOEXCEPT // pass result of functions with "transfer full" ownership + { + //printf("attach: %s:%d: %p\n", CV__TRACE_FUNCTION, __LINE__, p); + release(); ptr = p; return *this; + } + inline T* detach() CV_NOEXCEPT { T* p = ptr; ptr = NULL; return p; } + + inline void swap(GSafePtr& o) CV_NOEXCEPT { std::swap(ptr, o.ptr); } +private: + GSafePtr(const GSafePtr&); // = disabled + GSafePtr& operator=(const T*); // = disabled +}; + +} // namespace /*! 
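For readers of this patch, a short sketch of how the GSafePtr<> wrapper defined above is intended to be used. This is illustrative only and assumes the template and its release/addref specializations from this patch are in scope; it is not part of the change itself. attach() stores a pointer obtained with "transfer full" ownership, reset() ref-sinks a "transfer floating" result, getRef() exposes a T** for C out-parameters, and detach() hands ownership back to the caller.

    #include <gst/gst.h>

    // Illustrative sketch; assumes GSafePtr<> from this patch is visible here.
    static GstElement* make_appsink_example()
    {
        // getRef(): pass a T** to C out-parameters such as GError**.
        GSafePtr<GError> err;
        if (!gst_init_check(NULL, NULL, err.getRef()))
            return NULL;  // err->message would describe the failure

        // reset(): "transfer floating" results (gst_element_factory_make)
        // are taken via g_object_ref_sink().
        GSafePtr<GstElement> sink;
        sink.reset(gst_element_factory_make("appsink", NULL));
        if (!sink)
            return NULL;

        // attach(): "transfer full" results (newly allocated string) are just
        // stored and released (g_free) by the destructor.
        GSafePtr<gchar> name;
        name.attach(gst_element_get_name(sink.get()));

        // detach(): give ownership back to the caller.
        return sink.detach();
    }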
* \brief The gst_initializer class @@ -105,29 +175,50 @@ static cv::Mutex gst_initializer_mutex; class gst_initializer { public: - static void init() + static gst_initializer& init() { - gst_initializer_mutex.lock(); - static gst_initializer init; - gst_initializer_mutex.unlock(); + static gst_initializer g_init; + if (g_init.isFailed) + CV_Error(Error::StsError, "Can't initialize GStreamer"); + return g_init; } private: - gst_initializer() + bool isFailed; + bool call_deinit; + gst_initializer() : + isFailed(false) { - gst_init(NULL, NULL); + call_deinit = utils::getConfigurationParameterBool("OPENCV_VIDEOIO_GSTREAMER_CALL_DEINIT", false); + + GSafePtr err; + gst_init_check(NULL, NULL, err.getRef()); + if (err) + { + CV_WARN("Can't initialize GStreamer: " << err->message); + isFailed = true; + return; + } guint major, minor, micro, nano; gst_version(&major, &minor, µ, &nano); if (GST_VERSION_MAJOR != major) { - CV_WARN("incompatible gstreamer version"); + CV_WARN("incompatible GStreamer version"); + isFailed = true; + return; + } + } + ~gst_initializer() + { + if (call_deinit) + { + // Debug leaks: GST_LEAKS_TRACER_STACK_TRACE=1 GST_DEBUG="GST_TRACER:7" GST_TRACERS="leaks" + gst_deinit(); } -// gst_debug_set_active(1); -// gst_debug_set_colored(1); -// gst_debug_set_default_threshold(GST_LEVEL_INFO); } }; -inline static string get_gst_propname(int propId) +inline static +std::string get_gst_propname(int propId) { switch (propId) { @@ -135,17 +226,15 @@ inline static string get_gst_propname(int propId) case CV_CAP_PROP_CONTRAST: return "contrast"; case CV_CAP_PROP_SATURATION: return "saturation"; case CV_CAP_PROP_HUE: return "hue"; - default: return string(); + default: return std::string(); } } -inline static bool is_gst_element_exists(const std::string & name) +inline static +bool is_gst_element_exists(const std::string& name) { - GstElementFactory * testfac = gst_element_factory_find(name.c_str()); - if (!testfac) - return false; - g_object_unref(G_OBJECT(testfac)); - return true; + GSafePtr testfac; testfac.attach(gst_element_factory_find(name.c_str())); + return (bool)testfac; } //================================================================================================== @@ -153,36 +242,35 @@ inline static bool is_gst_element_exists(const std::string & name) class GStreamerCapture CV_FINAL : public IVideoCapture { private: - GstElement* pipeline; - GstElement* v4l2src; - GstElement* sink; - GstSample* sample; - GstCaps* caps; + GSafePtr pipeline; + GSafePtr v4l2src; + GSafePtr sink; + GSafePtr sample; + GSafePtr caps; + gint64 duration; gint width; gint height; - gint channels; double fps; bool isPosFramesSupported; bool isPosFramesEmulated; gint64 emulatedFrameNumber; - bool isOutputByteBuffer; public: GStreamerCapture(); - ~GStreamerCapture(); + virtual ~GStreamerCapture() CV_OVERRIDE; virtual bool grabFrame() CV_OVERRIDE; virtual bool retrieveFrame(int /*unused*/, OutputArray dst) CV_OVERRIDE; virtual double getProperty(int propId) const CV_OVERRIDE; virtual bool setProperty(int propId, double value) CV_OVERRIDE; - virtual bool isOpened() const CV_OVERRIDE; + virtual bool isOpened() const CV_OVERRIDE { return (bool)pipeline; } virtual int getCaptureDomain() CV_OVERRIDE { return cv::CAP_GSTREAMER; } bool open(int id); bool open(const String &filename_); static void newPad(GstElement * /*elem*/, GstPad *pad, gpointer data); protected: - bool determineFrameDims(Size & sz); + bool determineFrameDims(CV_OUT Size& sz, CV_OUT gint& channels, CV_OUT bool& isOutputByteBuffer); bool 
isPipelinePlaying(); void startPipeline(); void stopPipeline(); @@ -191,21 +279,11 @@ protected: void removeFilter(const char *filter); }; -/*! - * \brief CvCapture_GStreamer::init - * inits the class - */ GStreamerCapture::GStreamerCapture() : - pipeline(NULL), v4l2src(NULL), sink(NULL), sample(NULL), -#if GST_VERSION_MAJOR == 0 - buffer(NULL), -#endif - caps(NULL), - duration(-1), width(-1), height(-1), channels(0), fps(-1), + duration(-1), width(-1), height(-1), fps(-1), isPosFramesSupported(false), isPosFramesEmulated(false), - emulatedFrameNumber(-1), - isOutputByteBuffer(false) + emulatedFrameNumber(-1) { } @@ -217,10 +295,10 @@ GStreamerCapture::~GStreamerCapture() { if (isPipelinePlaying()) stopPipeline(); - if (pipeline && GST_IS_ELEMENT(pipeline)) + if (pipeline && GST_IS_ELEMENT(pipeline.get())) { - gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL); - gst_object_unref(GST_OBJECT(pipeline)); + gst_element_set_state(pipeline, GST_STATE_NULL); + pipeline.release(); } } @@ -232,21 +310,19 @@ GStreamerCapture::~GStreamerCapture() */ bool GStreamerCapture::grabFrame() { - if(!GST_IS_ELEMENT(pipeline)) + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) return false; // start the pipeline if it was not in playing state yet - if(!this->isPipelinePlaying()) + if (!this->isPipelinePlaying()) this->startPipeline(); // bail out if EOS - if(gst_app_sink_is_eos(GST_APP_SINK(sink))) + if (gst_app_sink_is_eos(GST_APP_SINK(sink.get()))) return false; - if(sample) - gst_sample_unref(sample); - sample = gst_app_sink_pull_sample(GST_APP_SINK(sink)); - if(!sample) + sample.attach(gst_app_sink_pull_sample(GST_APP_SINK(sink.get()))); + if (!sample) return false; if (isPosFramesEmulated) @@ -262,25 +338,28 @@ bool GStreamerCapture::grabFrame() */ bool GStreamerCapture::retrieveFrame(int, OutputArray dst) { - if(!sample) + if (!sample) return false; Size sz; - if (!determineFrameDims(sz)) + gint channels = 0; + bool isOutputByteBuffer = false; + if (!determineFrameDims(sz, channels, isOutputByteBuffer)) return false; // gstreamer expects us to handle the memory at this point // so we can just wrap the raw buffer and be done with it - GstBuffer * buf = gst_sample_get_buffer(sample); + GstBuffer* buf = gst_sample_get_buffer(sample); // no lifetime transfer if (!buf) return false; - GstMapInfo info; + GstMapInfo info = {}; if (!gst_buffer_map(buf, &info, GST_MAP_READ)) { //something weird went wrong here. abort. abort. - CV_WARN("Failed to map GStreamerbuffer to system memory"); + CV_WARN("Failed to map GStreamer buffer to system memory"); return false; } + try { Mat src; if (isOutputByteBuffer) @@ -290,31 +369,40 @@ bool GStreamerCapture::retrieveFrame(int, OutputArray dst) CV_Assert(src.isContinuous()); src.copyTo(dst); } + catch (...) 
+ { + gst_buffer_unmap(buf, &info); + throw; + } gst_buffer_unmap(buf, &info); return true; } -bool GStreamerCapture::determineFrameDims(Size &sz) +bool GStreamerCapture::determineFrameDims(Size &sz, gint& channels, bool& isOutputByteBuffer) { - GstCaps * frame_caps = gst_sample_get_caps(sample); + GstCaps * frame_caps = gst_sample_get_caps(sample); // no lifetime transfer + // bail out in no caps if (!GST_CAPS_IS_SIMPLE(frame_caps)) return false; - GstStructure* structure = gst_caps_get_structure(frame_caps, 0); + GstStructure* structure = gst_caps_get_structure(frame_caps, 0); // no lifetime transfer // bail out if width or height are 0 if (!gst_structure_get_int(structure, "width", &width) || !gst_structure_get_int(structure, "height", &height)) + { + CV_WARN("Can't query frame size from GStreeamer buffer"); return false; + } sz = Size(width, height); - const gchar* name = gst_structure_get_name(structure); - - if (!name) + const gchar* name_ = gst_structure_get_name(structure); + if (!name_) return false; + std::string name = toLowerCase(std::string(name_)); // we support 11 types of data: // video/x-raw, format=BGR -> 8bit, 3 channels @@ -330,49 +418,55 @@ bool GStreamerCapture::determineFrameDims(Size &sz) // image/jpeg -> 8bit, mjpeg: buffer_size x 1 x 1 // bayer data is never decoded, the user is responsible for that // everything is 8 bit, so we just test the caps for bit depth - if (strcasecmp(name, "video/x-raw") == 0) + if (name == "video/x-raw") { - const gchar* format = gst_structure_get_string(structure, "format"); - if (!format) + const gchar* format_ = gst_structure_get_string(structure, "format"); + if (!format_) return false; - if (strcasecmp(format, "BGR") == 0) + std::string format = toUpperCase(std::string(format_)); + + if (format == "BGR") { channels = 3; } - else if( (strcasecmp(format, "UYVY") == 0) || (strcasecmp(format, "YUY2") == 0) || (strcasecmp(format, "YVYU") == 0) ) + else if (format == "UYVY" || format == "YUY2" || format == "YVYU") { channels = 2; } - else if( (strcasecmp(format, "NV12") == 0) || (strcasecmp(format, "NV21") == 0) || (strcasecmp(format, "YV12") == 0) || (strcasecmp(format, "I420") == 0) ) + else if (format == "NV12" || format == "NV21" || format == "YV12" || format == "I420") { channels = 1; sz.height = sz.height * 3 / 2; } - else if(strcasecmp(format, "GRAY8") == 0) + else if (format == "GRAY8") { channels = 1; } + else + { + CV_Error_(Error::StsNotImplemented, ("Unsupported GStreamer format: %s", format.c_str())); + } } - else if (strcasecmp(name, "video/x-bayer") == 0) + else if (name == "video/x-bayer") { channels = 1; } - else if(strcasecmp(name, "image/jpeg") == 0) + else if (name == "image/jpeg") { // the correct size will be set once the first frame arrives channels = 1; isOutputByteBuffer = true; } + else + { + CV_Error_(Error::StsNotImplemented, ("Unsupported GStreamer layer type: %s", name.c_str())); + } return true; } -/*! - * \brief CvCapture_GStreamer::isPipelinePlaying - * \return if the pipeline is currently playing. 
- */ bool GStreamerCapture::isPipelinePlaying() { - if (!GST_IS_ELEMENT(pipeline)) + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) { CV_WARN("GStreamer: pipeline have not been created"); return false; @@ -382,7 +476,7 @@ bool GStreamerCapture::isPipelinePlaying() GstStateChangeReturn ret = gst_element_get_state(pipeline, ¤t, &pending, timeout); if (!ret) { - CV_WARN("GStreamer: unable to query pipeline state"); + CV_WARN("unable to query pipeline state"); return false; } return current == GST_STATE_PLAYING; @@ -394,7 +488,7 @@ bool GStreamerCapture::isPipelinePlaying() */ void GStreamerCapture::startPipeline() { - if (!GST_IS_ELEMENT(pipeline)) + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) { CV_WARN("GStreamer: pipeline have not been created"); return; @@ -408,9 +502,8 @@ void GStreamerCapture::startPipeline() if (status == GST_STATE_CHANGE_FAILURE) { handleMessage(pipeline); - gst_object_unref(pipeline); - pipeline = NULL; - CV_WARN("GStreamer: unable to start pipeline"); + pipeline.release(); + CV_WARN("unable to start pipeline"); return; } @@ -420,22 +513,17 @@ void GStreamerCapture::startPipeline() handleMessage(pipeline); } -/*! - * \brief CvCapture_GStreamer::stopPipeline - * Stop the pipeline by setting it to NULL - */ void GStreamerCapture::stopPipeline() { - if (!GST_IS_ELEMENT(pipeline)) + if (!pipeline || !GST_IS_ELEMENT(pipeline.get())) { CV_WARN("GStreamer: pipeline have not been created"); return; } - if(gst_element_set_state(pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE) + if (gst_element_set_state(pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE) { - CV_WARN("GStreamer: unable to stop pipeline"); - gst_object_unref(pipeline); - pipeline = NULL; + CV_WARN("unable to stop pipeline"); + pipeline.release(); } } @@ -461,31 +549,35 @@ void GStreamerCapture::restartPipeline() */ void GStreamerCapture::setFilter(const char *prop, int type, int v1, int v2) { - if(!caps || !( GST_IS_CAPS (caps) )) + if (!caps || !(GST_IS_CAPS(caps.get()))) { - if(type == G_TYPE_INT) + if (type == G_TYPE_INT) { - caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, NULL); + caps.attach(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGR", prop, type, v1, NULL)); } else { - caps = gst_caps_new_simple("video/x-raw","format",G_TYPE_STRING,"BGR", prop, type, v1, v2, NULL); + caps.attach(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "BGR", prop, type, v1, v2, NULL)); } } else { - if (! gst_caps_is_writable(caps)) - caps = gst_caps_make_writable (caps); - if(type == G_TYPE_INT){ + if (!gst_caps_is_writable(caps.get())) + caps.attach(gst_caps_make_writable(caps.detach())); + if (type == G_TYPE_INT) + { gst_caps_set_simple(caps, prop, type, v1, NULL); - }else{ + } + else + { gst_caps_set_simple(caps, prop, type, v1, v2, NULL); } } - caps = gst_caps_fixate(caps); + caps.attach(gst_caps_fixate(caps.detach())); - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); + gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps); + GST_LOG("filtering with caps: %" GST_PTR_FORMAT, caps.get()); } /*! @@ -498,13 +590,15 @@ void GStreamerCapture::removeFilter(const char *filter) if(!caps) return; - if (! 
gst_caps_is_writable(caps)) - caps = gst_caps_make_writable (caps); + if (!gst_caps_is_writable(caps.get())) + caps.attach(gst_caps_make_writable(caps.detach())); - GstStructure *s = gst_caps_get_structure(caps, 0); + GstStructure *s = gst_caps_get_structure(caps, 0); // no lifetime transfer gst_structure_remove_field(s, filter); - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); + caps.attach(gst_caps_fixate(caps.detach())); + + gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps); } /*! @@ -516,30 +610,24 @@ void GStreamerCapture::removeFilter(const char *filter) */ void GStreamerCapture::newPad(GstElement *, GstPad *pad, gpointer data) { - GstPad *sinkpad; - GstElement *color = (GstElement *) data; + GSafePtr sinkpad; + GstElement* color = (GstElement*)data; - sinkpad = gst_element_get_static_pad (color, "sink"); - if (!sinkpad){ + sinkpad.attach(gst_element_get_static_pad(color, "sink")); + if (!sinkpad) { + CV_WARN("no pad named sink"); return; } - gst_pad_link (pad, sinkpad); - gst_object_unref (sinkpad); -} - -bool GStreamerCapture::isOpened() const -{ - return pipeline != NULL; + gst_pad_link(pad, sinkpad.get()); } /*! - * \brief CvCapture_GStreamer::open Open the given file with gstreamer - * \param type CvCapture type. One of CV_CAP_GSTREAMER_* + * \brief Create GStreamer pipeline * \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE * \return boolean. Specifies if opening was successful. * - * In case of CV_CAP_GSTREAMER_V4L(2), a pipelin is constructed as follows: + * In case of camera 'index', a pipeline is constructed as follows: * v4l2src ! autoconvert ! appsink * * @@ -553,16 +641,12 @@ bool GStreamerCapture::isOpened() const * e.g. videotestsrc ! videoconvert ! appsink * the appsink name should be either 'appsink0' (the default) or 'opencvsink' * - * When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval - * larger than the framerate period. (Unlike the uri or manual pipeline description, which assume - * a live source) + * GStreamer will not drop frames if the grabbing interval larger than the framerate period. + * To support dropping for live streams add appsink 'drop' parameter into your custom pipeline. * * The pipeline will only be started whenever the first frame is grabbed. Setting pipeline properties * is really slow if we need to restart the pipeline over and over again. * - * TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used. - * I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)? - * */ bool GStreamerCapture::open(int id) { @@ -573,7 +657,7 @@ bool GStreamerCapture::open(int id) std::ostringstream desc; desc << "v4l2src device=/dev/video" << id << " ! " << COLOR_ELEM - << " ! appsink"; + << " ! appsink drop=true"; return open(desc.str()); } @@ -581,14 +665,13 @@ bool GStreamerCapture::open(const String &filename_) { gst_initializer::init(); - const gchar * filename = filename_.c_str(); + const gchar* filename = filename_.c_str(); bool file = false; - //bool stream = false; bool manualpipeline = false; - char *uri = NULL; - GstElement* uridecodebin = NULL; - GstElement* color = NULL; + GSafePtr uri; + GSafePtr uridecodebin; + GSafePtr color; GstStateChangeReturn status; // test if we have a valid uri. If so, open it with an uridecodebin @@ -597,74 +680,60 @@ bool GStreamerCapture::open(const String &filename_) // ordinary file path. 
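As the rewritten open() documentation above notes, a capture source can be a camera index, a URI, or a manual pipeline whose appsink is named 'appsink0' (the default) or 'opencvsink'. A minimal usage sketch of the manual-pipeline form through the public API; the videotestsrc pipeline string is only an example, not taken from the patch:

    #include <opencv2/videoio.hpp>
    #include <iostream>

    int main()
    {
        // A videoconvert (COLOR_ELEM) must precede the appsink so that the caps
        // this backend requests (BGR/GRAY8/...) can be negotiated.
        cv::VideoCapture cap("videotestsrc num-buffers=10 ! videoconvert ! appsink",
                             cv::CAP_GSTREAMER);
        if (!cap.isOpened())
        {
            std::cerr << "failed to open GStreamer pipeline" << std::endl;
            return 1;
        }
        cv::Mat frame;
        while (cap.read(frame))  // the pipeline is started on the first grab
            std::cout << "got " << frame.cols << "x" << frame.rows << " frame" << std::endl;
        return 0;
    }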
if (!gst_uri_is_valid(filename)) { -#ifdef _MSC_VER - uri = new char[2048]; - DWORD pathSize = GetFullPathName(filename, 2048, uri, NULL); - struct stat buf; - if (pathSize == 0 || stat(uri, &buf) != 0) + if (utils::fs::exists(filename_)) { - delete[] uri; - uri = NULL; - } -#else - uri = realpath(filename, NULL); -#endif - //stream = false; - if(uri) - { - uri = g_filename_to_uri(uri, NULL, NULL); - if(uri) + uri.attach(g_filename_to_uri(filename, NULL, NULL)); + if (uri) { file = true; } else { - CV_WARN("GStreamer: Error opening file\n"); - CV_WARN(filename); - CV_WARN(uri); + CV_WARN("Error opening file: " << filename << " (" << uri.get() << ")"); return false; } } else { - GError *err = NULL; - uridecodebin = gst_parse_launch(filename, &err); - if(!uridecodebin) + GSafePtr err; + uridecodebin.attach(gst_parse_launch(filename, err.getRef())); + if (err) { - CV_WARN("GStreamer: Error opening bin: " << err->message); + CV_WARN("Error opening bin: " << err->message); return false; } - //stream = true; manualpipeline = true; } } else { - //stream = true; - uri = g_strdup(filename); + uri.attach(g_strdup(filename)); } bool element_from_uri = false; - if(!uridecodebin) + if (!uridecodebin) { // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation. // This means that we cannot use an uridecodebin when dealing with v4l2, since setting // capture properties will not work. // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2. - gchar * protocol = gst_uri_get_protocol(uri); - if (!strcasecmp(protocol , "v4l2")) + GSafePtr protocol_; protocol_.attach(gst_uri_get_protocol(uri)); + CV_Assert(protocol_); + std::string protocol = toLowerCase(std::string(protocol_.get())); + if (protocol == "v4l2") { - uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL); + uridecodebin.reset(gst_element_make_from_uri(GST_URI_SRC, uri.get(), "src", NULL)); + CV_Assert(uridecodebin); element_from_uri = true; } else { - uridecodebin = gst_element_factory_make("uridecodebin", NULL); - g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL); + uridecodebin.reset(gst_element_factory_make("uridecodebin", NULL)); + CV_Assert(uridecodebin); + g_object_set(G_OBJECT(uridecodebin.get()), "uri", uri.get(), NULL); } - g_free(protocol); - if(!uridecodebin) + if (!uridecodebin) { CV_WARN("Can not parse GStreamer URI bin"); return false; @@ -673,40 +742,39 @@ bool GStreamerCapture::open(const String &filename_) if (manualpipeline) { - GstIterator *it = gst_bin_iterate_elements(GST_BIN(uridecodebin)); + GstIterator *it = gst_bin_iterate_elements(GST_BIN(uridecodebin.get())); - GstElement *element = NULL; gboolean done = false; - gchar* name = NULL; GValue value = G_VALUE_INIT; while (!done) { + GstElement *element = NULL; + GSafePtr name; switch (gst_iterator_next (it, &value)) { case GST_ITERATOR_OK: element = GST_ELEMENT (g_value_get_object (&value)); - name = gst_element_get_name(element); + name.attach(gst_element_get_name(element)); if (name) { if (strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) { - sink = GST_ELEMENT ( gst_object_ref (element) ); + sink.attach(GST_ELEMENT(gst_object_ref(element))); } else if (strstr(name, COLOR_ELEM_NAME) != NULL) { - color = GST_ELEMENT ( gst_object_ref (element) ); + color.attach(GST_ELEMENT(gst_object_ref(element))); } else if (strstr(name, "v4l") != NULL) { - v4l2src = GST_ELEMENT ( gst_object_ref (element) ); + 
v4l2src.attach(GST_ELEMENT(gst_object_ref(element))); } - g_free(name); + name.release(); done = sink && color && v4l2src; } g_value_unset (&value); - break; case GST_ITERATOR_RESYNC: gst_iterator_resync (it); @@ -721,73 +789,80 @@ bool GStreamerCapture::open(const String &filename_) if (!sink) { - CV_WARN("GStreamer: cannot find appsink in manual pipeline\n"); + CV_WARN("cannot find appsink in manual pipeline"); return false; } - pipeline = uridecodebin; + pipeline.swap(uridecodebin); } else { - pipeline = gst_pipeline_new(NULL); + pipeline.reset(gst_pipeline_new(NULL)); + CV_Assert(pipeline); + // videoconvert (in 0.10: ffmpegcolorspace, in 1.x autovideoconvert) //automatically selects the correct colorspace conversion based on caps. - color = gst_element_factory_make(COLOR_ELEM, NULL); - sink = gst_element_factory_make("appsink", NULL); + color.reset(gst_element_factory_make(COLOR_ELEM, NULL)); + CV_Assert(color); + + sink.reset(gst_element_factory_make("appsink", NULL)); + CV_Assert(sink); - gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL); + gst_bin_add_many(GST_BIN(pipeline.get()), uridecodebin.get(), color.get(), sink.get(), NULL); - if(element_from_uri) + if (element_from_uri) { - if(!gst_element_link(uridecodebin, color)) + if(!gst_element_link(uridecodebin, color.get())) { CV_WARN("cannot link color -> sink"); - gst_object_unref(pipeline); - pipeline = NULL; + pipeline.release(); return false; } } else { - g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color); + g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color.get()); } - if(!gst_element_link(color, sink)) + if (!gst_element_link(color.get(), sink.get())) { - CV_WARN("GStreamer: cannot link color -> sink\n"); - gst_object_unref(pipeline); - pipeline = NULL; + CV_WARN("GStreamer: cannot link color -> sink"); + pipeline.release(); return false; } } - //TODO: is 1 single buffer really high enough? - gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1); -// gst_app_sink_set_drop (GST_APP_SINK(sink), stream); + if (!manualpipeline || strstr(filename, " max-buffers=") == NULL) + { + //TODO: is 1 single buffer really high enough? 
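The camera path above now appends 'drop=true' to its appsink, and the hunk just below only forces max-buffers=1 when the user's pipeline string does not already contain " max-buffers=". A hypothetical live-stream pipeline that opts into its own buffering policy could therefore look like this (the RTSP URL is a placeholder):

    #include <opencv2/videoio.hpp>

    int main()
    {
        // drop=true: discard stale frames instead of blocking a live source;
        // " max-buffers=2" keeps the backend from overriding the value back to 1.
        cv::VideoCapture cap(
            "rtspsrc location=rtsp://example.com/stream ! decodebin ! videoconvert "
            "! appsink name=opencvsink drop=true max-buffers=2 sync=false",
            cv::CAP_GSTREAMER);
        return cap.isOpened() ? 0 : 1;
    }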
+ gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), 1); + } + //do not emit signals: all calls will be synchronous and blocking - gst_app_sink_set_emit_signals (GST_APP_SINK(sink), FALSE); -// gst_base_sink_set_sync(GST_BASE_SINK(sink), FALSE); + gst_app_sink_set_emit_signals (GST_APP_SINK(sink.get()), FALSE); + - caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg"); + caps.attach(gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}; image/jpeg")); - if(manualpipeline){ - GstPad* sink_pad = gst_element_get_static_pad(sink, "sink"); - GstCaps* peer_caps = gst_pad_peer_query_caps(sink_pad,NULL); + if (manualpipeline) + { + GSafePtr peer_caps; + GSafePtr sink_pad; + sink_pad.attach(gst_element_get_static_pad(sink, "sink")); + peer_caps.attach(gst_pad_peer_query_caps(sink_pad, NULL)); if (!gst_caps_can_intersect(caps, peer_caps)) { - gst_caps_unref(caps); - caps = gst_caps_from_string("video/x-raw, format=(string){UYVY,YUY2,YVYU,NV12,NV21,YV12,I420}"); + caps.attach(gst_caps_from_string("video/x-raw, format=(string){UYVY,YUY2,YVYU,NV12,NV21,YV12,I420}")); + CV_Assert(caps); } - gst_object_unref(sink_pad); - gst_caps_unref(peer_caps); } - gst_app_sink_set_caps(GST_APP_SINK(sink), caps); - gst_caps_unref(caps); + gst_app_sink_set_caps(GST_APP_SINK(sink.get()), caps); + caps.release(); { - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init"); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-init"); - status = gst_element_set_state(GST_ELEMENT(pipeline), + status = gst_element_set_state(GST_ELEMENT(pipeline.get()), file ? GST_STATE_PAUSED : GST_STATE_PLAYING); if (status == GST_STATE_CHANGE_ASYNC) { @@ -796,11 +871,10 @@ bool GStreamerCapture::open(const String &filename_) } if (status == GST_STATE_CHANGE_FAILURE) { - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error"); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-error"); handleMessage(pipeline); - gst_object_unref(pipeline); - pipeline = NULL; - CV_WARN("GStreamer: unable to start pipeline\n"); + pipeline.release(); + CV_WARN("unable to start pipeline"); return false; } @@ -810,30 +884,29 @@ bool GStreamerCapture::open(const String &filename_) if(!gst_element_query_duration(sink, format, &duration)) { handleMessage(pipeline); - CV_WARN("GStreamer: unable to query duration of stream"); + CV_WARN("unable to query duration of stream"); duration = -1; } handleMessage(pipeline); - GstPad* pad = gst_element_get_static_pad(sink, "sink"); - GstCaps* buffer_caps = gst_pad_get_current_caps(pad); - const GstStructure *structure = gst_caps_get_structure (buffer_caps, 0); + GSafePtr pad; + pad.attach(gst_element_get_static_pad(sink, "sink")); - if (!gst_structure_get_int (structure, "width", &width)) - { - CV_WARN("Cannot query video width\n"); - } + GSafePtr buffer_caps; + buffer_caps.attach(gst_pad_get_current_caps(pad)); - if (!gst_structure_get_int (structure, "height", &height)) + const GstStructure *structure = gst_caps_get_structure(buffer_caps, 0); // no lifetime transfer + if (!gst_structure_get_int (structure, "width", &width) || + !gst_structure_get_int (structure, "height", &height)) { - CV_WARN("Cannot query video height\n"); + CV_WARN("cannot query video width/height"); } gint num = 0, denom=1; - if(!gst_structure_get_fraction(structure, 
"framerate", &num, &denom)) + if (!gst_structure_get_fraction(structure, "framerate", &num, &denom)) { - CV_WARN("Cannot query video fps\n"); + CV_WARN("cannot query video fps"); } fps = (double)num/(double)denom; @@ -845,7 +918,7 @@ bool GStreamerCapture::open(const String &filename_) format_ = GST_FORMAT_DEFAULT; - status_ = gst_element_query_position(sink, format_, &value_); + status_ = gst_element_query_position(sink, CV_GST_FORMAT(format_), &value_); if (!status_ || value_ != 0 || duration < 0) { CV_WARN("Cannot query video position: status=" << status_ << ", value=" << value_ << ", duration=" << duration); @@ -857,7 +930,7 @@ bool GStreamerCapture::open(const String &filename_) isPosFramesSupported = true; } - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline"); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline"); } return true; @@ -883,10 +956,11 @@ double GStreamerCapture::getProperty(int propId) const return 0; } - switch(propId) { + switch(propId) + { case CV_CAP_PROP_POS_MSEC: format = GST_FORMAT_TIME; - status = gst_element_query_position(sink, format, &value); + status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -901,7 +975,7 @@ double GStreamerCapture::getProperty(int propId) const return 0; // TODO getProperty() "unsupported" value should be changed } format = GST_FORMAT_DEFAULT; - status = gst_element_query_position(sink, format, &value); + status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -910,7 +984,7 @@ double GStreamerCapture::getProperty(int propId) const return value; case CV_CAP_PROP_POS_AVI_RATIO: format = GST_FORMAT_PERCENT; - status = gst_element_query_position(sink, format, &value); + status = gst_element_query_position(sink.get(), CV_GST_FORMAT(format), &value); if(!status) { handleMessage(pipeline); CV_WARN("GStreamer: unable to query position of stream"); @@ -931,11 +1005,11 @@ double GStreamerCapture::getProperty(int propId) const case CV_CAP_PROP_HUE: if (v4l2src) { - string propName = get_gst_propname(propId); + std::string propName = get_gst_propname(propId); if (!propName.empty()) { gint32 val = 0; - g_object_get(G_OBJECT(v4l2src), propName.c_str(), &val, NULL); + g_object_get(G_OBJECT(v4l2src.get()), propName.c_str(), &val, NULL); return static_cast(val); } } @@ -946,9 +1020,9 @@ double GStreamerCapture::getProperty(int propId) const CV_WARN("there is no sink yet"); return 0; } - return gst_app_sink_get_max_buffers(GST_APP_SINK(sink)); + return gst_app_sink_get_max_buffers(GST_APP_SINK(sink.get())); default: - CV_WARN("GStreamer: unhandled property"); + CV_WARN("unhandled property: " << propId); break; } @@ -980,7 +1054,7 @@ bool GStreamerCapture::setProperty(int propId, double value) switch(propId) { case CV_CAP_PROP_POS_MSEC: - if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_TIME, + if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_TIME, flags, (gint64) (value * GST_MSECOND))) { handleMessage(pipeline); CV_WARN("GStreamer: unable to seek"); @@ -1017,7 +1091,7 @@ bool GStreamerCapture::setProperty(int propId, double value) return false; CV_WARN("unable to seek"); } - if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_DEFAULT, + if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), 
GST_FORMAT_DEFAULT, flags, (gint64) value)) { handleMessage(pipeline); CV_WARN("GStreamer: unable to seek"); @@ -1028,7 +1102,7 @@ bool GStreamerCapture::setProperty(int propId, double value) return true; } case CV_CAP_PROP_POS_AVI_RATIO: - if(!gst_element_seek_simple(GST_ELEMENT(pipeline), GST_FORMAT_PERCENT, + if(!gst_element_seek_simple(GST_ELEMENT(pipeline.get()), GST_FORMAT_PERCENT, flags, (gint64) (value * GST_FORMAT_PERCENT_MAX))) { handleMessage(pipeline); CV_WARN("GStreamer: unable to seek"); @@ -1063,8 +1137,8 @@ bool GStreamerCapture::setProperty(int propId, double value) break; case CV_CAP_PROP_FPS: if(value > 0) { - double num=0, denom = 1; - toFraction(value, num, denom); + int num = 0, denom = 1; + toFraction(value, num, denom); setFilter("framerate", GST_TYPE_FRACTION, value, denom); } else removeFilter("framerate"); @@ -1075,11 +1149,11 @@ bool GStreamerCapture::setProperty(int propId, double value) case CV_CAP_PROP_HUE: if (v4l2src) { - string propName = get_gst_propname(propId); + std::string propName = get_gst_propname(propId); if (!propName.empty()) { gint32 val = cv::saturate_cast(value); - g_object_set(G_OBJECT(v4l2src), propName.c_str(), &val, NULL); + g_object_set(G_OBJECT(v4l2src.get()), propName.c_str(), &val, NULL); return true; } } @@ -1094,7 +1168,7 @@ bool GStreamerCapture::setProperty(int propId, double value) CV_WARN("there is no sink yet"); return false; } - gst_app_sink_set_max_buffers(GST_APP_SINK(sink), (guint) value); + gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), (guint) value); return true; } default: @@ -1108,7 +1182,7 @@ bool GStreamerCapture::setProperty(int propId, double value) } -Ptr cv::createGStreamerCapture_file(const String& filename) +Ptr createGStreamerCapture_file(const String& filename) { Ptr cap = makePtr(); if (cap && cap->open(filename)) @@ -1116,7 +1190,7 @@ Ptr cv::createGStreamerCapture_file(const String& filename) return Ptr(); } -Ptr cv::createGStreamerCapture_cam(int index) +Ptr createGStreamerCapture_cam(int index) { Ptr cap = makePtr(); if (cap && cap->open(index)) @@ -1128,13 +1202,13 @@ Ptr cv::createGStreamerCapture_cam(int index) /*! * \brief The CvVideoWriter_GStreamer class - * Use Gstreamer to write video + * Use GStreamer to write video */ class CvVideoWriter_GStreamer : public CvVideoWriter { public: CvVideoWriter_GStreamer() - : pipeline(0), source(0), encodebin(0), file(0), buffer(0), input_pix_fmt(0), + : input_pix_fmt(0), num_frames(0), framerate(0) { } @@ -1148,15 +1222,14 @@ public: bool writeFrame( const IplImage* image ) CV_OVERRIDE; protected: const char* filenameToMimetype(const char* filename); - GstElement* pipeline; - GstElement* source; - GstElement* encodebin; - GstElement* file; + GSafePtr pipeline; + GSafePtr source; - GstBuffer* buffer; int input_pix_fmt; int num_frames; double framerate; + + void close_(); }; /*! @@ -1164,36 +1237,36 @@ protected: * ends the pipeline by sending EOS and destroys the pipeline and all * elements afterwards */ -void CvVideoWriter_GStreamer::close() +void CvVideoWriter_GStreamer::close_() { GstStateChangeReturn status; if (pipeline) { handleMessage(pipeline); - if (gst_app_src_end_of_stream(GST_APP_SRC(source)) != GST_FLOW_OK) + if (gst_app_src_end_of_stream(GST_APP_SRC(source.get())) != GST_FLOW_OK) { - CV_WARN("Cannot send EOS to GStreamer pipeline\n"); - return; + CV_WARN("Cannot send EOS to GStreamer pipeline"); } - - //wait for EOS to trickle down the pipeline. 
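The CAP_PROP_FPS branch above now converts the requested rate with the integer toFraction() added near the end of this patch, which scans denominators 1..100 and keeps the one leaving the smallest fractional remainder. A self-contained sketch of that idea (same approach, not the patch's exact code), exercised with two exactly representable inputs:

    #include <cmath>
    #include <cfloat>
    #include <cstdio>

    // Try denominators 1..100 and keep the one whose numerator is closest
    // to an integer from below (smallest fractional remainder).
    static void to_fraction(double decimal, int& num, int& denom)
    {
        double best_err = 1.0;
        num = 0;
        denom = 1;
        for (int d = 1; d <= 100; ++d)
        {
            double n = d * decimal;
            double intpart = 0.0;
            double err = std::modf(n, &intpart);
            if (err < best_err)
            {
                best_err = err;
                num = (int)std::lround(n);
                denom = d;
                if (err < FLT_EPSILON)
                    break;
            }
        }
    }

    int main()
    {
        int n = 0, d = 1;
        to_fraction(25.0, n, d);
        std::printf("25.0 -> %d/%d\n", n, d);  // 25/1
        to_fraction(7.5, n, d);
        std::printf("7.5  -> %d/%d\n", n, d);  // 15/2
        return 0;
    }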
This will let all elements finish properly - GstBus* bus = gst_element_get_bus(pipeline); - GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS)); - if (!msg || GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) + else { - CV_WARN("Error during VideoWriter finalization\n"); - if(msg != NULL) + //wait for EOS to trickle down the pipeline. This will let all elements finish properly + GSafePtr bus; bus.attach(gst_element_get_bus(pipeline)); + if (bus) { - gst_message_unref(msg); - g_object_unref(G_OBJECT(bus)); + GSafePtr msg; msg.attach(gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS))); + if (!msg || GST_MESSAGE_TYPE(msg.get()) == GST_MESSAGE_ERROR) + { + CV_WARN("Error during VideoWriter finalization"); + handleMessage(pipeline); + } + } + else + { + CV_WARN("can't get GstBus"); } - return; } - gst_message_unref(msg); - g_object_unref(G_OBJECT(bus)); - status = gst_element_set_state (pipeline, GST_STATE_NULL); if (status == GST_STATE_CHANGE_ASYNC) { @@ -1205,61 +1278,65 @@ void CvVideoWriter_GStreamer::close() if (status == GST_STATE_CHANGE_FAILURE) { handleMessage (pipeline); - gst_object_unref (GST_OBJECT (pipeline)); - pipeline = NULL; - CV_WARN("Unable to stop gstreamer pipeline\n"); - return; + CV_WARN("Unable to stop writer pipeline"); } - - gst_object_unref (GST_OBJECT (pipeline)); - pipeline = NULL; } } +void CvVideoWriter_GStreamer::close() +{ + close_(); + source.release(); + pipeline.release(); +} /*! - * \brief CvVideoWriter_GStreamer::filenameToMimetype + * \brief filenameToMimetype * \param filename * \return mimetype - * Resturns a container mime type for a given filename by looking at it's extension + * Returns a container mime type for a given filename by looking at it's extension */ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename) { //get extension - const char *ext = strrchr(filename, '.'); - if(!ext || ext == filename) return NULL; - ext += 1; //exclude the dot + const char *ext_ = strrchr(filename, '.'); + if (!ext_ || ext_ == filename) + return NULL; + ext_ += 1; //exclude the dot + + std::string ext(ext_); + ext = toLowerCase(ext); // return a container mime based on the given extension. 
// gstreamer's function returns too much possibilities, which is not useful to us //return the appropriate mime - if (strncasecmp(ext,"avi", 3) == 0) - return (const char*)"video/x-msvideo"; + if (ext == "avi") + return "video/x-msvideo"; - if (strncasecmp(ext,"mkv", 3) == 0 || strncasecmp(ext,"mk3d",4) == 0 || strncasecmp(ext,"webm",4) == 0 ) - return (const char*)"video/x-matroska"; + if (ext == "mkv" || ext == "mk3d" || ext == "webm") + return "video/x-matroska"; - if (strncasecmp(ext,"wmv", 3) == 0) - return (const char*)"video/x-ms-asf"; + if (ext == "wmv") + return "video/x-ms-asf"; - if (strncasecmp(ext,"mov", 3) == 0) - return (const char*)"video/x-quicktime"; + if (ext == "mov") + return "video/x-quicktime"; - if (strncasecmp(ext,"ogg", 3) == 0 || strncasecmp(ext,"ogv", 3) == 0) - return (const char*)"application/ogg"; + if (ext == "ogg" || ext == "ogv") + return "application/ogg"; - if (strncasecmp(ext,"rm", 3) == 0) - return (const char*)"vnd.rn-realmedia"; + if (ext == "rm") + return "vnd.rn-realmedia"; - if (strncasecmp(ext,"swf", 3) == 0) - return (const char*)"application/x-shockwave-flash"; + if (ext == "swf") + return "application/x-shockwave-flash"; - if (strncasecmp(ext,"mp4", 3) == 0) - return (const char*)"video/x-quicktime, variant=(string)iso"; + if (ext == "mp4") + return "video/x-quicktime, variant=(string)iso"; //default to avi - return (const char*)"video/x-msvideo"; + return "video/x-msvideo"; } /*! @@ -1272,7 +1349,7 @@ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename) * \return success * * We support 2 modes of operation. Either the user enters a filename and a fourcc - * code, or enters a manual pipeline description like in CvVideoCapture_Gstreamer. + * code, or enters a manual pipeline description like in CvVideoCapture_GStreamer. * In the latter case, we just push frames on the appsink with appropriate caps. * In the former case, we try to deduce the correct container from the filename, * and the correct encoder from the fourcc profile. @@ -1284,55 +1361,66 @@ bool CvVideoWriter_GStreamer::open( const std::string &filename, int fourcc, double fps, const cv::Size &frameSize, bool is_color ) { // check arguments - if (filename.empty() || frameSize.width <= 0 || frameSize.height <= 0 || fps <= 0) - return false; + CV_Assert(!filename.empty()); + CV_Assert(fps > 0); + CV_Assert(frameSize.width > 0 && frameSize.height > 0); // init gstreamer gst_initializer::init(); // init vars + GSafePtr file; + GSafePtr encodebin; + bool manualpipeline = true; int bufsize = 0; - GError *err = NULL; - const char* mime = NULL; + GSafePtr err; GstStateChangeReturn stateret; - GstCaps* caps = NULL; - GstCaps* videocaps = NULL; - - GstCaps* containercaps = NULL; - GstEncodingContainerProfile* containerprofile = NULL; - GstEncodingVideoProfile* videoprofile = NULL; + GSafePtr caps; GstIterator* it = NULL; gboolean done = FALSE; - GstElement *element = NULL; - gchar* name = NULL; // we first try to construct a pipeline from the given string. 
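As described in the writer open() comment above, two modes are supported: a filename plus FOURCC (container deduced from the extension, encoder picked from the FOURCC via encodebin), or a manual pipeline that starts with an appsrc. A usage sketch of both modes through the public API, mirroring the open() overload used by the tests in this patch; the element names in the second string (x264enc, matroskamux) are only an example and must exist in the local GStreamer installation:

    #include <opencv2/videoio.hpp>

    int main()
    {
        const cv::Size sz(640, 480);
        const cv::Mat frame(sz, CV_8UC3, cv::Scalar::all(127));

        // Mode 1: filename + FOURCC; the container comes from the ".avi" extension,
        // the encoder from the FOURCC, glued together by encodebin.
        cv::VideoWriter byName;
        byName.open("out.avi", cv::CAP_GSTREAMER,
                    cv::VideoWriter::fourcc('M', 'J', 'P', 'G'), 30.0, sz, true);

        // Mode 2: manual pipeline; frames are pushed into the leading appsrc and
        // the FOURCC is not used to choose an encoder.
        cv::VideoWriter byPipeline;
        byPipeline.open("appsrc ! videoconvert ! x264enc ! matroskamux ! filesink location=out.mkv",
                        cv::CAP_GSTREAMER, 0 /*fourcc*/, 30.0, sz, true);

        for (int i = 0; i < 30; ++i)
        {
            if (byName.isOpened())     byName << frame;
            if (byPipeline.isOpened()) byPipeline << frame;
        }
        return 0;
    }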
// if that fails, we assume it is an ordinary filename - encodebin = gst_parse_launch(filename.c_str(), &err); - manualpipeline = (encodebin != NULL); + encodebin.attach(gst_parse_launch(filename.c_str(), err.getRef())); + manualpipeline = (bool)encodebin; - if(manualpipeline) + if (manualpipeline) { - it = gst_bin_iterate_sources (GST_BIN(encodebin)); - GValue value = G_VALUE_INIT; + if (err) + { + CV_WARN("error opening writer pipeline: " << err->message); + if (encodebin) + { + gst_element_set_state(encodebin, GST_STATE_NULL); + } + handleMessage(encodebin); + encodebin.release(); + return false; + } + it = gst_bin_iterate_sources (GST_BIN(encodebin.get())); - while (!done) { + while (!done) + { + GValue value = G_VALUE_INIT; + GSafePtr name; + GstElement* element = NULL; switch (gst_iterator_next (it, &value)) { case GST_ITERATOR_OK: - element = GST_ELEMENT (g_value_get_object (&value)); - name = gst_element_get_name(element); - if (name){ - if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) { - source = GST_ELEMENT ( gst_object_ref (element) ); + element = GST_ELEMENT (g_value_get_object (&value)); // no lifetime transfer + name.attach(gst_element_get_name(element)); + if (name) + { + if (strstr(name.get(), "opencvsrc") != NULL || strstr(name.get(), "appsrc") != NULL) + { + source.attach(GST_ELEMENT(gst_object_ref(element))); done = TRUE; } - g_free(name); } - g_value_unset (&value); + g_value_unset(&value); break; case GST_ITERATOR_RESYNC: @@ -1350,15 +1438,15 @@ bool CvVideoWriter_GStreamer::open( const std::string &filename, int fourcc, CV_WARN("GStreamer: cannot find appsrc in manual pipeline\n"); return false; } - pipeline = encodebin; + pipeline.swap(encodebin); } else { - pipeline = gst_pipeline_new (NULL); + err.release(); + pipeline.reset(gst_pipeline_new(NULL)); // we just got a filename and a fourcc code. // first, try to guess the container from the filename - //encodebin = gst_element_factory_make("encodebin", NULL); //proxy old non existing fourcc ids. 
These were used in previous opencv versions, //but do not even exist in gstreamer any more @@ -1368,97 +1456,112 @@ bool CvVideoWriter_GStreamer::open( const std::string &filename, int fourcc, //create encoder caps from fourcc - - videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL); - if (!videocaps){ - CV_WARN("Gstreamer Opencv backend does not support this codec."); + GSafePtr videocaps; + videocaps.attach(gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL)); + if (!videocaps) + { + CV_WARN("OpenCV backend does not support passed FOURCC value"); return false; } //create container caps from file extension - mime = filenameToMimetype(filename.c_str()); - if (!mime) { - CV_WARN("Gstreamer Opencv backend does not support this file type."); + const char* mime = filenameToMimetype(filename.c_str()); + if (!mime) + { + CV_WARN("OpenCV backend does not support this file type (extension): " << filename); return false; } - containercaps = gst_caps_from_string(mime); + //create pipeline elements + encodebin.reset(gst_element_factory_make("encodebin", NULL)); + + GSafePtr containercaps; + GSafePtr containerprofile; + GSafePtr videoprofile; + + containercaps.attach(gst_caps_from_string(mime)); //create encodebin profile - containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL); - videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1); - gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile); + containerprofile.attach(gst_encoding_container_profile_new("container", "container", containercaps.get(), NULL)); + videoprofile.reset(gst_encoding_video_profile_new(videocaps.get(), NULL, NULL, 1)); + gst_encoding_container_profile_add_profile(containerprofile.get(), (GstEncodingProfile*)videoprofile.get()); - //create pipeline elements - encodebin = gst_element_factory_make("encodebin", NULL); + g_object_set(G_OBJECT(encodebin.get()), "profile", containerprofile.get(), NULL); - g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL); - source = gst_element_factory_make("appsrc", NULL); - file = gst_element_factory_make("filesink", NULL); - g_object_set(G_OBJECT(file), "location", filename.c_str(), NULL); + source.reset(gst_element_factory_make("appsrc", NULL)); + file.reset(gst_element_factory_make("filesink", NULL)); + g_object_set(G_OBJECT(file.get()), "location", (const char*)filename.c_str(), NULL); } + int fps_num = 0, fps_denom = 1; + toFraction(fps, fps_num, fps_denom); + if (fourcc == CV_FOURCC('M','J','P','G') && frameSize.height == 1) { input_pix_fmt = GST_VIDEO_FORMAT_ENCODED; - caps = gst_caps_new_simple("image/jpeg", - "framerate", GST_TYPE_FRACTION, int(fps), 1, - NULL); - caps = gst_caps_fixate(caps); + caps.attach(gst_caps_new_simple("image/jpeg", + "framerate", GST_TYPE_FRACTION, int(fps_num), int(fps_denom), + NULL)); + caps.attach(gst_caps_fixate(caps.detach())); } - else if(is_color) + else if (is_color) { input_pix_fmt = GST_VIDEO_FORMAT_BGR; bufsize = frameSize.width * frameSize.height * 3; - caps = gst_caps_new_simple("video/x-raw", - "format", G_TYPE_STRING, "BGR", - "width", G_TYPE_INT, frameSize.width, - "height", G_TYPE_INT, frameSize.height, - "framerate", GST_TYPE_FRACTION, int(fps), 1, - NULL); - caps = gst_caps_fixate(caps); - + caps.attach(gst_caps_new_simple("video/x-raw", + "format", G_TYPE_STRING, "BGR", + "width", G_TYPE_INT, frameSize.width, + "height", G_TYPE_INT, frameSize.height, + "framerate", GST_TYPE_FRACTION, 
gint(fps_num), gint(fps_denom), + NULL)); + CV_Assert(caps); + caps.attach(gst_caps_fixate(caps.detach())); + CV_Assert(caps); } else { input_pix_fmt = GST_VIDEO_FORMAT_GRAY8; bufsize = frameSize.width * frameSize.height; - caps = gst_caps_new_simple("video/x-raw", - "format", G_TYPE_STRING, "GRAY8", - "width", G_TYPE_INT, frameSize.width, - "height", G_TYPE_INT, frameSize.height, - "framerate", GST_TYPE_FRACTION, int(fps), 1, - NULL); - caps = gst_caps_fixate(caps); + caps.attach(gst_caps_new_simple("video/x-raw", + "format", G_TYPE_STRING, "GRAY8", + "width", G_TYPE_INT, frameSize.width, + "height", G_TYPE_INT, frameSize.height, + "framerate", GST_TYPE_FRACTION, gint(fps_num), gint(fps_denom), + NULL)); + caps.attach(gst_caps_fixate(caps.detach())); } - gst_app_src_set_caps(GST_APP_SRC(source), caps); - gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM); - gst_app_src_set_size (GST_APP_SRC(source), -1); + gst_app_src_set_caps(GST_APP_SRC(source.get()), caps); + gst_app_src_set_stream_type(GST_APP_SRC(source.get()), GST_APP_STREAM_TYPE_STREAM); + gst_app_src_set_size (GST_APP_SRC(source.get()), -1); - g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL); - g_object_set(G_OBJECT(source), "block", 1, NULL); - g_object_set(G_OBJECT(source), "is-live", 0, NULL); + g_object_set(G_OBJECT(source.get()), "format", GST_FORMAT_TIME, NULL); + g_object_set(G_OBJECT(source.get()), "block", 1, NULL); + g_object_set(G_OBJECT(source.get()), "is-live", 0, NULL); - if(!manualpipeline) + if (!manualpipeline) { - g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL); - gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL); - if(!gst_element_link_many(source, encodebin, file, NULL)) { - CV_WARN("GStreamer: cannot link elements\n"); + g_object_set(G_OBJECT(file.get()), "buffer-size", bufsize, NULL); + gst_bin_add_many(GST_BIN(pipeline.get()), source.get(), encodebin.get(), file.get(), NULL); + if (!gst_element_link_many(source.get(), encodebin.get(), file.get(), NULL)) + { + CV_WARN("cannot link elements"); + pipeline.release(); return false; } } - GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline"); + GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "write-pipeline"); - stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING); - if(stateret == GST_STATE_CHANGE_FAILURE) { + stateret = gst_element_set_state(GST_ELEMENT(pipeline.get()), GST_STATE_PLAYING); + if (stateret == GST_STATE_CHANGE_FAILURE) + { handleMessage(pipeline); CV_WARN("GStreamer: cannot put pipeline to play\n"); + pipeline.release(); return false; } @@ -1516,7 +1619,7 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image ) timestamp = num_frames * duration; //gst_app_src_push_buffer takes ownership of the buffer, so we need to supply it a copy - buffer = gst_buffer_new_allocate (NULL, size, NULL); + GstBuffer *buffer = gst_buffer_new_allocate(NULL, size, NULL); GstMapInfo info; gst_buffer_map(buffer, &info, (GstMapFlags)GST_MAP_READ); memcpy(info.data, (guint8*)image->imageData, size); @@ -1525,10 +1628,11 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image ) GST_BUFFER_PTS(buffer) = timestamp; GST_BUFFER_DTS(buffer) = timestamp; //set the current number in the frame - GST_BUFFER_OFFSET(buffer) = num_frames; + GST_BUFFER_OFFSET(buffer) = num_frames; - ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer); - if (ret != GST_FLOW_OK) { + ret = 
gst_app_src_push_buffer(GST_APP_SRC(source.get()), buffer); + if (ret != GST_FLOW_OK) + { CV_WARN("Error pushing buffer to GStreamer pipeline"); return false; } @@ -1540,34 +1644,49 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image ) return true; } -Ptr cv::create_GStreamer_writer(const std::string &filename, int fourcc, double fps, const cv::Size &frameSize, bool isColor) +Ptr create_GStreamer_writer(const std::string &filename, int fourcc, double fps, const cv::Size &frameSize, bool isColor) { CvVideoWriter_GStreamer* wrt = new CvVideoWriter_GStreamer; - if (wrt->open(filename, fourcc, fps, frameSize, isColor)) - return makePtr(wrt); - delete wrt; + try + { + if (wrt->open(filename, fourcc, fps, frameSize, isColor)) + return makePtr(wrt); + delete wrt; + } + catch (...) + { + delete wrt; + throw; + } return 0; } // utility functions -/*! - * \brief toFraction - * \param decimal - * \param numerator - * \param denominator - * Split a floating point value into numerator and denominator - */ -void toFraction(double decimal, double &numerator, double &denominator) +void toFraction(const double decimal, int &numerator_i, int &denominator_i) { - double dummy; - double whole; - decimal = modf (decimal, &whole); - for (denominator = 1; denominator<=100; denominator++){ - if (modf(denominator * decimal, &dummy) < 0.001f) + double err = 1.0; + int denominator = 1; + double numerator = 0; + for (int check_denominator = 1; ; check_denominator++) + { + double check_numerator = (double)check_denominator * decimal; + double dummy; + double check_err = modf(check_numerator, &dummy); + if (check_err < err) + { + err = check_err; + denominator = check_denominator; + numerator = check_numerator; + if (err < FLT_EPSILON) + break; + } + if (check_denominator == 100) // limit break; } - numerator = denominator * decimal; + numerator_i = cvRound(numerator); + denominator_i = denominator; + //printf("%g: %d/%d (err=%g)\n", decimal, numerator_i, denominator_i, err); } @@ -1577,25 +1696,22 @@ void toFraction(double decimal, double &numerator, double &denominator) */ void handleMessage(GstElement * pipeline) { - GError *err = NULL; - gchar *debug = NULL; - GstBus* bus = NULL; + GSafePtr bus; GstStreamStatusType tp; GstElement * elem = NULL; - GstMessage* msg = NULL; - bus = gst_element_get_bus(pipeline); + bus.attach(gst_element_get_bus(pipeline)); - while(gst_bus_have_pending(bus)) { - msg = gst_bus_pop(bus); - if (!msg || !GST_IS_MESSAGE(msg)) - { + while (gst_bus_have_pending(bus)) + { + GSafePtr msg; + msg.attach(gst_bus_pop(bus)); + if (!msg || !GST_IS_MESSAGE(msg.get())) continue; - } - if(gst_is_missing_plugin_message(msg)) + if (gst_is_missing_plugin_message(msg)) { - CV_WARN("your gstreamer installation is missing a required plugin\n"); + CV_WARN("your GStreamer installation is missing a required plugin"); } else { @@ -1605,11 +1721,18 @@ void handleMessage(GstElement * pipeline) gst_message_parse_state_changed(msg, &oldstate, &newstate, &pendstate); break; case GST_MESSAGE_ERROR: - gst_message_parse_error(msg, &err, &debug); - g_error_free(err); - g_free(debug); + { + GSafePtr err; + GSafePtr debug; + gst_message_parse_error(msg, err.getRef(), debug.getRef()); + GSafePtr name; name.attach(gst_element_get_name(GST_MESSAGE_SRC (msg))); + CV_WARN("Embedded video playback halted; module " << name.get() << + " reported: " << err->message); + CV_LOG_DEBUG(NULL, "GStreamer debug: " << debug.get()); + gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_NULL); break; + } case GST_MESSAGE_EOS: 
                 break;
             case GST_MESSAGE_STREAM_STATUS:
@@ -1619,12 +1742,11 @@ void handleMessage(GstElement * pipeline)
                 break;
             }
         }
-        gst_message_unref(msg);
     }
-
-    gst_object_unref(GST_OBJECT(bus));
 }
 
+} // namespace cv
+
 //==================================================================================================
 
 #if defined(BUILD_PLUGIN)
diff --git a/modules/videoio/test/test_gstreamer.cpp b/modules/videoio/test/test_gstreamer.cpp
index 7cadde1ce1..7bf8b750a1 100644
--- a/modules/videoio/test/test_gstreamer.cpp
+++ b/modules/videoio/test/test_gstreamer.cpp
@@ -73,4 +73,22 @@ Param test_data[] = {
 
 INSTANTIATE_TEST_CASE_P(videoio, videoio_gstreamer, testing::ValuesIn(test_data));
 
+TEST(Videoio_GStreamer, unsupported_pipeline)
+{
+    VideoCaptureAPIs apiPref = CAP_GSTREAMER;
+    if (!isBackendAvailable(apiPref, cv::videoio_registry::getStreamBackends()))
+        throw SkipTestException(cv::String("Backend is not available/disabled: ") + cv::videoio_registry::getBackendName(apiPref));
+
+    // could not link videoconvert0 to matroskamux0, matroskamux0 can't handle caps video/x-raw, format=(string)RGBA
+    std::string pipeline = "appsrc ! videoconvert ! video/x-raw, format=(string)RGBA ! matroskamux ! filesink location=test.mkv";
+    Size frame_size(640, 480);
+
+    VideoWriter writer;
+    EXPECT_NO_THROW(writer.open(pipeline, apiPref, 0/*fourcc*/, 30/*fps*/, frame_size, true));
+    EXPECT_FALSE(writer.isOpened());
+    // no frames
+    EXPECT_NO_THROW(writer.release());
+
+}
+
 } // namespace
diff --git a/modules/videoio/test/test_video_io.cpp b/modules/videoio/test/test_video_io.cpp
index 19caa349f7..08deff523f 100644
--- a/modules/videoio/test/test_video_io.cpp
+++ b/modules/videoio/test/test_video_io.cpp
@@ -227,8 +227,8 @@ public:
 struct Ext_Fourcc_PSNR
 {
-    string ext;
-    string fourcc;
+    const char* ext;
+    const char* fourcc;
     float PSNR;
     VideoCaptureAPIs api;
 };
 
@@ -345,18 +345,6 @@ INSTANTIATE_TEST_CASE_P(videoio, videoio_bunny,
                                             testing::ValuesIn(backend_params)));
 
-//==================================================================================================
-
-inline Ext_Fourcc_PSNR makeParam(const char * ext, const char * fourcc, float psnr, VideoCaptureAPIs apipref)
-{
-    Ext_Fourcc_PSNR res;
-    res.ext = ext;
-    res.fourcc = fourcc;
-    res.PSNR = psnr;
-    res.api = apipref;
-    return res;
-}
-
 inline static std::ostream &operator<<(std::ostream &out, const Ext_Fourcc_PSNR &p)
 {
     out << "FOURCC(" << p.fourcc << "), ." << p.ext << ", " << p.api << ", " << p.PSNR << "dB"; return out;
@@ -366,41 +354,41 @@ static Ext_Fourcc_PSNR synthetic_params[] = {
 
 #ifdef HAVE_MSMF
 #if !defined(_M_ARM)
-    makeParam("wmv", "WMV1", 30.f, CAP_MSMF),
-    makeParam("wmv", "WMV2", 30.f, CAP_MSMF),
+    {"wmv", "WMV1", 30.f, CAP_MSMF},
+    {"wmv", "WMV2", 30.f, CAP_MSMF},
 #endif
-    makeParam("wmv", "WMV3", 30.f, CAP_MSMF),
-    makeParam("wmv", "WVC1", 30.f, CAP_MSMF),
-    makeParam("mov", "H264", 30.f, CAP_MSMF),
+    {"wmv", "WMV3", 30.f, CAP_MSMF},
+    {"wmv", "WVC1", 30.f, CAP_MSMF},
+    {"mov", "H264", 30.f, CAP_MSMF},
 #endif
 
 #ifdef HAVE_AVFOUNDATION
-    makeParam("mov", "H264", 30.f, CAP_AVFOUNDATION),
-    makeParam("mov", "MJPG", 30.f, CAP_AVFOUNDATION),
-    makeParam("mp4", "H264", 30.f, CAP_AVFOUNDATION),
-    makeParam("mp4", "MJPG", 30.f, CAP_AVFOUNDATION),
-    makeParam("m4v", "H264", 30.f, CAP_AVFOUNDATION),
-    makeParam("m4v", "MJPG", 30.f, CAP_AVFOUNDATION),
+    {"mov", "H264", 30.f, CAP_AVFOUNDATION},
+    {"mov", "MJPG", 30.f, CAP_AVFOUNDATION},
+    {"mp4", "H264", 30.f, CAP_AVFOUNDATION},
+    {"mp4", "MJPG", 30.f, CAP_AVFOUNDATION},
+    {"m4v", "H264", 30.f, CAP_AVFOUNDATION},
+    {"m4v", "MJPG", 30.f, CAP_AVFOUNDATION},
 #endif
 
-    makeParam("avi", "XVID", 30.f, CAP_FFMPEG),
-    makeParam("avi", "MPEG", 30.f, CAP_FFMPEG),
-    makeParam("avi", "IYUV", 30.f, CAP_FFMPEG),
-    makeParam("avi", "MJPG", 30.f, CAP_FFMPEG),
+    {"avi", "XVID", 30.f, CAP_FFMPEG},
+    {"avi", "MPEG", 30.f, CAP_FFMPEG},
+    {"avi", "IYUV", 30.f, CAP_FFMPEG},
+    {"avi", "MJPG", 30.f, CAP_FFMPEG},
 
-    makeParam("mkv", "XVID", 30.f, CAP_FFMPEG),
-    makeParam("mkv", "MPEG", 30.f, CAP_FFMPEG),
-    makeParam("mkv", "MJPG", 30.f, CAP_FFMPEG),
+    {"mkv", "XVID", 30.f, CAP_FFMPEG},
+    {"mkv", "MPEG", 30.f, CAP_FFMPEG},
+    {"mkv", "MJPG", 30.f, CAP_FFMPEG},
 
-    makeParam("avi", "MPEG", 30.f, CAP_GSTREAMER),
-    makeParam("avi", "MJPG", 30.f, CAP_GSTREAMER),
-    makeParam("avi", "H264", 30.f, CAP_GSTREAMER),
+    {"avi", "MPEG", 30.f, CAP_GSTREAMER},
+    {"avi", "MJPG", 30.f, CAP_GSTREAMER},
+    {"avi", "H264", 30.f, CAP_GSTREAMER},
 
-    makeParam("mkv", "MPEG", 30.f, CAP_GSTREAMER),
-    makeParam("mkv", "MJPG", 30.f, CAP_GSTREAMER),
-    makeParam("mkv", "H264", 30.f, CAP_GSTREAMER),
+    {"mkv", "MPEG", 30.f, CAP_GSTREAMER},
+    {"mkv", "MJPG", 30.f, CAP_GSTREAMER},
+    {"mkv", "H264", 30.f, CAP_GSTREAMER},
 
-    makeParam("avi", "MJPG", 30.f, CAP_OPENCV_MJPEG),
+    {"avi", "MJPG", 30.f, CAP_OPENCV_MJPEG},
 };
 
@@ -416,6 +404,89 @@ INSTANTIATE_TEST_CASE_P(videoio, videoio_synthetic,
                                             testing::ValuesIn(all_sizes),
                                             testing::ValuesIn(synthetic_params)));
 
+struct Ext_Fourcc_API
+{
+    const char* ext;
+    const char* fourcc;
+    VideoCaptureAPIs api;
+};
+
+inline static std::ostream &operator<<(std::ostream &out, const Ext_Fourcc_API &p)
+{
+    out << "(FOURCC(" << p.fourcc << "), \"" << p.ext << "\", " << p.api << ")"; return out;
+}
+
+
+class Videoio_Writer : public Videoio_Test_Base, public testing::TestWithParam<Ext_Fourcc_API>
+{
+protected:
+    Size frame_size;
+    int fourcc;
+    double fps;
+public:
+    Videoio_Writer()
+    {
+        frame_size = Size(640, 480);
+        const Ext_Fourcc_API p = GetParam();
+        ext = p.ext;
+        fourcc = fourccFromString(p.fourcc);
+        if (ext.size() == 3)
+            video_file = cv::tempfile((fourccToString(fourcc) + "." + ext).c_str());
+        else
+            video_file = ext;
+        fps = 25.;
+        apiPref = p.api;
+    }
+    void SetUp()
+    {
+    }
+    void TearDown()
+    {
+        if (ext.size() == 3)
+            (void)remove(video_file.c_str());
+    }
+};
+
+TEST_P(Videoio_Writer, write_nothing)
+{
+    if (!cv::videoio_registry::hasBackend(apiPref))
+        throw SkipTestException(cv::String("Backend is not available/disabled: ") + cv::videoio_registry::getBackendName(apiPref));
+
+    VideoWriter writer;
+    EXPECT_NO_THROW(writer.open(video_file, apiPref, fourcc, fps, frame_size, true));
+    ASSERT_TRUE(writer.isOpened());
+#if 0 // no frames
+    cv::Mat m(frame_size, CV_8UC3, Scalar::all(127));
+    writer << m;
+#endif
+    EXPECT_NO_THROW(writer.release());
+}
+
+static vector<Ext_Fourcc_API> generate_Ext_Fourcc_API()
+{
+    const size_t N = sizeof(synthetic_params)/sizeof(synthetic_params[0]);
+    vector<Ext_Fourcc_API> result; result.reserve(N);
+    for (size_t i = 0; i < N; i++)
+    {
+        const Ext_Fourcc_PSNR& src = synthetic_params[i];
+        Ext_Fourcc_API e = { src.ext, src.fourcc, src.api };
+        result.push_back(e);
+    }
+
+    {
+        Ext_Fourcc_API e = { "appsrc ! videoconvert ! video/x-raw, format=(string)NV12 ! filesink location=test.nv12", "\0\0\0\0", CAP_GSTREAMER };
+        result.push_back(e);
+    }
+    {
+        Ext_Fourcc_API e = { "appsrc ! videoconvert ! video/x-raw, format=(string)I420 ! matroskamux ! filesink location=test.mkv", "\0\0\0\0", CAP_GSTREAMER };
+        result.push_back(e);
+    }
+    return result;
+}
+
+INSTANTIATE_TEST_CASE_P(videoio, Videoio_Writer, testing::ValuesIn(generate_Ext_Fourcc_API()));
+
+
 TEST(Videoio, exceptions)
 {
     VideoCapture cap;
diff --git a/platforms/scripts/valgrind.supp b/platforms/scripts/valgrind.supp
index ec4fff5ffb..3fdacb737f 100644
--- a/platforms/scripts/valgrind.supp
+++ b/platforms/scripts/valgrind.supp
@@ -218,6 +218,14 @@
    fun:__itt_*create*
 }
 
+{
+   OpenCV-gtk_init
+   Memcheck:Leak
+   ...
+   fun:gtk_init
+   fun:cvInitSystem
+}
+
 {
    OpenCV-FFmpeg-swsscale
    Memcheck:Addr16
@@ -227,6 +235,35 @@
    fun:cvWriteFrame_FFMPEG
 }
 
+{
+   OpenCV-GStreamer-gst_init
+   Memcheck:Leak
+   ...
+   fun:gst_init
+}
+
+{
+   OpenCV-GStreamer-gst_deinit
+   Memcheck:Leak
+   ...
+   fun:gst_deinit
+}
+
+{
+   OpenCV-GStreamer-gst_init_check
+   Memcheck:Leak
+   ...
+   fun:gst_init_check
+}
+
+{
+   OpenCV-GStreamer-gst_parse_launch_full-reachable
+   Memcheck:Leak
+   match-leak-kinds: reachable
+   ...
+   fun:gst_parse_launch_full
+}
+
 {
    OpenCV-OpenEXR-ThreadPool
    Memcheck:Leak
diff --git a/platforms/scripts/valgrind_3rdparty.supp b/platforms/scripts/valgrind_3rdparty.supp
index 50811d112d..8ca0afbecf 100644
--- a/platforms/scripts/valgrind_3rdparty.supp
+++ b/platforms/scripts/valgrind_3rdparty.supp
@@ -18,6 +18,15 @@
    fun:_ZN7testing8internal11CmpHelperLEIddEENS_15AssertionResultEPKcS4_RKT_RKT0_
 }
 
+{
+   GTest-RegisterTests
+   Memcheck:Leak
+   ...
+   fun:RegisterTests
+   ...
+   fun:_ZN7testing14InitGoogleTestEPiPPc
+}
+
 {
    OpenCL
    Memcheck:Cond
@@ -55,10 +64,10 @@
 }
 
 {
-   glib
+   GTK-css
    Memcheck:Leak
-   fun:*alloc
-   obj:*/libglib*
+   ...
+   fun:gtk_css_provider*
 }
 
 {
@@ -120,3 +129,89 @@
    ...
    fun:cvWriteFrame_FFMPEG
 }
+
+{
+   GStreamer-orc_program_compile_full
+   Memcheck:Leak
+   match-leak-kinds: reachable
+   ...
+   fun:orc_program_compile_full
+   ...
+   fun:clone
+}
+
+{
+   GStreamer-orc_program_new_from_static_bytecode
+   Memcheck:Leak
+   match-leak-kinds: reachable
+   ...
+   fun:orc_program_new_from_static_bytecode
+   ...
+   fun:clone
+}
+
+{
+   GStreamer-matroska-other
+   Memcheck:Leak
+   ...
+   fun:gst*
+   obj:*gstmatroska*
+   ...
+   obj:*glib*
+   fun:start_thread
+   fun:clone
+}
+
+{
+   GStreamer-matroska-gst_riff_create_video_caps
+   Memcheck:Leak
+   ...
+   fun:gst_riff_create_video_caps
+   obj:*gstmatroska*
+   ...
+   fun:clone
+}
+
+
+{
+   GStreamer-tls
+   Memcheck:Leak
+   match-leak-kinds: possible
+   fun:calloc
+   fun:allocate_dtv
+   fun:_dl_allocate_tls
+}
+
+{
+   GStreamer-registry
+   Memcheck:Leak
+   ...
+   fun:gst_update_registry
+}
+
+{
+   GStreamer-plugin_load
+   Memcheck:Leak
+   ...
+   fun:gst_plugin_load_by_name
+}
+
+{
+   GStreamer-separate-threads
+   Memcheck:Leak
+   ...
+   obj:*/libglib*
+   fun:start_thread
+   fun:clone
+}
+
+{
+   clone-unknown-leak
+   Memcheck:Leak
+   match-leak-kinds: definite
+   fun:_Znwm
+   obj:*
+   obj:*
+   fun:start_thread
+   fun:clone
+}
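
Note (not part of the patch): the Videoio_Writer tests added above drive the public cv::VideoWriter API with an explicit GStreamer pipeline string and CAP_GSTREAMER as the apiPreference, passing 0 for fourcc. The standalone sketch below illustrates that usage pattern; the pipeline string is copied from generate_Ext_Fourcc_API(), while the frame-writing loop and frame count are illustrative assumptions (the tests themselves deliberately release the writer without writing any frames).

// Minimal sketch of writing through the GStreamer backend with a user-supplied pipeline.
#include <string>
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>

int main()
{
    // Pipeline string taken from the test parameters added in this patch.
    const std::string pipeline =
        "appsrc ! videoconvert ! video/x-raw, format=(string)I420 ! matroskamux ! filesink location=test.mkv";
    const cv::Size frameSize(640, 480);

    cv::VideoWriter writer;
    // When a full pipeline is supplied, the tests above pass 0 for fourcc.
    if (!writer.open(pipeline, cv::CAP_GSTREAMER, 0 /*fourcc*/, 30.0 /*fps*/, frameSize, true /*isColor*/))
    {
        std::cerr << "GStreamer backend unavailable or the pipeline could not be linked" << std::endl;
        return 1;
    }

    cv::Mat frame(frameSize, CV_8UC3, cv::Scalar::all(127)); // flat gray test frame
    for (int i = 0; i < 30; i++)
        writer << frame;   // push synthetic frames through appsrc

    writer.release();      // flushes the pipeline and finalizes test.mkv
    return 0;
}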