From f9e674180708a26ad05865040ec09f87a0d5edfa Mon Sep 17 00:00:00 2001 From: Dmitry Trifonov Date: Mon, 27 Jun 2016 10:04:59 -0700 Subject: [PATCH] added support for OpenNI2 IR stream #4366 --- modules/videoio/include/opencv2/videoio.hpp | 14 +- .../include/opencv2/videoio/videoio_c.h | 10 +- modules/videoio/src/cap_openni2.cpp | 466 ++++++++++++------ samples/cpp/openni_capture.cpp | 77 ++- 4 files changed, 393 insertions(+), 174 deletions(-) diff --git a/modules/videoio/include/opencv2/videoio.hpp b/modules/videoio/include/opencv2/videoio.hpp index 6b3b97b6af..be49b76ae7 100644 --- a/modules/videoio/include/opencv2/videoio.hpp +++ b/modules/videoio/include/opencv2/videoio.hpp @@ -162,7 +162,8 @@ enum { CAP_PROP_DC1394_OFF = -4, //turn the feature off (not cont // OpenNI map generators enum { CAP_OPENNI_DEPTH_GENERATOR = 1 << 31, CAP_OPENNI_IMAGE_GENERATOR = 1 << 30, - CAP_OPENNI_GENERATORS_MASK = CAP_OPENNI_DEPTH_GENERATOR + CAP_OPENNI_IMAGE_GENERATOR + CAP_OPENNI_IR_GENERATOR = 1 << 29, + CAP_OPENNI_GENERATORS_MASK = CAP_OPENNI_DEPTH_GENERATOR + CAP_OPENNI_IMAGE_GENERATOR + CAP_OPENNI_IR_GENERATOR }; // Properties of cameras available through OpenNI interfaces @@ -183,13 +184,15 @@ enum { CAP_PROP_OPENNI_OUTPUT_MODE = 100, CAP_PROP_OPENNI2_MIRROR = 111 }; -// OpenNI shortcats +// OpenNI shortcuts enum { CAP_OPENNI_IMAGE_GENERATOR_PRESENT = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT, CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_OUTPUT_MODE, + CAP_OPENNI_DEPTH_GENERATOR_PRESENT = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT, CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_BASELINE, CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_FOCAL_LENGTH, CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_REGISTRATION, - CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = 
CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION + CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION, + CAP_OPENNI_IR_GENERATOR_PRESENT = CAP_OPENNI_IR_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT, }; // OpenNI data given from depth generator @@ -201,7 +204,10 @@ enum { CAP_OPENNI_DEPTH_MAP = 0, // Depth values in mm (CV_16UC1) // Data given from RGB image generator CAP_OPENNI_BGR_IMAGE = 5, - CAP_OPENNI_GRAY_IMAGE = 6 + CAP_OPENNI_GRAY_IMAGE = 6, + + // Data given from IR image generator + CAP_OPENNI_IR_IMAGE = 7 }; // Supported output modes of OpenNI image generator diff --git a/modules/videoio/include/opencv2/videoio/videoio_c.h b/modules/videoio/include/opencv2/videoio/videoio_c.h index 91d26ea5ae..e502c7be83 100644 --- a/modules/videoio/include/opencv2/videoio/videoio_c.h +++ b/modules/videoio/include/opencv2/videoio/videoio_c.h @@ -200,7 +200,8 @@ enum // OpenNI map generators CV_CAP_OPENNI_DEPTH_GENERATOR = 1 << 31, CV_CAP_OPENNI_IMAGE_GENERATOR = 1 << 30, - CV_CAP_OPENNI_GENERATORS_MASK = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_OPENNI_IMAGE_GENERATOR, + CV_CAP_OPENNI_IR_GENERATOR = 1 << 29, + CV_CAP_OPENNI_GENERATORS_MASK = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_OPENNI_IR_GENERATOR, // Properties of cameras available through OpenNI interfaces CV_CAP_PROP_OPENNI_OUTPUT_MODE = 100, @@ -222,10 +223,12 @@ enum CV_CAP_OPENNI_IMAGE_GENERATOR_PRESENT = CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_OPENNI_GENERATOR_PRESENT, CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_OPENNI_OUTPUT_MODE, + CV_CAP_OPENNI_DEPTH_GENERATOR_PRESENT = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_GENERATOR_PRESENT, CV_CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_BASELINE, CV_CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_FOCAL_LENGTH, CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = 
CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_REGISTRATION, CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION, + CV_CAP_OPENNI_IR_GENERATOR_PRESENT = CV_CAP_OPENNI_IR_GENERATOR + CV_CAP_PROP_OPENNI_GENERATOR_PRESENT, // Properties of cameras available through GStreamer interface CV_CAP_GSTREAMER_QUEUE_LENGTH = 200, // default is 1 @@ -454,7 +457,10 @@ enum // Data given from RGB image generator. CV_CAP_OPENNI_BGR_IMAGE = 5, - CV_CAP_OPENNI_GRAY_IMAGE = 6 + CV_CAP_OPENNI_GRAY_IMAGE = 6, + + // Data given from IR image generator. + CV_CAP_OPENNI_IR_IMAGE = 7 }; // Supported output modes of OpenNI image generator diff --git a/modules/videoio/src/cap_openni2.cpp b/modules/videoio/src/cap_openni2.cpp index 12faa297f2..aebf1889a1 100644 --- a/modules/videoio/src/cap_openni2.cpp +++ b/modules/videoio/src/cap_openni2.cpp @@ -65,8 +65,10 @@ #define CV_STREAM_TIMEOUT 2000 -#define CV_DEPTH_STREAM 0 -#define CV_COLOR_STREAM 1 +#define CV_DEPTH_STREAM 0 +#define CV_COLOR_STREAM 1 +#define CV_IR_STREAM 2 +#define CV_MAX_NUM_STREAMS 3 #include "OpenNI.h" #include "PS1080.h" @@ -109,10 +111,9 @@ protected: IplImage iplHeader; }; - static const int outputMapsTypesCount = 7; + static const int outputMapsTypesCount = 8; - static openni::VideoMode defaultColorOutputMode(); - static openni::VideoMode defaultDepthOutputMode(); + static openni::VideoMode defaultStreamOutputMode(int stream); IplImage* retrieveDepthMap(); IplImage* retrievePointCloudMap(); @@ -121,13 +122,17 @@ protected: IplImage* retrieveValidDepthMask(); IplImage* retrieveBGRImage(); IplImage* retrieveGrayImage(); + IplImage* retrieveIrImage(); + openni::Status toggleStream(int stream, bool toggle); bool readCamerasParams(); double getDepthGeneratorProperty(int propIdx) const; bool setDepthGeneratorProperty(int propIdx, double propVal); double getImageGeneratorProperty(int propIdx) const; bool setImageGeneratorProperty(int propIdx, double propVal); + double 
getIrGeneratorProperty(int propIdx) const; + bool setIrGeneratorProperty(int propIdx, double propVal); double getCommonProperty(int propIdx) const; bool setCommonProperty(int propIdx, double propVal); @@ -137,9 +142,9 @@ protected: openni::Recorder recorder; // Data generators with its metadata - openni::VideoStream depth, color, **streams; - openni::VideoFrameRef depthFrame, colorFrame; - cv::Mat depthImage, colorImage; + openni::VideoStream streams[CV_MAX_NUM_STREAMS]; + openni::VideoFrameRef streamFrames[CV_MAX_NUM_STREAMS]; + cv::Mat streamImages[CV_MAX_NUM_STREAMS]; int maxBufferSize, maxTimeDuration; // for approx sync bool isCircleBuffer; @@ -157,9 +162,6 @@ protected: // The value for pixels without a valid disparity measurement int noSampleValue; - int currentStream; - - int numStream; std::vector outputMaps; }; @@ -177,27 +179,28 @@ bool CvCapture_OpenNI2::isOpened() const return isContextOpened; } -openni::VideoMode CvCapture_OpenNI2::defaultColorOutputMode() -{ - openni::VideoMode mode; - mode.setResolution(640, 480); - mode.setFps(30); - mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888); - return mode; -} - -openni::VideoMode CvCapture_OpenNI2::defaultDepthOutputMode() +openni::VideoMode CvCapture_OpenNI2::defaultStreamOutputMode(int stream) { openni::VideoMode mode; mode.setResolution(640, 480); mode.setFps(30); - mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM); + switch (stream) + { + case CV_DEPTH_STREAM: + mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM); + break; + case CV_COLOR_STREAM: + mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888); + break; + case CV_IR_STREAM: + mode.setPixelFormat(openni::PIXEL_FORMAT_GRAY16); + break; + } return mode; } CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) { - numStream = 2; const char* deviceURI = openni::ANY_DEVICE; openni::Status status; int deviceType = DEVICE_DEFAULT; @@ -215,13 +218,6 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) index %= 10; } - // Asus XTION and Occipital Structure 
Sensor do not have an image generator - if (deviceType == DEVICE_ASUS_XTION) - numStream = 1; - - if( deviceType > DEVICE_MAX ) - return; - // Initialize and configure the context. status = openni::OpenNI::initialize(); @@ -247,83 +243,98 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) return; } - //device.setDepthColorSyncEnabled(true); - - - status = depth.create(device, openni::SENSOR_DEPTH); - if (status == openni::STATUS_OK) + status = toggleStream(CV_DEPTH_STREAM, true); + // Asus XTION and Occipital Structure Sensor do not have an image generator + if (deviceType != DEVICE_ASUS_XTION) + status = openni::Status(status | toggleStream(CV_COLOR_STREAM, true)); + if (status != openni::STATUS_OK) { - if (depth.isValid()) - { - CV_Assert(depth.setVideoMode(defaultDepthOutputMode()) == openni::STATUS_OK); // xn::DepthGenerator supports VGA only! (Jan 2011) - } - - status = depth.start(); - if (status != openni::STATUS_OK) - { - CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start depth stream: %s\n", openni::OpenNI::getExtendedError())); - depth.destroy(); - return; - } + openni::OpenNI::shutdown(); + return; } - else + + if (!readCamerasParams()) { - CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find depth stream:: %s\n", openni::OpenNI::getExtendedError())); + CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n")); return; } - streams = new openni::VideoStream*[numStream]; - streams[CV_DEPTH_STREAM] = &depth; - // create a color object - status = color.create(device, openni::SENSOR_COLOR); - if (status == openni::STATUS_OK) + outputMaps.resize( outputMapsTypesCount ); + + isContextOpened = true; + + setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0); +} + +openni::Status CvCapture_OpenNI2::toggleStream(int stream, bool toggle) +{ + openni::Status status; + + // for logging + static const char* stream_names[CV_MAX_NUM_STREAMS] = { + 
"depth", + "color", + "IR" + }; + + static const openni::SensorType stream_sensor_types[CV_MAX_NUM_STREAMS] = { + openni::SENSOR_DEPTH, + openni::SENSOR_COLOR, + openni::SENSOR_IR + }; + + if (toggle) // want to open stream { - // Set map output mode. - if (color.isValid()) + // already opened + if (streams[stream].isValid()) + return openni::STATUS_OK; + + // open stream + status = streams[stream].create(device, stream_sensor_types[stream]); + if (status == openni::STATUS_OK) { - CV_Assert(color.setVideoMode(defaultColorOutputMode()) == openni::STATUS_OK); + // set video mode + status = streams[stream].setVideoMode(defaultStreamOutputMode(stream)); // xn::DepthGenerator supports VGA only! (Jan 2011) + if (status != openni::STATUS_OK) + { + CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't set %s stream output mode: %s\n", + stream_names[stream], + openni::OpenNI::getExtendedError())); + streams[stream].destroy(); + return status; + } + + // start stream + status = streams[stream].start(); + if (status != openni::STATUS_OK) + { + CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start %s stream: %s\n", + stream_names[stream], + openni::OpenNI::getExtendedError())); + streams[stream].destroy(); + return status; + } } - status = color.start(); - if (status != openni::STATUS_OK) + else { - CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start color stream: %s\n", openni::OpenNI::getExtendedError())); - color.destroy(); - return; + CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find %s stream:: %s\n", + stream_names[stream], + openni::OpenNI::getExtendedError())); + return status; } - streams[CV_COLOR_STREAM] = &color; } - else if (numStream == 2) + else if (streams[stream].isValid()) // want to close stream { - CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find color stream: %s\n", 
openni::OpenNI::getExtendedError())); - return; + streams[stream].stop(); + streams[stream].destroy(); } - if( !readCamerasParams() ) - { - CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n")); - return; - } - -// if( deviceType == DEVICE_ASUS_XTION ) -// { -// //ps/asus specific -// imageGenerator.SetIntProperty("InputFormat", 1 /*XN_IO_IMAGE_FORMAT_YUV422*/); -// imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24); -// depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/); -// } - - - outputMaps.resize( outputMapsTypesCount ); - - isContextOpened = true; - - setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0); + return openni::STATUS_OK; } CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename) { - numStream = 2; openni::Status status; isContextOpened = false; @@ -348,6 +359,13 @@ CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename) return; } + status = openni::Status(toggleStream(CV_DEPTH_STREAM, true) | toggleStream(CV_COLOR_STREAM, true)); + if (status != openni::STATUS_OK) + { + openni::OpenNI::shutdown(); + return; + } + if( !readCamerasParams() ) { CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n")); @@ -361,17 +379,20 @@ CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename) CvCapture_OpenNI2::~CvCapture_OpenNI2() { - this->depthFrame.release(); - this->colorFrame.release(); - this->depth.stop(); - this->color.stop(); + for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i) + { + streamFrames[i].release(); + streams[i].stop(); + streams[i].destroy(); + } + device.close(); openni::OpenNI::shutdown(); } bool CvCapture_OpenNI2::readCamerasParams() { double pixelSize = 0; - if (depth.getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE, &pixelSize) != openni::STATUS_OK) + if (streams[CV_DEPTH_STREAM].getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE, &pixelSize) != openni::STATUS_OK) { CV_Error(CV_StsError, 
cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read pixel size!\n")); return false; @@ -382,13 +403,13 @@ bool CvCapture_OpenNI2::readCamerasParams() // focal length of IR camera in pixels for VGA resolution int zeroPlanDistance; // in mm - if (depth.getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE, &zeroPlanDistance) != openni::STATUS_OK) + if (streams[CV_DEPTH_STREAM].getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE, &zeroPlanDistance) != openni::STATUS_OK) { CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read virtual plane distance!\n")); return false; } - if (depth.getProperty(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, &baseline) != openni::STATUS_OK) + if (streams[CV_DEPTH_STREAM].getProperty(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, &baseline) != openni::STATUS_OK) { CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read base line!\n")); return false; @@ -419,6 +440,10 @@ double CvCapture_OpenNI2::getProperty( int propIdx ) const { propValue = getDepthGeneratorProperty( purePropIdx ); } + else if ((propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IR_GENERATOR) + { + propValue = getIrGeneratorProperty(purePropIdx); + } else { propValue = getCommonProperty( purePropIdx ); @@ -443,6 +468,10 @@ bool CvCapture_OpenNI2::setProperty( int propIdx, double propValue ) { isSet = setDepthGeneratorProperty( purePropIdx, propValue ); } + else if ((propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IR_GENERATOR) + { + isSet = setIrGeneratorProperty(purePropIdx, propValue); + } else { isSet = setCommonProperty( purePropIdx, propValue ); @@ -458,12 +487,6 @@ double CvCapture_OpenNI2::getCommonProperty( int propIdx ) const switch( propIdx ) { - // There is a set of properties that correspond to depth generator by default - // (is they are pass without particular generator flag). Two reasons of this: - // 1) We can assume that depth generator is the main one for depth sensor. 
- // 2) In the initial vertions of OpenNI integration to OpenCV the value of - // flag CV_CAP_OPENNI_DEPTH_GENERATOR was 0 (it isn't zero now). - case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : case CV_CAP_PROP_FRAME_WIDTH : case CV_CAP_PROP_FRAME_HEIGHT : case CV_CAP_PROP_FPS : @@ -477,7 +500,9 @@ double CvCapture_OpenNI2::getCommonProperty( int propIdx ) const propValue = const_cast(this)->device.getDepthColorSyncEnabled(); case CV_CAP_PROP_OPENNI2_MIRROR: { - bool isMirroring = color.getMirroringEnabled() && depth.getMirroringEnabled(); + bool isMirroring = false; + for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i) + isMirroring |= streams[i].getMirroringEnabled(); propValue = isMirroring ? 1.0 : 0.0; break; } @@ -497,8 +522,11 @@ bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue ) case CV_CAP_PROP_OPENNI2_MIRROR: { bool mirror = propValue > 0.0 ? true : false; - isSet = color.setMirroringEnabled(mirror) == openni::STATUS_OK; - isSet = depth.setMirroringEnabled(mirror) == openni::STATUS_OK; + for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i) + { + if (streams[i].isValid()) + isSet |= streams[i].setMirroringEnabled(mirror) == openni::STATUS_OK; + } } break; // There is a set of properties that correspond to depth generator by default @@ -509,6 +537,7 @@ bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue ) case CV_CAP_PROP_OPENNI2_SYNC: isSet = device.setDepthColorSyncEnabled(propValue > 0.0) == openni::STATUS_OK; break; + default: CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.\n", propIdx) ); } @@ -519,29 +548,28 @@ bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue ) double CvCapture_OpenNI2::getDepthGeneratorProperty( int propIdx ) const { double propValue = 0; - if( !depth.isValid() ) + if( !streams[CV_DEPTH_STREAM].isValid() ) return propValue; openni::VideoMode mode; switch( propIdx ) { - case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : - 
CV_DbgAssert(depth.isValid()); - propValue = 1.; + case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT: + propValue = streams[CV_DEPTH_STREAM].isValid(); break; case CV_CAP_PROP_FRAME_WIDTH : - propValue = depth.getVideoMode().getResolutionX(); + propValue = streams[CV_DEPTH_STREAM].getVideoMode().getResolutionX(); break; case CV_CAP_PROP_FRAME_HEIGHT : - propValue = depth.getVideoMode().getResolutionY(); + propValue = streams[CV_DEPTH_STREAM].getVideoMode().getResolutionY(); break; case CV_CAP_PROP_FPS : - mode = depth.getVideoMode(); + mode = streams[CV_DEPTH_STREAM].getVideoMode(); propValue = mode.getFps(); break; case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH : - propValue = depth.getMaxPixelValue(); + propValue = streams[CV_DEPTH_STREAM].getMaxPixelValue(); break; case CV_CAP_PROP_OPENNI_BASELINE : propValue = baseline; @@ -553,10 +581,10 @@ double CvCapture_OpenNI2::getDepthGeneratorProperty( int propIdx ) const propValue = device.getImageRegistrationMode(); break; case CV_CAP_PROP_POS_MSEC : - propValue = (double)depthFrame.getTimestamp(); + propValue = (double)streamFrames[CV_DEPTH_STREAM].getTimestamp(); break; case CV_CAP_PROP_POS_FRAMES : - propValue = depthFrame.getFrameIndex(); + propValue = streamFrames[CV_DEPTH_STREAM].getFrameIndex(); break; default : CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) ); @@ -569,17 +597,20 @@ bool CvCapture_OpenNI2::setDepthGeneratorProperty( int propIdx, double propValue { bool isSet = false; - CV_Assert( depth.isValid() ); - switch( propIdx ) { + case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT: + if (isContextOpened) + isSet = toggleStream(CV_DEPTH_STREAM, propValue > 0.0) == openni::STATUS_OK; + break; case CV_CAP_PROP_OPENNI_REGISTRATION: { + CV_Assert(streams[CV_DEPTH_STREAM].isValid()); if( propValue != 0.0 ) // "on" { // if there isn't image generator (i.e. 
ASUS XtionPro doesn't have it) // then the property isn't avaliable - if ( color.isValid() ) + if ( streams[CV_COLOR_STREAM].isValid() ) { openni::ImageRegistrationMode mode = propValue != 0.0 ? openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR : openni::IMAGE_REGISTRATION_OFF; if( !device.getImageRegistrationMode() == mode ) @@ -619,30 +650,29 @@ bool CvCapture_OpenNI2::setDepthGeneratorProperty( int propIdx, double propValue double CvCapture_OpenNI2::getImageGeneratorProperty( int propIdx ) const { double propValue = 0.; - if( !color.isValid() ) + if( !streams[CV_COLOR_STREAM].isValid() ) return propValue; openni::VideoMode mode; switch( propIdx ) { - case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : - CV_DbgAssert( color.isValid() ); - propValue = 1.; + case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT: + propValue = streams[CV_COLOR_STREAM].isValid(); break; case CV_CAP_PROP_FRAME_WIDTH : - propValue = color.getVideoMode().getResolutionX(); + propValue = streams[CV_COLOR_STREAM].getVideoMode().getResolutionX(); break; case CV_CAP_PROP_FRAME_HEIGHT : - propValue = color.getVideoMode().getResolutionY(); + propValue = streams[CV_COLOR_STREAM].getVideoMode().getResolutionY(); break; case CV_CAP_PROP_FPS : - propValue = color.getVideoMode().getFps(); + propValue = streams[CV_COLOR_STREAM].getVideoMode().getFps(); break; case CV_CAP_PROP_POS_MSEC : - propValue = (double)colorFrame.getTimestamp(); + propValue = (double)streamFrames[CV_COLOR_STREAM].getTimestamp(); break; case CV_CAP_PROP_POS_FRAMES : - propValue = (double)colorFrame.getFrameIndex(); + propValue = (double)streamFrames[CV_COLOR_STREAM].getFrameIndex(); break; default : CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) ); @@ -654,14 +684,18 @@ double CvCapture_OpenNI2::getImageGeneratorProperty( int propIdx ) const bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue) { bool isSet = false; - if( !color.isValid() ) - return 
isSet; switch( propIdx ) { + case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT: + if (isContextOpened) + isSet = toggleStream(CV_COLOR_STREAM, propValue > 0.0) == openni::STATUS_OK; + break; case CV_CAP_PROP_OPENNI_OUTPUT_MODE : { - openni::VideoMode mode = color.getVideoMode(); + if (!streams[CV_COLOR_STREAM].isValid()) + return isSet; + openni::VideoMode mode = streams[CV_COLOR_STREAM].getVideoMode(); switch( cvRound(propValue) ) { @@ -689,7 +723,7 @@ bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue) CV_Error( CV_StsBadArg, "Unsupported image generator output mode.\n"); } - openni::Status status = color.setVideoMode( mode ); + openni::Status status = streams[CV_COLOR_STREAM].setVideoMode( mode ); if( status != openni::STATUS_OK ) CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setImageGeneratorProperty : %s\n", openni::OpenNI::getExtendedError())); else @@ -703,6 +737,96 @@ bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue) return isSet; } +double CvCapture_OpenNI2::getIrGeneratorProperty(int propIdx) const +{ + double propValue = 0.; + if (!streams[CV_IR_STREAM].isValid()) + return propValue; + + openni::VideoMode mode; + switch (propIdx) + { + case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT: + propValue = streams[CV_IR_STREAM].isValid(); + break; + case CV_CAP_PROP_FRAME_WIDTH: + propValue = streams[CV_IR_STREAM].getVideoMode().getResolutionX(); + break; + case CV_CAP_PROP_FRAME_HEIGHT: + propValue = streams[CV_IR_STREAM].getVideoMode().getResolutionY(); + break; + case CV_CAP_PROP_FPS: + propValue = streams[CV_IR_STREAM].getVideoMode().getFps(); + break; + case CV_CAP_PROP_POS_MSEC: + propValue = (double)streamFrames[CV_IR_STREAM].getTimestamp(); + break; + case CV_CAP_PROP_POS_FRAMES: + propValue = (double)streamFrames[CV_IR_STREAM].getFrameIndex(); + break; + default: + CV_Error(CV_StsBadArg, cv::format("IR generator does not support such parameter (propIdx=%d) for getting.\n", propIdx)); + } + + return
propValue; +} + +bool CvCapture_OpenNI2::setIrGeneratorProperty(int propIdx, double propValue) +{ + bool isSet = false; + + switch (propIdx) + { + case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT: + if (isContextOpened) + isSet = toggleStream(CV_IR_STREAM, propValue > 0.0) == openni::STATUS_OK; + break; + case CV_CAP_PROP_OPENNI_OUTPUT_MODE: + { + if (!streams[CV_IR_STREAM].isValid()) + return isSet; + openni::VideoMode mode = streams[CV_IR_STREAM].getVideoMode(); + + switch (cvRound(propValue)) + { + case CV_CAP_OPENNI_VGA_30HZ: + mode.setResolution(640, 480); + mode.setFps(30); + break; + case CV_CAP_OPENNI_SXGA_15HZ: + mode.setResolution(1280, 960); + mode.setFps(15); + break; + case CV_CAP_OPENNI_SXGA_30HZ: + mode.setResolution(1280, 960); + mode.setFps(30); + break; + case CV_CAP_OPENNI_QVGA_30HZ: + mode.setResolution(320, 240); + mode.setFps(30); + break; + case CV_CAP_OPENNI_QVGA_60HZ: + mode.setResolution(320, 240); + mode.setFps(60); + break; + default: + CV_Error(CV_StsBadArg, "Unsupported image generator output mode.\n"); + } + + openni::Status status = streams[CV_IR_STREAM].setVideoMode(mode); + if (status != openni::STATUS_OK) + CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setImageGeneratorProperty : %s\n", openni::OpenNI::getExtendedError())); + else + isSet = true; + break; + } + default: + CV_Error(CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for setting.\n", propIdx)); + } + + return isSet; +} + bool CvCapture_OpenNI2::grabFrame() { if( !isOpened() ) @@ -710,14 +834,22 @@ bool CvCapture_OpenNI2::grabFrame() bool isGrabbed = false; - openni::Status status = openni::OpenNI::waitForAnyStream(streams, numStream, ¤tStream, CV_STREAM_TIMEOUT); + int numActiveStreams = 0; + openni::VideoStream* streamPtrs[CV_MAX_NUM_STREAMS]; + for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i) { + streamPtrs[numActiveStreams++] = &streams[i]; + } + + int currentStream; + openni::Status status = 
openni::OpenNI::waitForAnyStream(streamPtrs, numActiveStreams, &currentStream, CV_STREAM_TIMEOUT); if( status != openni::STATUS_OK ) return false; - if( depth.isValid() ) - depth.readFrame(&depthFrame); - if (color.isValid()) - color.readFrame(&colorFrame); + for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i) + { + if (streams[i].isValid()) + streams[i].readFrame(&streamFrames[i]); + } isGrabbed = true; return isGrabbed; @@ -736,25 +868,25 @@ inline void getDepthMapFromMetaData(const openni::VideoFrameRef& depthMetaData, IplImage* CvCapture_OpenNI2::retrieveDepthMap() { - if( !depth.isValid() ) + if( !streamFrames[CV_DEPTH_STREAM].isValid() ) return 0; - getDepthMapFromMetaData( depthFrame, outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue ); + getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue ); return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr(); } IplImage* CvCapture_OpenNI2::retrievePointCloudMap() { - if( !depthFrame.isValid() ) + if( !streamFrames[CV_DEPTH_STREAM].isValid() ) return 0; cv::Mat depthImg; - getDepthMapFromMetaData(depthFrame, depthImg, noSampleValue, shadowValue); + getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], depthImg, noSampleValue, shadowValue); const int badPoint = INVALID_PIXEL_VAL; const float badCoord = INVALID_COORDINATE_VAL; - int cols = depthFrame.getWidth(), rows = depthFrame.getHeight(); + int cols = streamFrames[CV_DEPTH_STREAM].getWidth(), rows = streamFrames[CV_DEPTH_STREAM].getHeight(); cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) ); float worldX, worldY, worldZ; @@ -762,7 +894,7 @@ IplImage* CvCapture_OpenNI2::retrievePointCloudMap() { for (int x = 0; x < cols; x++) { - openni::CoordinateConverter::convertDepthToWorld(depth, x, y, depthImg.at(y, x), &worldX, &worldY, &worldZ); + openni::CoordinateConverter::convertDepthToWorld(streams[CV_DEPTH_STREAM], x, y, depthImg.at(y, x), &worldX, &worldY, &worldZ);
if (depthImg.at(y, x) == badPoint) // not valid pointCloud_XYZ.at(y, x) = cv::Point3f(badCoord, badCoord, badCoord); @@ -803,11 +935,11 @@ static void computeDisparity_32F( const openni::VideoFrameRef& depthMetaData, cv IplImage* CvCapture_OpenNI2::retrieveDisparityMap() { - if (!depthFrame.isValid()) + if (!streamFrames[CV_DEPTH_STREAM].isValid()) return 0; cv::Mat disp32; - computeDisparity_32F(depthFrame, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue); + computeDisparity_32F(streamFrames[CV_DEPTH_STREAM], disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue); disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 ); @@ -816,21 +948,21 @@ IplImage* CvCapture_OpenNI2::retrieveDisparityMap() IplImage* CvCapture_OpenNI2::retrieveDisparityMap_32F() { - if (!depthFrame.isValid()) + if (!streamFrames[CV_DEPTH_STREAM].isValid()) return 0; - computeDisparity_32F(depthFrame, outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue); + computeDisparity_32F(streamFrames[CV_DEPTH_STREAM], outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue); return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr(); } IplImage* CvCapture_OpenNI2::retrieveValidDepthMask() { - if (!depthFrame.isValid()) + if (!streamFrames[CV_DEPTH_STREAM].isValid()) return 0; cv::Mat d; - getDepthMapFromMetaData(depthFrame, d, noSampleValue, shadowValue); + getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], d, noSampleValue, shadowValue); outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = d != CvCapture_OpenNI2::INVALID_PIXEL_VAL; @@ -850,30 +982,58 @@ inline void getBGRImageFromMetaData( const openni::VideoFrameRef& imageMetaData, cv::cvtColor(bufferImage, bgrImage, cv::COLOR_RGB2BGR); } +inline void getGrayImageFromMetaData(const openni::VideoFrameRef& imageMetaData, cv::Mat& grayImage) +{ + if (imageMetaData.getVideoMode().getPixelFormat() 
== openni::PIXEL_FORMAT_GRAY8) + { + grayImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_8UC1); + grayImage.data = (uchar*)imageMetaData.getData(); + } + else if (imageMetaData.getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_GRAY16) + { + grayImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_16UC1); + grayImage.data = (uchar*)imageMetaData.getData(); + } + else + { + CV_Error(CV_StsUnsupportedFormat, "Unsupported format of grabbed image\n"); + } +} + IplImage* CvCapture_OpenNI2::retrieveBGRImage() { - if( !color.isValid() ) + if( !streamFrames[CV_COLOR_STREAM].isValid() ) return 0; - getBGRImageFromMetaData( colorFrame, outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat ); + getBGRImageFromMetaData(streamFrames[CV_COLOR_STREAM], outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat ); return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr(); } IplImage* CvCapture_OpenNI2::retrieveGrayImage() { - if (!colorFrame.isValid()) + if (!streamFrames[CV_COLOR_STREAM].isValid()) return 0; - CV_Assert(colorFrame.getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_RGB888); // RGB + CV_Assert(streamFrames[CV_COLOR_STREAM].getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_RGB888); // RGB cv::Mat rgbImage; - getBGRImageFromMetaData(colorFrame, rgbImage); + getBGRImageFromMetaData(streamFrames[CV_COLOR_STREAM], rgbImage); cv::cvtColor( rgbImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY ); return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr(); } +IplImage* CvCapture_OpenNI2::retrieveIrImage() +{ + if (!streamFrames[CV_IR_STREAM].isValid()) + return 0; + + getGrayImageFromMetaData(streamFrames[CV_IR_STREAM], outputMaps[CV_CAP_OPENNI_IR_IMAGE].mat); + + return outputMaps[CV_CAP_OPENNI_IR_IMAGE].getIplImagePtr(); +} + IplImage* CvCapture_OpenNI2::retrieveFrame( int outputType ) { IplImage* image = 0; @@ -907,6 +1067,10 @@ IplImage* CvCapture_OpenNI2::retrieveFrame( int outputType ) { image = retrieveGrayImage(); } + else if( 
outputType == CV_CAP_OPENNI_IR_IMAGE ) + { + image = retrieveIrImage(); + } return image; } diff --git a/samples/cpp/openni_capture.cpp b/samples/cpp/openni_capture.cpp index 70d4a7c610..f071ff66ea 100644 --- a/samples/cpp/openni_capture.cpp +++ b/samples/cpp/openni_capture.cpp @@ -21,6 +21,8 @@ static void help() "2.) Data given from RGB image generator\n" " CAP_OPENNI_BGR_IMAGE - color image (CV_8UC3)\n" " CAP_OPENNI_GRAY_IMAGE - gray image (CV_8UC1)\n" + "3.) Data given from IR image generator\n" + " CAP_OPENNI_IR_IMAGE - gray image (CV_16UC1)\n" << endl; } @@ -92,8 +94,8 @@ static void printCommandLineParams() cout << "-mode= image mode: resolution and fps, supported three values: 0 - CAP_OPENNI_VGA_30HZ, 1 - CAP_OPENNI_SXGA_15HZ," << endl; cout << " 2 - CAP_OPENNI_SXGA_30HZ (0 by default). Ignored if rgb image or gray image are not selected to show." << endl; cout << "-m= Mask to set which output images are need. It is a string of size 5. Each element of this is '0' or '1' and" << endl; - cout << " determine: is depth map, disparity map, valid pixels mask, rgb image, gray image need or not (correspondently)?" << endl ; - cout << " By default -m=01010 i.e. disparity map and rgb image will be shown." << endl ; + cout << " determine: is depth map, disparity map, valid pixels mask, rgb image, gray image need or not (correspondently), ir image" << endl ; + cout << " By default -m=010100 i.e. disparity map and rgb image will be shown." << endl ; cout << "-r= Filename of .oni video file. The data will grabbed from it."
<< endl ; } @@ -101,7 +103,7 @@ static void parseCommandLine( int argc, char* argv[], bool& isColorizeDisp, bool string& filename, bool& isFileReading ) { filename.clear(); - cv::CommandLineParser parser(argc, argv, "{h help||}{cd|1|}{fmd|0|}{mode|0|}{m|01010|}{r||}"); + cv::CommandLineParser parser(argc, argv, "{h help||}{cd|1|}{fmd|0|}{mode|-1|}{m|010100|}{r||}"); if (parser.has("h")) { help(); @@ -121,14 +123,14 @@ static void parseCommandLine( int argc, char* argv[], bool& isColorizeDisp, bool help(); exit(-1); } - if (flags % 100000 == 0) + if (flags % 1000000 == 0) { cout << "No one output image is selected." << endl; exit(0); } - for (int i = 0; i < 5; i++) + for (int i = 0; i < 6; i++) { - retrievedImageFlags[4 - i] = (flags % 10 != 0); + retrievedImageFlags[5 - i] = (flags % 10 != 0); flags /= 10; } } @@ -141,7 +143,7 @@ int main( int argc, char* argv[] ) { bool isColorizeDisp, isFixedMaxDisp; int imageMode; - bool retrievedImageFlags[5]; + bool retrievedImageFlags[6]; string filename; bool isVideoReading; parseCommandLine( argc, argv, isColorizeDisp, isFixedMaxDisp, imageMode, retrievedImageFlags, filename, isVideoReading ); @@ -165,7 +167,7 @@ int main( int argc, char* argv[] ) return -1; } - if( !isVideoReading ) + if( !isVideoReading && imageMode >= 0 ) { bool modeRes=false; switch ( imageMode ) @@ -193,13 +195,35 @@ int main( int argc, char* argv[] ) cout << "\nThis image mode is not supported by the device, the default value (CV_CAP_OPENNI_SXGA_15HZ) will be used.\n" << endl; } + // turn on depth, color and IR if needed + if (retrievedImageFlags[0] || retrievedImageFlags[1] || retrievedImageFlags[2]) + capture.set(CAP_OPENNI_DEPTH_GENERATOR_PRESENT, true); + else + capture.set(CAP_OPENNI_DEPTH_GENERATOR_PRESENT, false); + if (retrievedImageFlags[3] || retrievedImageFlags[4]) + capture.set(CAP_OPENNI_IMAGE_GENERATOR_PRESENT, true); + else + capture.set(CAP_OPENNI_IMAGE_GENERATOR_PRESENT, false); + if (retrievedImageFlags[5]) + 
capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, true); + else + capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, false); + // Print some avalible device settings. - cout << "\nDepth generator output mode:" << endl << - "FRAME_WIDTH " << capture.get( CAP_PROP_FRAME_WIDTH ) << endl << - "FRAME_HEIGHT " << capture.get( CAP_PROP_FRAME_HEIGHT ) << endl << - "FRAME_MAX_DEPTH " << capture.get( CAP_PROP_OPENNI_FRAME_MAX_DEPTH ) << " mm" << endl << - "FPS " << capture.get( CAP_PROP_FPS ) << endl << - "REGISTRATION " << capture.get( CAP_PROP_OPENNI_REGISTRATION ) << endl; + if (capture.get(CAP_OPENNI_DEPTH_GENERATOR_PRESENT)) + { + cout << "\nDepth generator output mode:" << endl << + "FRAME_WIDTH " << capture.get(CAP_PROP_FRAME_WIDTH) << endl << + "FRAME_HEIGHT " << capture.get(CAP_PROP_FRAME_HEIGHT) << endl << + "FRAME_MAX_DEPTH " << capture.get(CAP_PROP_OPENNI_FRAME_MAX_DEPTH) << " mm" << endl << + "FPS " << capture.get(CAP_PROP_FPS) << endl << + "REGISTRATION " << capture.get(CAP_PROP_OPENNI_REGISTRATION) << endl; + } + else + { + cout << "\nDevice doesn't contain depth generator or it is not selected." << endl; + } + if( capture.get( CAP_OPENNI_IMAGE_GENERATOR_PRESENT ) ) { cout << @@ -210,9 +234,20 @@ int main( int argc, char* argv[] ) } else { - cout << "\nDevice doesn't contain image generator." << endl; - if (!retrievedImageFlags[0] && !retrievedImageFlags[1] && !retrievedImageFlags[2]) - return 0; + cout << "\nDevice doesn't contain image generator or it is not selected." << endl; + } + + if( capture.get(CAP_OPENNI_IR_GENERATOR_PRESENT) ) + { + cout << + "\nIR generator output mode:" << endl << + "FRAME_WIDTH " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FRAME_WIDTH) << endl << + "FRAME_HEIGHT " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FRAME_HEIGHT) << endl << + "FPS " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FPS) << endl; + } + else + { + cout << "\nDevice doesn't contain IR generator or it is not selected." 
<< endl; } for(;;) @@ -222,6 +257,7 @@ int main( int argc, char* argv[] ) Mat disparityMap; Mat bgrImage; Mat grayImage; + Mat irImage; if( !capture.grab() ) { @@ -261,6 +297,13 @@ int main( int argc, char* argv[] ) if( retrievedImageFlags[4] && capture.retrieve( grayImage, CAP_OPENNI_GRAY_IMAGE ) ) imshow( "gray image", grayImage ); + + if( retrievedImageFlags[5] && capture.retrieve( irImage, CAP_OPENNI_IR_IMAGE ) ) + { + Mat ir8; + irImage.convertTo(ir8, CV_8U, 256.0 / 3500, 0.0); + imshow("IR image", ir8); + } } if( waitKey( 30 ) >= 0 )