Merge pull request #6619 from Slonegg:openni2_ir_stream

pull/6736/head
Alexander Alekhin 9 years ago
commit 6b511751f5
  1. 14
      modules/videoio/include/opencv2/videoio.hpp
  2. 10
      modules/videoio/include/opencv2/videoio/videoio_c.h
  3. 466
      modules/videoio/src/cap_openni2.cpp
  4. 77
      samples/cpp/openni_capture.cpp

@ -162,7 +162,8 @@ enum { CAP_PROP_DC1394_OFF = -4, //turn the feature off (not cont
// OpenNI map generators // OpenNI map generators
enum { CAP_OPENNI_DEPTH_GENERATOR = 1 << 31, enum { CAP_OPENNI_DEPTH_GENERATOR = 1 << 31,
CAP_OPENNI_IMAGE_GENERATOR = 1 << 30, CAP_OPENNI_IMAGE_GENERATOR = 1 << 30,
CAP_OPENNI_GENERATORS_MASK = CAP_OPENNI_DEPTH_GENERATOR + CAP_OPENNI_IMAGE_GENERATOR CAP_OPENNI_IR_GENERATOR = 1 << 29,
CAP_OPENNI_GENERATORS_MASK = CAP_OPENNI_DEPTH_GENERATOR + CAP_OPENNI_IMAGE_GENERATOR + CAP_OPENNI_IR_GENERATOR
}; };
// Properties of cameras available through OpenNI interfaces // Properties of cameras available through OpenNI interfaces
@ -183,13 +184,15 @@ enum { CAP_PROP_OPENNI_OUTPUT_MODE = 100,
CAP_PROP_OPENNI2_MIRROR = 111 CAP_PROP_OPENNI2_MIRROR = 111
}; };
// OpenNI shortcats // OpenNI shortcuts
enum { CAP_OPENNI_IMAGE_GENERATOR_PRESENT = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT, enum { CAP_OPENNI_IMAGE_GENERATOR_PRESENT = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT,
CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_OUTPUT_MODE, CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CAP_OPENNI_IMAGE_GENERATOR + CAP_PROP_OPENNI_OUTPUT_MODE,
CAP_OPENNI_DEPTH_GENERATOR_PRESENT = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT,
CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_BASELINE, CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_BASELINE,
CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_FOCAL_LENGTH, CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_FOCAL_LENGTH,
CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_REGISTRATION, CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CAP_OPENNI_DEPTH_GENERATOR + CAP_PROP_OPENNI_REGISTRATION,
CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION,
CAP_OPENNI_IR_GENERATOR_PRESENT = CAP_OPENNI_IR_GENERATOR + CAP_PROP_OPENNI_GENERATOR_PRESENT,
}; };
// OpenNI data given from depth generator // OpenNI data given from depth generator
@ -201,7 +204,10 @@ enum { CAP_OPENNI_DEPTH_MAP = 0, // Depth values in mm (CV_16UC1)
// Data given from RGB image generator // Data given from RGB image generator
CAP_OPENNI_BGR_IMAGE = 5, CAP_OPENNI_BGR_IMAGE = 5,
CAP_OPENNI_GRAY_IMAGE = 6 CAP_OPENNI_GRAY_IMAGE = 6,
// Data given from IR image generator
CAP_OPENNI_IR_IMAGE = 7
}; };
// Supported output modes of OpenNI image generator // Supported output modes of OpenNI image generator

@ -200,7 +200,8 @@ enum
// OpenNI map generators // OpenNI map generators
CV_CAP_OPENNI_DEPTH_GENERATOR = 1 << 31, CV_CAP_OPENNI_DEPTH_GENERATOR = 1 << 31,
CV_CAP_OPENNI_IMAGE_GENERATOR = 1 << 30, CV_CAP_OPENNI_IMAGE_GENERATOR = 1 << 30,
CV_CAP_OPENNI_GENERATORS_MASK = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_OPENNI_IMAGE_GENERATOR, CV_CAP_OPENNI_IR_GENERATOR = 1 << 29,
CV_CAP_OPENNI_GENERATORS_MASK = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_OPENNI_IR_GENERATOR,
// Properties of cameras available through OpenNI interfaces // Properties of cameras available through OpenNI interfaces
CV_CAP_PROP_OPENNI_OUTPUT_MODE = 100, CV_CAP_PROP_OPENNI_OUTPUT_MODE = 100,
@ -222,10 +223,12 @@ enum
CV_CAP_OPENNI_IMAGE_GENERATOR_PRESENT = CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_OPENNI_GENERATOR_PRESENT, CV_CAP_OPENNI_IMAGE_GENERATOR_PRESENT = CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_OPENNI_GENERATOR_PRESENT,
CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_OPENNI_OUTPUT_MODE, CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_OPENNI_OUTPUT_MODE,
CV_CAP_OPENNI_DEPTH_GENERATOR_PRESENT = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_GENERATOR_PRESENT,
CV_CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_BASELINE, CV_CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_BASELINE,
CV_CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_FOCAL_LENGTH, CV_CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_FOCAL_LENGTH,
CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_REGISTRATION, CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_REGISTRATION,
CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION, CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION_ON = CV_CAP_OPENNI_DEPTH_GENERATOR_REGISTRATION,
CV_CAP_OPENNI_IR_GENERATOR_PRESENT = CV_CAP_OPENNI_IR_GENERATOR + CV_CAP_PROP_OPENNI_GENERATOR_PRESENT,
// Properties of cameras available through GStreamer interface // Properties of cameras available through GStreamer interface
CV_CAP_GSTREAMER_QUEUE_LENGTH = 200, // default is 1 CV_CAP_GSTREAMER_QUEUE_LENGTH = 200, // default is 1
@ -454,7 +457,10 @@ enum
// Data given from RGB image generator. // Data given from RGB image generator.
CV_CAP_OPENNI_BGR_IMAGE = 5, CV_CAP_OPENNI_BGR_IMAGE = 5,
CV_CAP_OPENNI_GRAY_IMAGE = 6 CV_CAP_OPENNI_GRAY_IMAGE = 6,
// Data given from IR image generator.
CV_CAP_OPENNI_IR_IMAGE = 7
}; };
// Supported output modes of OpenNI image generator // Supported output modes of OpenNI image generator

@ -65,8 +65,10 @@
#define CV_STREAM_TIMEOUT 2000 #define CV_STREAM_TIMEOUT 2000
#define CV_DEPTH_STREAM 0 #define CV_DEPTH_STREAM 0
#define CV_COLOR_STREAM 1 #define CV_COLOR_STREAM 1
#define CV_IR_STREAM 2
#define CV_MAX_NUM_STREAMS 3
#include "OpenNI.h" #include "OpenNI.h"
#include "PS1080.h" #include "PS1080.h"
@ -109,10 +111,9 @@ protected:
IplImage iplHeader; IplImage iplHeader;
}; };
static const int outputMapsTypesCount = 7; static const int outputMapsTypesCount = 8;
static openni::VideoMode defaultColorOutputMode(); static openni::VideoMode defaultStreamOutputMode(int stream);
static openni::VideoMode defaultDepthOutputMode();
IplImage* retrieveDepthMap(); IplImage* retrieveDepthMap();
IplImage* retrievePointCloudMap(); IplImage* retrievePointCloudMap();
@ -121,13 +122,17 @@ protected:
IplImage* retrieveValidDepthMask(); IplImage* retrieveValidDepthMask();
IplImage* retrieveBGRImage(); IplImage* retrieveBGRImage();
IplImage* retrieveGrayImage(); IplImage* retrieveGrayImage();
IplImage* retrieveIrImage();
openni::Status toggleStream(int stream, bool toggle);
bool readCamerasParams(); bool readCamerasParams();
double getDepthGeneratorProperty(int propIdx) const; double getDepthGeneratorProperty(int propIdx) const;
bool setDepthGeneratorProperty(int propIdx, double propVal); bool setDepthGeneratorProperty(int propIdx, double propVal);
double getImageGeneratorProperty(int propIdx) const; double getImageGeneratorProperty(int propIdx) const;
bool setImageGeneratorProperty(int propIdx, double propVal); bool setImageGeneratorProperty(int propIdx, double propVal);
double getIrGeneratorProperty(int propIdx) const;
bool setIrGeneratorProperty(int propIdx, double propVal);
double getCommonProperty(int propIdx) const; double getCommonProperty(int propIdx) const;
bool setCommonProperty(int propIdx, double propVal); bool setCommonProperty(int propIdx, double propVal);
@ -137,9 +142,9 @@ protected:
openni::Recorder recorder; openni::Recorder recorder;
// Data generators with its metadata // Data generators with its metadata
openni::VideoStream depth, color, **streams; openni::VideoStream streams[CV_MAX_NUM_STREAMS];
openni::VideoFrameRef depthFrame, colorFrame; openni::VideoFrameRef streamFrames[CV_MAX_NUM_STREAMS];
cv::Mat depthImage, colorImage; cv::Mat streamImages[CV_MAX_NUM_STREAMS];
int maxBufferSize, maxTimeDuration; // for approx sync int maxBufferSize, maxTimeDuration; // for approx sync
bool isCircleBuffer; bool isCircleBuffer;
@ -157,9 +162,6 @@ protected:
// The value for pixels without a valid disparity measurement // The value for pixels without a valid disparity measurement
int noSampleValue; int noSampleValue;
int currentStream;
int numStream;
std::vector<OutputMap> outputMaps; std::vector<OutputMap> outputMaps;
}; };
@ -177,27 +179,28 @@ bool CvCapture_OpenNI2::isOpened() const
return isContextOpened; return isContextOpened;
} }
openni::VideoMode CvCapture_OpenNI2::defaultColorOutputMode() openni::VideoMode CvCapture_OpenNI2::defaultStreamOutputMode(int stream)
{
openni::VideoMode mode;
mode.setResolution(640, 480);
mode.setFps(30);
mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
return mode;
}
openni::VideoMode CvCapture_OpenNI2::defaultDepthOutputMode()
{ {
openni::VideoMode mode; openni::VideoMode mode;
mode.setResolution(640, 480); mode.setResolution(640, 480);
mode.setFps(30); mode.setFps(30);
mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM); switch (stream)
{
case CV_DEPTH_STREAM:
mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM);
break;
case CV_COLOR_STREAM:
mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
break;
case CV_IR_STREAM:
mode.setPixelFormat(openni::PIXEL_FORMAT_GRAY16);
break;
}
return mode; return mode;
} }
CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
{ {
numStream = 2;
const char* deviceURI = openni::ANY_DEVICE; const char* deviceURI = openni::ANY_DEVICE;
openni::Status status; openni::Status status;
int deviceType = DEVICE_DEFAULT; int deviceType = DEVICE_DEFAULT;
@ -215,13 +218,6 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
index %= 10; index %= 10;
} }
// Asus XTION and Occipital Structure Sensor do not have an image generator
if (deviceType == DEVICE_ASUS_XTION)
numStream = 1;
if( deviceType > DEVICE_MAX )
return;
// Initialize and configure the context. // Initialize and configure the context.
status = openni::OpenNI::initialize(); status = openni::OpenNI::initialize();
@ -247,83 +243,98 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
return; return;
} }
//device.setDepthColorSyncEnabled(true); status = toggleStream(CV_DEPTH_STREAM, true);
// Asus XTION and Occipital Structure Sensor do not have an image generator
if (deviceType != DEVICE_ASUS_XTION)
status = depth.create(device, openni::SENSOR_DEPTH); status = openni::Status(status | toggleStream(CV_COLOR_STREAM, true));
if (status == openni::STATUS_OK) if (status != openni::STATUS_OK)
{ {
if (depth.isValid()) openni::OpenNI::shutdown();
{ return;
CV_Assert(depth.setVideoMode(defaultDepthOutputMode()) == openni::STATUS_OK); // xn::DepthGenerator supports VGA only! (Jan 2011)
}
status = depth.start();
if (status != openni::STATUS_OK)
{
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start depth stream: %s\n", openni::OpenNI::getExtendedError()));
depth.destroy();
return;
}
} }
else
if (!readCamerasParams())
{ {
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find depth stream:: %s\n", openni::OpenNI::getExtendedError())); CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n"));
return; return;
} }
streams = new openni::VideoStream*[numStream];
streams[CV_DEPTH_STREAM] = &depth;
// create a color object outputMaps.resize( outputMapsTypesCount );
status = color.create(device, openni::SENSOR_COLOR);
if (status == openni::STATUS_OK) isContextOpened = true;
setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0);
}
openni::Status CvCapture_OpenNI2::toggleStream(int stream, bool toggle)
{
openni::Status status;
// for logging
static const char* stream_names[CV_MAX_NUM_STREAMS] = {
"depth",
"color",
"IR"
};
static const openni::SensorType stream_sensor_types[CV_MAX_NUM_STREAMS] = {
openni::SENSOR_DEPTH,
openni::SENSOR_COLOR,
openni::SENSOR_IR
};
if (toggle) // want to open stream
{ {
// Set map output mode. // already opened
if (color.isValid()) if (streams[stream].isValid())
return openni::STATUS_OK;
// open stream
status = streams[stream].create(device, stream_sensor_types[stream]);
if (status == openni::STATUS_OK)
{ {
CV_Assert(color.setVideoMode(defaultColorOutputMode()) == openni::STATUS_OK); // set video mode
status = streams[stream].setVideoMode(defaultStreamOutputMode(stream)); // xn::DepthGenerator supports VGA only! (Jan 2011)
if (status != openni::STATUS_OK)
{
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't set %s stream output mode: %s\n",
stream_names[stream],
openni::OpenNI::getExtendedError()));
streams[stream].destroy();
return status;
}
// start stream
status = streams[stream].start();
if (status != openni::STATUS_OK)
{
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start %s stream: %s\n",
stream_names[stream],
openni::OpenNI::getExtendedError()));
streams[stream].destroy();
return status;
}
} }
status = color.start(); else
if (status != openni::STATUS_OK)
{ {
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start color stream: %s\n", openni::OpenNI::getExtendedError())); CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find %s stream:: %s\n",
color.destroy(); stream_names[stream],
return; openni::OpenNI::getExtendedError()));
return status;
} }
streams[CV_COLOR_STREAM] = &color;
} }
else if (numStream == 2) else if (streams[stream].isValid()) // want to close stream
{ {
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find color stream: %s\n", openni::OpenNI::getExtendedError())); streams[stream].stop();
return; streams[stream].destroy();
} }
if( !readCamerasParams() ) return openni::STATUS_OK;
{
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n"));
return;
}
// if( deviceType == DEVICE_ASUS_XTION )
// {
// //ps/asus specific
// imageGenerator.SetIntProperty("InputFormat", 1 /*XN_IO_IMAGE_FORMAT_YUV422*/);
// imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
// depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/);
// }
outputMaps.resize( outputMapsTypesCount );
isContextOpened = true;
setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0);
} }
CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename) CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename)
{ {
numStream = 2;
openni::Status status; openni::Status status;
isContextOpened = false; isContextOpened = false;
@ -348,6 +359,13 @@ CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename)
return; return;
} }
status = openni::Status(toggleStream(CV_DEPTH_STREAM, true) | toggleStream(CV_COLOR_STREAM, true));
if (status != openni::STATUS_OK)
{
openni::OpenNI::shutdown();
return;
}
if( !readCamerasParams() ) if( !readCamerasParams() )
{ {
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n")); CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n"));
@ -361,17 +379,20 @@ CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename)
CvCapture_OpenNI2::~CvCapture_OpenNI2() CvCapture_OpenNI2::~CvCapture_OpenNI2()
{ {
this->depthFrame.release(); for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i)
this->colorFrame.release(); {
this->depth.stop(); streamFrames[i].release();
this->color.stop(); streams[i].stop();
streams[i].destroy();
}
device.close();
openni::OpenNI::shutdown(); openni::OpenNI::shutdown();
} }
bool CvCapture_OpenNI2::readCamerasParams() bool CvCapture_OpenNI2::readCamerasParams()
{ {
double pixelSize = 0; double pixelSize = 0;
if (depth.getProperty<double>(XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE, &pixelSize) != openni::STATUS_OK) if (streams[CV_DEPTH_STREAM].getProperty<double>(XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE, &pixelSize) != openni::STATUS_OK)
{ {
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read pixel size!\n")); CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read pixel size!\n"));
return false; return false;
@ -382,13 +403,13 @@ bool CvCapture_OpenNI2::readCamerasParams()
// focal length of IR camera in pixels for VGA resolution // focal length of IR camera in pixels for VGA resolution
int zeroPlanDistance; // in mm int zeroPlanDistance; // in mm
if (depth.getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE, &zeroPlanDistance) != openni::STATUS_OK) if (streams[CV_DEPTH_STREAM].getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE, &zeroPlanDistance) != openni::STATUS_OK)
{ {
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read virtual plane distance!\n")); CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read virtual plane distance!\n"));
return false; return false;
} }
if (depth.getProperty<double>(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, &baseline) != openni::STATUS_OK) if (streams[CV_DEPTH_STREAM].getProperty<double>(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, &baseline) != openni::STATUS_OK)
{ {
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read base line!\n")); CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read base line!\n"));
return false; return false;
@ -419,6 +440,10 @@ double CvCapture_OpenNI2::getProperty( int propIdx ) const
{ {
propValue = getDepthGeneratorProperty( purePropIdx ); propValue = getDepthGeneratorProperty( purePropIdx );
} }
else if ((propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IR_GENERATOR)
{
propValue = getIrGeneratorProperty(purePropIdx);
}
else else
{ {
propValue = getCommonProperty( purePropIdx ); propValue = getCommonProperty( purePropIdx );
@ -443,6 +468,10 @@ bool CvCapture_OpenNI2::setProperty( int propIdx, double propValue )
{ {
isSet = setDepthGeneratorProperty( purePropIdx, propValue ); isSet = setDepthGeneratorProperty( purePropIdx, propValue );
} }
else if ((propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IR_GENERATOR)
{
isSet = setIrGeneratorProperty(purePropIdx, propValue);
}
else else
{ {
isSet = setCommonProperty( purePropIdx, propValue ); isSet = setCommonProperty( purePropIdx, propValue );
@ -458,12 +487,6 @@ double CvCapture_OpenNI2::getCommonProperty( int propIdx ) const
switch( propIdx ) switch( propIdx )
{ {
// There is a set of properties that correspond to depth generator by default
// (is they are pass without particular generator flag). Two reasons of this:
// 1) We can assume that depth generator is the main one for depth sensor.
// 2) In the initial vertions of OpenNI integration to OpenCV the value of
// flag CV_CAP_OPENNI_DEPTH_GENERATOR was 0 (it isn't zero now).
case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
case CV_CAP_PROP_FRAME_WIDTH : case CV_CAP_PROP_FRAME_WIDTH :
case CV_CAP_PROP_FRAME_HEIGHT : case CV_CAP_PROP_FRAME_HEIGHT :
case CV_CAP_PROP_FPS : case CV_CAP_PROP_FPS :
@ -477,7 +500,9 @@ double CvCapture_OpenNI2::getCommonProperty( int propIdx ) const
propValue = const_cast<CvCapture_OpenNI2 *>(this)->device.getDepthColorSyncEnabled(); propValue = const_cast<CvCapture_OpenNI2 *>(this)->device.getDepthColorSyncEnabled();
case CV_CAP_PROP_OPENNI2_MIRROR: case CV_CAP_PROP_OPENNI2_MIRROR:
{ {
bool isMirroring = color.getMirroringEnabled() && depth.getMirroringEnabled(); bool isMirroring = false;
for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i)
isMirroring |= streams[i].getMirroringEnabled();
propValue = isMirroring ? 1.0 : 0.0; propValue = isMirroring ? 1.0 : 0.0;
break; break;
} }
@ -497,8 +522,11 @@ bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue )
case CV_CAP_PROP_OPENNI2_MIRROR: case CV_CAP_PROP_OPENNI2_MIRROR:
{ {
bool mirror = propValue > 0.0 ? true : false; bool mirror = propValue > 0.0 ? true : false;
isSet = color.setMirroringEnabled(mirror) == openni::STATUS_OK; for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i)
isSet = depth.setMirroringEnabled(mirror) == openni::STATUS_OK; {
if (streams[i].isValid())
isSet |= streams[i].setMirroringEnabled(mirror) == openni::STATUS_OK;
}
} }
break; break;
// There is a set of properties that correspond to depth generator by default // There is a set of properties that correspond to depth generator by default
@ -509,6 +537,7 @@ bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue )
case CV_CAP_PROP_OPENNI2_SYNC: case CV_CAP_PROP_OPENNI2_SYNC:
isSet = device.setDepthColorSyncEnabled(propValue > 0.0) == openni::STATUS_OK; isSet = device.setDepthColorSyncEnabled(propValue > 0.0) == openni::STATUS_OK;
break; break;
default: default:
CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.\n", propIdx) ); CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.\n", propIdx) );
} }
@ -519,29 +548,28 @@ bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue )
double CvCapture_OpenNI2::getDepthGeneratorProperty( int propIdx ) const double CvCapture_OpenNI2::getDepthGeneratorProperty( int propIdx ) const
{ {
double propValue = 0; double propValue = 0;
if( !depth.isValid() ) if( !streams[CV_DEPTH_STREAM].isValid() )
return propValue; return propValue;
openni::VideoMode mode; openni::VideoMode mode;
switch( propIdx ) switch( propIdx )
{ {
case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
CV_DbgAssert(depth.isValid()); propValue = streams[CV_DEPTH_STREAM].isValid();
propValue = 1.;
break; break;
case CV_CAP_PROP_FRAME_WIDTH : case CV_CAP_PROP_FRAME_WIDTH :
propValue = depth.getVideoMode().getResolutionX(); propValue = streams[CV_DEPTH_STREAM].getVideoMode().getResolutionX();
break; break;
case CV_CAP_PROP_FRAME_HEIGHT : case CV_CAP_PROP_FRAME_HEIGHT :
propValue = depth.getVideoMode().getResolutionY(); propValue = streams[CV_DEPTH_STREAM].getVideoMode().getResolutionY();
break; break;
case CV_CAP_PROP_FPS : case CV_CAP_PROP_FPS :
mode = depth.getVideoMode(); mode = streams[CV_DEPTH_STREAM].getVideoMode();
propValue = mode.getFps(); propValue = mode.getFps();
break; break;
case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH : case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
propValue = depth.getMaxPixelValue(); propValue = streams[CV_DEPTH_STREAM].getMaxPixelValue();
break; break;
case CV_CAP_PROP_OPENNI_BASELINE : case CV_CAP_PROP_OPENNI_BASELINE :
propValue = baseline; propValue = baseline;
@ -553,10 +581,10 @@ double CvCapture_OpenNI2::getDepthGeneratorProperty( int propIdx ) const
propValue = device.getImageRegistrationMode(); propValue = device.getImageRegistrationMode();
break; break;
case CV_CAP_PROP_POS_MSEC : case CV_CAP_PROP_POS_MSEC :
propValue = (double)depthFrame.getTimestamp(); propValue = (double)streamFrames[CV_DEPTH_STREAM].getTimestamp();
break; break;
case CV_CAP_PROP_POS_FRAMES : case CV_CAP_PROP_POS_FRAMES :
propValue = depthFrame.getFrameIndex(); propValue = streamFrames[CV_DEPTH_STREAM].getFrameIndex();
break; break;
default : default :
CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) ); CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
@ -569,17 +597,20 @@ bool CvCapture_OpenNI2::setDepthGeneratorProperty( int propIdx, double propValue
{ {
bool isSet = false; bool isSet = false;
CV_Assert( depth.isValid() );
switch( propIdx ) switch( propIdx )
{ {
case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
if (isContextOpened)
isSet = toggleStream(CV_DEPTH_STREAM, propValue > 0.0) == openni::STATUS_OK;
break;
case CV_CAP_PROP_OPENNI_REGISTRATION: case CV_CAP_PROP_OPENNI_REGISTRATION:
{ {
CV_Assert(streams[CV_DEPTH_STREAM].isValid());
if( propValue != 0.0 ) // "on" if( propValue != 0.0 ) // "on"
{ {
// if there isn't image generator (i.e. ASUS XtionPro doesn't have it) // if there isn't image generator (i.e. ASUS XtionPro doesn't have it)
// then the property isn't avaliable // then the property isn't avaliable
if ( color.isValid() ) if ( streams[CV_COLOR_STREAM].isValid() )
{ {
openni::ImageRegistrationMode mode = propValue != 0.0 ? openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR : openni::IMAGE_REGISTRATION_OFF; openni::ImageRegistrationMode mode = propValue != 0.0 ? openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR : openni::IMAGE_REGISTRATION_OFF;
if( !device.getImageRegistrationMode() == mode ) if( !device.getImageRegistrationMode() == mode )
@ -619,30 +650,29 @@ bool CvCapture_OpenNI2::setDepthGeneratorProperty( int propIdx, double propValue
double CvCapture_OpenNI2::getImageGeneratorProperty( int propIdx ) const double CvCapture_OpenNI2::getImageGeneratorProperty( int propIdx ) const
{ {
double propValue = 0.; double propValue = 0.;
if( !color.isValid() ) if( !streams[CV_COLOR_STREAM].isValid() )
return propValue; return propValue;
openni::VideoMode mode; openni::VideoMode mode;
switch( propIdx ) switch( propIdx )
{ {
case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
CV_DbgAssert( color.isValid() ); propValue = streams[CV_COLOR_STREAM].isValid();
propValue = 1.;
break; break;
case CV_CAP_PROP_FRAME_WIDTH : case CV_CAP_PROP_FRAME_WIDTH :
propValue = color.getVideoMode().getResolutionX(); propValue = streams[CV_COLOR_STREAM].getVideoMode().getResolutionX();
break; break;
case CV_CAP_PROP_FRAME_HEIGHT : case CV_CAP_PROP_FRAME_HEIGHT :
propValue = color.getVideoMode().getResolutionY(); propValue = streams[CV_COLOR_STREAM].getVideoMode().getResolutionY();
break; break;
case CV_CAP_PROP_FPS : case CV_CAP_PROP_FPS :
propValue = color.getVideoMode().getFps(); propValue = streams[CV_COLOR_STREAM].getVideoMode().getFps();
break; break;
case CV_CAP_PROP_POS_MSEC : case CV_CAP_PROP_POS_MSEC :
propValue = (double)colorFrame.getTimestamp(); propValue = (double)streamFrames[CV_COLOR_STREAM].getTimestamp();
break; break;
case CV_CAP_PROP_POS_FRAMES : case CV_CAP_PROP_POS_FRAMES :
propValue = (double)colorFrame.getFrameIndex(); propValue = (double)streamFrames[CV_COLOR_STREAM].getFrameIndex();
break; break;
default : default :
CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) ); CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
@ -654,14 +684,18 @@ double CvCapture_OpenNI2::getImageGeneratorProperty( int propIdx ) const
bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue) bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue)
{ {
bool isSet = false; bool isSet = false;
if( !color.isValid() )
return isSet;
switch( propIdx ) switch( propIdx )
{ {
case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
if (isContextOpened)
isSet = toggleStream(CV_COLOR_STREAM, propValue > 0.0) == openni::STATUS_OK;
break;
case CV_CAP_PROP_OPENNI_OUTPUT_MODE : case CV_CAP_PROP_OPENNI_OUTPUT_MODE :
{ {
openni::VideoMode mode = color.getVideoMode(); if (!streams[CV_COLOR_STREAM].isValid())
return isSet;
openni::VideoMode mode = streams[CV_COLOR_STREAM].getVideoMode();
switch( cvRound(propValue) ) switch( cvRound(propValue) )
{ {
@ -689,7 +723,7 @@ bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue)
CV_Error( CV_StsBadArg, "Unsupported image generator output mode.\n"); CV_Error( CV_StsBadArg, "Unsupported image generator output mode.\n");
} }
openni::Status status = color.setVideoMode( mode ); openni::Status status = streams[CV_COLOR_STREAM].setVideoMode( mode );
if( status != openni::STATUS_OK ) if( status != openni::STATUS_OK )
CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setImageGeneratorProperty : %s\n", openni::OpenNI::getExtendedError())); CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setImageGeneratorProperty : %s\n", openni::OpenNI::getExtendedError()));
else else
@ -703,6 +737,96 @@ bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue)
return isSet; return isSet;
} }
double CvCapture_OpenNI2::getIrGeneratorProperty(int propIdx) const
{
    // Query a property of the IR stream.
    // Returns 0 when the IR stream has not been created/started
    // (i.e. streams[CV_IR_STREAM] is not valid).
    double propValue = 0.;
    if (!streams[CV_IR_STREAM].isValid())
        return propValue;

    switch (propIdx)
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
        // Validity was already checked above, so this always reports 1 here.
        propValue = streams[CV_IR_STREAM].isValid();
        break;
    case CV_CAP_PROP_FRAME_WIDTH:
        propValue = streams[CV_IR_STREAM].getVideoMode().getResolutionX();
        break;
    case CV_CAP_PROP_FRAME_HEIGHT:
        propValue = streams[CV_IR_STREAM].getVideoMode().getResolutionY();
        break;
    case CV_CAP_PROP_FPS:
        propValue = streams[CV_IR_STREAM].getVideoMode().getFps();
        break;
    case CV_CAP_PROP_POS_MSEC:
        // Timestamp of the most recently grabbed IR frame (microseconds
        // per OpenNI; kept as-is for consistency with the other getters).
        propValue = (double)streamFrames[CV_IR_STREAM].getTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES:
        propValue = (double)streamFrames[CV_IR_STREAM].getFrameIndex();
        break;
    default:
        // Was "Image generator ..." — copy-paste from the color-stream getter.
        CV_Error(CV_StsBadArg, cv::format("IR generator does not support such parameter (propIdx=%d) for getting.\n", propIdx));
    }

    return propValue;
}
bool CvCapture_OpenNI2::setIrGeneratorProperty(int propIdx, double propValue)
{
    // Set a property of the IR stream.
    // Returns true when the property was applied successfully.
    bool isSet = false;

    switch (propIdx)
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
        // Toggling "present" opens or closes the IR stream on demand;
        // only meaningful once the capture context has been opened.
        if (isContextOpened)
            isSet = toggleStream(CV_IR_STREAM, propValue > 0.0) == openni::STATUS_OK;
        break;
    case CV_CAP_PROP_OPENNI_OUTPUT_MODE:
    {
        if (!streams[CV_IR_STREAM].isValid())
            return isSet;
        openni::VideoMode mode = streams[CV_IR_STREAM].getVideoMode();
        switch (cvRound(propValue))
        {
        case CV_CAP_OPENNI_VGA_30HZ:
            mode.setResolution(640, 480);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_SXGA_15HZ:
            mode.setResolution(1280, 960);
            mode.setFps(15);
            break;
        case CV_CAP_OPENNI_SXGA_30HZ:
            mode.setResolution(1280, 960);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_QVGA_30HZ:
            mode.setResolution(320, 240);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_QVGA_60HZ:
            mode.setResolution(320, 240);
            mode.setFps(60);
            break;
        default:
            CV_Error(CV_StsBadArg, "Unsupported image generator output mode.\n");
        }

        openni::Status status = streams[CV_IR_STREAM].setVideoMode(mode);
        if (status != openni::STATUS_OK)
            // Was "setImageGeneratorProperty" — copy-paste from the color-stream setter.
            CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setIrGeneratorProperty : %s\n", openni::OpenNI::getExtendedError()));
        else
            isSet = true;
        break;
    }
    default:
        // Was "Image generator ..." — copy-paste from the color-stream setter.
        CV_Error(CV_StsBadArg, cv::format("IR generator does not support such parameter (propIdx=%d) for setting.\n", propIdx));
    }

    return isSet;
}
bool CvCapture_OpenNI2::grabFrame() bool CvCapture_OpenNI2::grabFrame()
{ {
if( !isOpened() ) if( !isOpened() )
@ -710,14 +834,22 @@ bool CvCapture_OpenNI2::grabFrame()
bool isGrabbed = false; bool isGrabbed = false;
openni::Status status = openni::OpenNI::waitForAnyStream(streams, numStream, &currentStream, CV_STREAM_TIMEOUT); int numActiveStreams = 0;
openni::VideoStream* streamPtrs[CV_MAX_NUM_STREAMS];
for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i) {
streamPtrs[numActiveStreams++] = &streams[i];
}
int currentStream;
openni::Status status = openni::OpenNI::waitForAnyStream(streamPtrs, numActiveStreams, &currentStream, CV_STREAM_TIMEOUT);
if( status != openni::STATUS_OK ) if( status != openni::STATUS_OK )
return false; return false;
if( depth.isValid() ) for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i)
depth.readFrame(&depthFrame); {
if (color.isValid()) if (streams[i].isValid())
color.readFrame(&colorFrame); streams[i].readFrame(&streamFrames[i]);
}
isGrabbed = true; isGrabbed = true;
return isGrabbed; return isGrabbed;
@ -736,25 +868,25 @@ inline void getDepthMapFromMetaData(const openni::VideoFrameRef& depthMetaData,
// Returns the last grabbed depth map as an IplImage (CV_16UC1),
// or 0 if no depth frame is available.
IplImage* CvCapture_OpenNI2::retrieveDepthMap()
{
    if( !streamFrames[CV_DEPTH_STREAM].isValid() )
        return 0;

    // Converts the OpenNI depth frame into the cached output Mat, masking
    // no-sample/shadow pixels with INVALID_PIXEL_VAL.
    getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue);

    return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr();
}
IplImage* CvCapture_OpenNI2::retrievePointCloudMap() IplImage* CvCapture_OpenNI2::retrievePointCloudMap()
{ {
if( !depthFrame.isValid() ) if( !streamFrames[CV_DEPTH_STREAM].isValid() )
return 0; return 0;
cv::Mat depthImg; cv::Mat depthImg;
getDepthMapFromMetaData(depthFrame, depthImg, noSampleValue, shadowValue); getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], depthImg, noSampleValue, shadowValue);
const int badPoint = INVALID_PIXEL_VAL; const int badPoint = INVALID_PIXEL_VAL;
const float badCoord = INVALID_COORDINATE_VAL; const float badCoord = INVALID_COORDINATE_VAL;
int cols = depthFrame.getWidth(), rows = depthFrame.getHeight(); int cols = streamFrames[CV_DEPTH_STREAM].getWidth(), rows = streamFrames[CV_DEPTH_STREAM].getHeight();
cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) ); cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) );
float worldX, worldY, worldZ; float worldX, worldY, worldZ;
@ -762,7 +894,7 @@ IplImage* CvCapture_OpenNI2::retrievePointCloudMap()
{ {
for (int x = 0; x < cols; x++) for (int x = 0; x < cols; x++)
{ {
openni::CoordinateConverter::convertDepthToWorld(depth, x, y, depthImg.at<unsigned short>(y, x), &worldX, &worldY, &worldZ); openni::CoordinateConverter::convertDepthToWorld(streams[CV_DEPTH_STREAM], x, y, depthImg.at<unsigned short>(y, x), &worldX, &worldY, &worldZ);
if (depthImg.at<unsigned short>(y, x) == badPoint) // not valid if (depthImg.at<unsigned short>(y, x) == badPoint) // not valid
pointCloud_XYZ.at<cv::Point3f>(y, x) = cv::Point3f(badCoord, badCoord, badCoord); pointCloud_XYZ.at<cv::Point3f>(y, x) = cv::Point3f(badCoord, badCoord, badCoord);
@ -803,11 +935,11 @@ static void computeDisparity_32F( const openni::VideoFrameRef& depthMetaData, cv
IplImage* CvCapture_OpenNI2::retrieveDisparityMap() IplImage* CvCapture_OpenNI2::retrieveDisparityMap()
{ {
if (!depthFrame.isValid()) if (!streamFrames[CV_DEPTH_STREAM].isValid())
return 0; return 0;
cv::Mat disp32; cv::Mat disp32;
computeDisparity_32F(depthFrame, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue); computeDisparity_32F(streamFrames[CV_DEPTH_STREAM], disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue);
disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 ); disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 );
@ -816,21 +948,21 @@ IplImage* CvCapture_OpenNI2::retrieveDisparityMap()
// Returns the last grabbed depth frame converted to a floating-point
// disparity map (CV_32FC1), or 0 if no depth frame is available.
IplImage* CvCapture_OpenNI2::retrieveDisparityMap_32F()
{
    if (!streamFrames[CV_DEPTH_STREAM].isValid())
        return 0;

    // disparity = baseline * focalLength / depth (see computeDisparity_32F).
    computeDisparity_32F(streamFrames[CV_DEPTH_STREAM], outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue);

    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr();
}
IplImage* CvCapture_OpenNI2::retrieveValidDepthMask() IplImage* CvCapture_OpenNI2::retrieveValidDepthMask()
{ {
if (!depthFrame.isValid()) if (!streamFrames[CV_DEPTH_STREAM].isValid())
return 0; return 0;
cv::Mat d; cv::Mat d;
getDepthMapFromMetaData(depthFrame, d, noSampleValue, shadowValue); getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], d, noSampleValue, shadowValue);
outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = d != CvCapture_OpenNI2::INVALID_PIXEL_VAL; outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = d != CvCapture_OpenNI2::INVALID_PIXEL_VAL;
@ -850,30 +982,58 @@ inline void getBGRImageFromMetaData( const openni::VideoFrameRef& imageMetaData,
cv::cvtColor(bufferImage, bgrImage, cv::COLOR_RGB2BGR); cv::cvtColor(bufferImage, bgrImage, cv::COLOR_RGB2BGR);
} }
// Wraps the pixel buffer of an 8- or 16-bit grayscale OpenNI frame in
// 'grayImage' without copying. The resulting Mat is a non-owning header over
// the frame's buffer, so it must not outlive 'imageMetaData'.
// Calls CV_Error(CV_StsUnsupportedFormat) for any other pixel format.
inline void getGrayImageFromMetaData(const openni::VideoFrameRef& imageMetaData, cv::Mat& grayImage)
{
    int type;
    switch (imageMetaData.getVideoMode().getPixelFormat())
    {
    case openni::PIXEL_FORMAT_GRAY8:
        type = CV_8UC1;
        break;
    case openni::PIXEL_FORMAT_GRAY16:
        type = CV_16UC1;
        break;
    default:
        CV_Error(CV_StsUnsupportedFormat, "Unsupported format of grabbed image\n");
        return;
    }
    // Build a non-owning Mat header directly over the frame buffer. The
    // previous code called create() (allocating a buffer) and then repointed
    // Mat::data at the OpenNI buffer, leaving the fresh allocation unused.
    grayImage = cv::Mat(imageMetaData.getHeight(), imageMetaData.getWidth(), type, (void*)imageMetaData.getData());
}
// Returns the last grabbed color frame converted to BGR (CV_8UC3),
// or 0 if no color frame is available.
IplImage* CvCapture_OpenNI2::retrieveBGRImage()
{
    if( !streamFrames[CV_COLOR_STREAM].isValid() )
        return 0;

    getBGRImageFromMetaData(streamFrames[CV_COLOR_STREAM], outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat);

    return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr();
}
// Returns the last grabbed color frame converted to grayscale (CV_8UC1),
// or 0 if no color frame is available. Only RGB888 input is supported.
IplImage* CvCapture_OpenNI2::retrieveGrayImage()
{
    if (!streamFrames[CV_COLOR_STREAM].isValid())
        return 0;

    CV_Assert(streamFrames[CV_COLOR_STREAM].getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_RGB888); // RGB

    cv::Mat rgbImage;
    getBGRImageFromMetaData(streamFrames[CV_COLOR_STREAM], rgbImage);
    cv::cvtColor( rgbImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY );

    return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr();
}
// Returns the last grabbed IR frame as an IplImage (single-channel, 8- or
// 16-bit depending on the IR stream's pixel format), or 0 if no IR frame
// has been grabbed.
IplImage* CvCapture_OpenNI2::retrieveIrImage()
{
    if (!streamFrames[CV_IR_STREAM].isValid())
        return 0;

    // Wraps the OpenNI frame buffer in the cached output Mat without copying.
    getGrayImageFromMetaData(streamFrames[CV_IR_STREAM], outputMaps[CV_CAP_OPENNI_IR_IMAGE].mat);

    return outputMaps[CV_CAP_OPENNI_IR_IMAGE].getIplImagePtr();
}
IplImage* CvCapture_OpenNI2::retrieveFrame( int outputType ) IplImage* CvCapture_OpenNI2::retrieveFrame( int outputType )
{ {
IplImage* image = 0; IplImage* image = 0;
@ -907,6 +1067,10 @@ IplImage* CvCapture_OpenNI2::retrieveFrame( int outputType )
{ {
image = retrieveGrayImage(); image = retrieveGrayImage();
} }
else if( outputType == CV_CAP_OPENNI_IR_IMAGE )
{
image = retrieveIrImage();
}
return image; return image;
} }

@ -21,6 +21,8 @@ static void help()
"2.) Data given from RGB image generator\n" "2.) Data given from RGB image generator\n"
" CAP_OPENNI_BGR_IMAGE - color image (CV_8UC3)\n" " CAP_OPENNI_BGR_IMAGE - color image (CV_8UC3)\n"
" CAP_OPENNI_GRAY_IMAGE - gray image (CV_8UC1)\n" " CAP_OPENNI_GRAY_IMAGE - gray image (CV_8UC1)\n"
"2.) Data given from IR image generator\n"
" CAP_OPENNI_IR_IMAGE - gray image (CV_16UC1)\n"
<< endl; << endl;
} }
@ -92,8 +94,8 @@ static void printCommandLineParams()
cout << "-mode= image mode: resolution and fps, supported three values: 0 - CAP_OPENNI_VGA_30HZ, 1 - CAP_OPENNI_SXGA_15HZ," << endl; cout << "-mode= image mode: resolution and fps, supported three values: 0 - CAP_OPENNI_VGA_30HZ, 1 - CAP_OPENNI_SXGA_15HZ," << endl;
cout << " 2 - CAP_OPENNI_SXGA_30HZ (0 by default). Ignored if rgb image or gray image are not selected to show." << endl; cout << " 2 - CAP_OPENNI_SXGA_30HZ (0 by default). Ignored if rgb image or gray image are not selected to show." << endl;
cout << "-m= Mask to set which output images are need. It is a string of size 5. Each element of this is '0' or '1' and" << endl; cout << "-m= Mask to set which output images are need. It is a string of size 5. Each element of this is '0' or '1' and" << endl;
cout << " determine: is depth map, disparity map, valid pixels mask, rgb image, gray image need or not (correspondently)?" << endl ; cout << " determine: is depth map, disparity map, valid pixels mask, rgb image, gray image need or not (correspondently), ir image" << endl ;
cout << " By default -m=01010 i.e. disparity map and rgb image will be shown." << endl ; cout << " By default -m=010100 i.e. disparity map and rgb image will be shown." << endl ;
cout << "-r= Filename of .oni video file. The data will grabbed from it." << endl ; cout << "-r= Filename of .oni video file. The data will grabbed from it." << endl ;
} }
@ -101,7 +103,7 @@ static void parseCommandLine( int argc, char* argv[], bool& isColorizeDisp, bool
string& filename, bool& isFileReading ) string& filename, bool& isFileReading )
{ {
filename.clear(); filename.clear();
cv::CommandLineParser parser(argc, argv, "{h help||}{cd|1|}{fmd|0|}{mode|0|}{m|01010|}{r||}"); cv::CommandLineParser parser(argc, argv, "{h help||}{cd|1|}{fmd|0|}{mode|-1|}{m|010100|}{r||}");
if (parser.has("h")) if (parser.has("h"))
{ {
help(); help();
@ -121,14 +123,14 @@ static void parseCommandLine( int argc, char* argv[], bool& isColorizeDisp, bool
help(); help();
exit(-1); exit(-1);
} }
if (flags % 100000 == 0) if (flags % 1000000 == 0)
{ {
cout << "No one output image is selected." << endl; cout << "No one output image is selected." << endl;
exit(0); exit(0);
} }
for (int i = 0; i < 5; i++) for (int i = 0; i < 6; i++)
{ {
retrievedImageFlags[4 - i] = (flags % 10 != 0); retrievedImageFlags[5 - i] = (flags % 10 != 0);
flags /= 10; flags /= 10;
} }
} }
@ -141,7 +143,7 @@ int main( int argc, char* argv[] )
{ {
bool isColorizeDisp, isFixedMaxDisp; bool isColorizeDisp, isFixedMaxDisp;
int imageMode; int imageMode;
bool retrievedImageFlags[5]; bool retrievedImageFlags[6];
string filename; string filename;
bool isVideoReading; bool isVideoReading;
parseCommandLine( argc, argv, isColorizeDisp, isFixedMaxDisp, imageMode, retrievedImageFlags, filename, isVideoReading ); parseCommandLine( argc, argv, isColorizeDisp, isFixedMaxDisp, imageMode, retrievedImageFlags, filename, isVideoReading );
@ -165,7 +167,7 @@ int main( int argc, char* argv[] )
return -1; return -1;
} }
if( !isVideoReading ) if( !isVideoReading && imageMode >= 0 )
{ {
bool modeRes=false; bool modeRes=false;
switch ( imageMode ) switch ( imageMode )
@ -193,13 +195,35 @@ int main( int argc, char* argv[] )
cout << "\nThis image mode is not supported by the device, the default value (CV_CAP_OPENNI_SXGA_15HZ) will be used.\n" << endl; cout << "\nThis image mode is not supported by the device, the default value (CV_CAP_OPENNI_SXGA_15HZ) will be used.\n" << endl;
} }
// turn on depth, color and IR if needed
if (retrievedImageFlags[0] || retrievedImageFlags[1] || retrievedImageFlags[2])
capture.set(CAP_OPENNI_DEPTH_GENERATOR_PRESENT, true);
else
capture.set(CAP_OPENNI_DEPTH_GENERATOR_PRESENT, false);
if (retrievedImageFlags[3] || retrievedImageFlags[4])
capture.set(CAP_OPENNI_IMAGE_GENERATOR_PRESENT, true);
else
capture.set(CAP_OPENNI_IMAGE_GENERATOR_PRESENT, false);
if (retrievedImageFlags[5])
capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, true);
else
capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, false);
// Print some avalible device settings. // Print some avalible device settings.
cout << "\nDepth generator output mode:" << endl << if (capture.get(CAP_OPENNI_DEPTH_GENERATOR_PRESENT))
"FRAME_WIDTH " << capture.get( CAP_PROP_FRAME_WIDTH ) << endl << {
"FRAME_HEIGHT " << capture.get( CAP_PROP_FRAME_HEIGHT ) << endl << cout << "\nDepth generator output mode:" << endl <<
"FRAME_MAX_DEPTH " << capture.get( CAP_PROP_OPENNI_FRAME_MAX_DEPTH ) << " mm" << endl << "FRAME_WIDTH " << capture.get(CAP_PROP_FRAME_WIDTH) << endl <<
"FPS " << capture.get( CAP_PROP_FPS ) << endl << "FRAME_HEIGHT " << capture.get(CAP_PROP_FRAME_HEIGHT) << endl <<
"REGISTRATION " << capture.get( CAP_PROP_OPENNI_REGISTRATION ) << endl; "FRAME_MAX_DEPTH " << capture.get(CAP_PROP_OPENNI_FRAME_MAX_DEPTH) << " mm" << endl <<
"FPS " << capture.get(CAP_PROP_FPS) << endl <<
"REGISTRATION " << capture.get(CAP_PROP_OPENNI_REGISTRATION) << endl;
}
else
{
cout << "\nDevice doesn't contain depth generator or it is not selected." << endl;
}
if( capture.get( CAP_OPENNI_IMAGE_GENERATOR_PRESENT ) ) if( capture.get( CAP_OPENNI_IMAGE_GENERATOR_PRESENT ) )
{ {
cout << cout <<
@ -210,9 +234,20 @@ int main( int argc, char* argv[] )
} }
else else
{ {
cout << "\nDevice doesn't contain image generator." << endl; cout << "\nDevice doesn't contain image generator or it is not selected." << endl;
if (!retrievedImageFlags[0] && !retrievedImageFlags[1] && !retrievedImageFlags[2]) }
return 0;
if( capture.get(CAP_OPENNI_IR_GENERATOR_PRESENT) )
{
cout <<
"\nIR generator output mode:" << endl <<
"FRAME_WIDTH " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FRAME_WIDTH) << endl <<
"FRAME_HEIGHT " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FRAME_HEIGHT) << endl <<
"FPS " << capture.get(CAP_OPENNI_IR_GENERATOR + CAP_PROP_FPS) << endl;
}
else
{
cout << "\nDevice doesn't contain IR generator or it is not selected." << endl;
} }
for(;;) for(;;)
@ -222,6 +257,7 @@ int main( int argc, char* argv[] )
Mat disparityMap; Mat disparityMap;
Mat bgrImage; Mat bgrImage;
Mat grayImage; Mat grayImage;
Mat irImage;
if( !capture.grab() ) if( !capture.grab() )
{ {
@ -261,6 +297,13 @@ int main( int argc, char* argv[] )
if( retrievedImageFlags[4] && capture.retrieve( grayImage, CAP_OPENNI_GRAY_IMAGE ) ) if( retrievedImageFlags[4] && capture.retrieve( grayImage, CAP_OPENNI_GRAY_IMAGE ) )
imshow( "gray image", grayImage ); imshow( "gray image", grayImage );
if( retrievedImageFlags[5] && capture.retrieve( irImage, CAP_OPENNI_IR_IMAGE ) )
{
Mat ir8;
irImage.convertTo(ir8, CV_8U, 256.0 / 3500, 0.0);
imshow("IR image", ir8);
}
} }
if( waitKey( 30 ) >= 0 ) if( waitKey( 30 ) >= 0 )

Loading…
Cancel
Save