Merge pull request #2045 from SpecLad:merge-2.4
commit
faddd5b97f
41 changed files with 3609 additions and 934 deletions
@@ -0,0 +1,20 @@
# Main variables:
# INTELPERC_LIBRARIES and INTELPERC_INCLUDE_DIR to link Intel Perceptual Computing SDK modules
# HAVE_INTELPERC for conditional compilation of OpenCV with/without Intel Perceptual Computing SDK

if(X86_64)
    find_path(INTELPERC_INCLUDE_DIR "pxcsession.h" PATHS "$ENV{PCSDK_DIR}include" DOC "Path to Intel Perceptual Computing SDK interface headers")
    find_file(INTELPERC_LIBRARIES "libpxc.lib" PATHS "$ENV{PCSDK_DIR}lib/x64" DOC "Path to Intel Perceptual Computing SDK interface libraries")
else()
    find_path(INTELPERC_INCLUDE_DIR "pxcsession.h" PATHS "$ENV{PCSDK_DIR}include" DOC "Path to Intel Perceptual Computing SDK interface headers")
    find_file(INTELPERC_LIBRARIES "libpxc.lib" PATHS "$ENV{PCSDK_DIR}lib/Win32" DOC "Path to Intel Perceptual Computing SDK interface libraries")
endif()

if(INTELPERC_INCLUDE_DIR AND INTELPERC_LIBRARIES)
    set(HAVE_INTELPERC TRUE)
else()
    set(HAVE_INTELPERC FALSE)
    message(WARNING "Intel Perceptual Computing SDK library directory (set by INTELPERC_LIB_DIR variable) is not found or does not have Intel Perceptual Computing SDK libraries.")
endif() #if(INTELPERC_INCLUDE_DIR AND INTELPERC_LIBRARIES)

mark_as_advanced(FORCE INTELPERC_LIBRARIES INTELPERC_INCLUDE_DIR)
@@ -0,0 +1,79 @@
*******
HighGUI
*******

.. highlight:: cpp

Using Creative Senz3D and other Intel Perceptual Computing SDK compatible depth sensors
========================================================================================

Depth sensors compatible with the Intel Perceptual Computing SDK are supported through the ``VideoCapture`` class. Depth map, RGB image and some other formats of output can be retrieved by using the familiar ``VideoCapture`` interface.

In order to use a depth sensor with OpenCV you should do the following preliminary steps:

#.
    Install the Intel Perceptual Computing SDK (from here http://www.intel.com/software/perceptual).

#.
    Configure OpenCV with Intel Perceptual Computing SDK support by setting the ``WITH_INTELPERC`` flag in CMake. If the Intel Perceptual Computing SDK is found in its install folders, OpenCV will be built with Intel Perceptual Computing SDK library support (see the ``INTELPERC`` status in the CMake log). If the CMake process doesn't find the Intel Perceptual Computing SDK installation folder automatically, set the corresponding CMake variables ``INTELPERC_LIB_DIR`` and ``INTELPERC_INCLUDE_DIR`` to the proper values.

#.
    Build OpenCV. (A quick runtime check of the resulting build is sketched after this list.)

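A minimal runtime check of the resulting build can look as follows; this is only a sketch (the messages are arbitrary) that assumes the standard highgui header and a connected sensor -- opening the capture fails if OpenCV was built without ``WITH_INTELPERC`` or no compatible device is present::

    #include "opencv2/highgui/highgui.hpp"
    #include <iostream>

    int main()
    {
        cv::VideoCapture capture( CV_CAP_INTELPERC );
        if( !capture.isOpened() )
        {
            std::cerr << "IntelPerC capture could not be opened "
                         "(build without WITH_INTELPERC or no sensor found)" << std::endl;
            return -1;
        }
        std::cout << "IntelPerC capture opened successfully" << std::endl;
        return 0;
    }
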
``VideoCapture`` can retrieve the following data:

#.
    data given from the depth generator:
      * ``CV_CAP_INTELPERC_DEPTH_MAP`` - each pixel is a 16-bit integer. The value indicates the distance from an object to the camera's XY plane or the Cartesian depth. (CV_16UC1)
      * ``CV_CAP_INTELPERC_UVDEPTH_MAP`` - each pixel contains two 32-bit floating point values in the range of 0-1, representing the mapping of depth coordinates to the color coordinates. (CV_32FC2) (See the sketch after this list.)
      * ``CV_CAP_INTELPERC_IR_MAP`` - each pixel is a 16-bit integer. The value indicates the intensity of the reflected laser beam. (CV_16UC1)
#.
    data given from the RGB image generator:
      * ``CV_CAP_INTELPERC_IMAGE`` - color image. (CV_8UC3)

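The ``CV_CAP_INTELPERC_UVDEPTH_MAP`` output pairs each depth pixel with normalized coordinates in the color image. Below is a minimal sketch of applying such a mapping, assuming a color frame and a UV map have already been retrieved as in the examples that follow (the names ``uvMap``, ``image``, ``row`` and ``col`` are only illustrative)::

    // uvMap : CV_32FC2 map retrieved with CV_CAP_INTELPERC_UVDEPTH_MAP
    // image : CV_8UC3 frame retrieved with CV_CAP_INTELPERC_IMAGE
    // (row, col) : a pixel of interest in the depth map
    cv::Vec2f uv = uvMap.at<cv::Vec2f>(row, col);
    int x = cvRound(uv[0] * image.cols);   // normalized [0..1] -> color image column
    int y = cvRound(uv[1] * image.rows);   // normalized [0..1] -> color image row
    if (0 <= x && x < image.cols && 0 <= y && y < image.rows)
        image.at<cv::Vec3b>(y, x) = cv::Vec3b(0, 0, 255); // mark the corresponding color pixel in red
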
In order to get the depth map from the depth sensor use ``VideoCapture::operator >>``, e.g. ::

    VideoCapture capture( CV_CAP_INTELPERC );
    for(;;)
    {
        Mat depthMap;
        capture >> depthMap;

        if( waitKey( 30 ) >= 0 )
            break;
    }

For getting several data maps use ``VideoCapture::grab`` and ``VideoCapture::retrieve``, e.g. ::

    VideoCapture capture(CV_CAP_INTELPERC);
    for(;;)
    {
        Mat depthMap;
        Mat image;
        Mat irImage;

        capture.grab();

        capture.retrieve( depthMap, CV_CAP_INTELPERC_DEPTH_MAP );
        capture.retrieve( image, CV_CAP_INTELPERC_IMAGE );
        capture.retrieve( irImage, CV_CAP_INTELPERC_IR_MAP );

        if( waitKey( 30 ) >= 0 )
            break;
    }

For setting and getting some properties of the sensor's data generators use the ``VideoCapture::set`` and ``VideoCapture::get`` methods respectively, e.g. ::

    VideoCapture capture( CV_CAP_INTELPERC );
    capture.set( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0 );
    cout << "FPS    " << capture.get( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_FPS ) << endl;

Since two types of sensor's data generators are supported (image generator and depth generator), there are two flags that should be used to set/get properties of the needed generator:

* CV_CAP_INTELPERC_IMAGE_GENERATOR -- a flag for access to the image generator properties.

* CV_CAP_INTELPERC_DEPTH_GENERATOR -- a flag for access to the depth generator properties. This flag value is assumed by default if neither of the two possible values of the property is set.

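The same flags can be combined with the profile-related properties to enumerate the stream profiles supported by a generator and to select one of them. A minimal sketch, following the approach used in the intelperc_capture.cpp sample::

    VideoCapture capture( CV_CAP_INTELPERC );
    int profilesCount = (int)capture.get( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_COUNT );
    for( int i = 0; i < profilesCount; i++ )
    {
        // selecting a profile makes its parameters visible through get()
        capture.set( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)i );
        cout << "Profile[" << i << "]: "
             << (int)capture.get( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_FRAME_WIDTH ) << "x"
             << (int)capture.get( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_FRAME_HEIGHT ) << " @ "
             << capture.get( CV_CAP_INTELPERC_DEPTH_GENERATOR | CV_CAP_PROP_FPS ) << " fps" << endl;
    }
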
For more information please refer to the usage example intelperc_capture.cpp_ in the ``opencv/samples/cpp`` folder.

.. _intelperc_capture.cpp: https://github.com/Itseez/opencv/tree/master/samples/cpp/intelperc_capture.cpp
@@ -0,0 +1,714 @@
||||
#include "precomp.hpp" |
||||
|
||||
#ifdef HAVE_INTELPERC |
||||
|
||||
#include "pxcsession.h" |
||||
#include "pxcsmartptr.h" |
||||
#include "pxccapture.h" |
||||
|
||||
class CvIntelPerCStreamBase |
||||
{ |
||||
protected: |
||||
struct FrameInternal |
||||
{ |
||||
IplImage* retrieveFrame() |
||||
{ |
||||
if (m_mat.empty()) |
||||
return NULL; |
||||
m_iplHeader = IplImage(m_mat); |
||||
return &m_iplHeader; |
||||
} |
||||
cv::Mat m_mat; |
||||
private: |
||||
IplImage m_iplHeader; |
||||
}; |
||||
public: |
||||
CvIntelPerCStreamBase() |
||||
: m_profileIdx(-1) |
||||
, m_frameIdx(0) |
||||
, m_timeStampStartNS(0) |
||||
{ |
||||
} |
||||
virtual ~CvIntelPerCStreamBase() |
||||
{ |
||||
} |
||||
|
||||
bool isValid() |
||||
{ |
||||
return (m_device.IsValid() && m_stream.IsValid()); |
||||
} |
||||
bool grabFrame() |
||||
{ |
||||
if (!m_stream.IsValid()) |
||||
return false; |
||||
if (-1 == m_profileIdx) |
||||
{ |
||||
if (!setProperty(CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0)) |
||||
return false; |
||||
} |
||||
PXCSmartPtr<PXCImage> pxcImage; PXCSmartSP sp; |
||||
if (PXC_STATUS_NO_ERROR > m_stream->ReadStreamAsync(&pxcImage, &sp)) |
||||
return false; |
||||
if (PXC_STATUS_NO_ERROR > sp->Synchronize()) |
||||
return false; |
||||
if (0 == m_timeStampStartNS) |
||||
m_timeStampStartNS = pxcImage->QueryTimeStamp(); |
||||
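// assumption: QueryTimeStamp() reports time in 100-nanosecond units, so dividing the elapsed value by 10000 yields milliseconds (reported via CV_CAP_PROP_POS_MSEC)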
m_timeStamp = (double)((pxcImage->QueryTimeStamp() - m_timeStampStartNS) / 10000); |
||||
m_frameIdx++; |
||||
return prepareIplImage(pxcImage); |
||||
} |
||||
int getProfileIDX() const |
||||
{ |
||||
return m_profileIdx; |
||||
} |
||||
public: |
||||
virtual bool initStream(PXCSession *session) = 0; |
||||
virtual double getProperty(int propIdx) |
||||
{ |
||||
double ret = 0.0; |
||||
switch (propIdx) |
||||
{ |
||||
case CV_CAP_PROP_INTELPERC_PROFILE_COUNT: |
||||
ret = (double)m_profiles.size(); |
||||
break; |
||||
case CV_CAP_PROP_FRAME_WIDTH : |
||||
if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size())) |
||||
ret = (double)m_profiles[m_profileIdx].imageInfo.width; |
||||
break; |
||||
case CV_CAP_PROP_FRAME_HEIGHT : |
||||
if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size())) |
||||
ret = (double)m_profiles[m_profileIdx].imageInfo.height; |
||||
break; |
||||
case CV_CAP_PROP_FPS : |
||||
if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size())) |
||||
{ |
||||
ret = ((double)m_profiles[m_profileIdx].frameRateMin.numerator / (double)m_profiles[m_profileIdx].frameRateMin.denominator |
||||
+ (double)m_profiles[m_profileIdx].frameRateMax.numerator / (double)m_profiles[m_profileIdx].frameRateMax.denominator) / 2.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_POS_FRAMES: |
||||
ret = (double)m_frameIdx; |
||||
break; |
||||
case CV_CAP_PROP_POS_MSEC: |
||||
ret = m_timeStamp; |
||||
break; |
||||
}; |
||||
return ret; |
||||
} |
||||
virtual bool setProperty(int propIdx, double propVal) |
||||
{ |
||||
bool isSet = false; |
||||
switch (propIdx) |
||||
{ |
||||
case CV_CAP_PROP_INTELPERC_PROFILE_IDX: |
||||
{ |
||||
int propValInt = (int)propVal; |
||||
if ((0 <= propValInt) && (propValInt < m_profiles.size())) |
||||
{ |
||||
if (m_profileIdx != propValInt) |
||||
{ |
||||
m_profileIdx = propValInt; |
||||
if (m_stream.IsValid()) |
||||
m_stream->SetProfile(&m_profiles[m_profileIdx]); |
||||
m_frameIdx = 0; |
||||
m_timeStampStartNS = 0; |
||||
} |
||||
isSet = true; |
||||
} |
||||
} |
||||
break; |
||||
}; |
||||
return isSet; |
||||
} |
||||
protected: |
||||
PXCSmartPtr<PXCCapture::Device> m_device; |
||||
bool initDevice(PXCSession *session) |
||||
{ |
||||
if (NULL == session) |
||||
return false; |
||||
|
||||
pxcStatus sts = PXC_STATUS_NO_ERROR; |
||||
PXCSession::ImplDesc templat; |
||||
memset(&templat,0,sizeof(templat)); |
||||
templat.group = PXCSession::IMPL_GROUP_SENSOR; |
||||
templat.subgroup= PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE; |
||||
|
||||
for (int modidx = 0; PXC_STATUS_NO_ERROR <= sts; modidx++) |
||||
{ |
||||
PXCSession::ImplDesc desc; |
||||
sts = session->QueryImpl(&templat, modidx, &desc); |
||||
if (PXC_STATUS_NO_ERROR > sts) |
||||
break; |
||||
|
||||
PXCSmartPtr<PXCCapture> capture; |
||||
sts = session->CreateImpl<PXCCapture>(&desc, &capture); |
||||
if (!capture.IsValid()) |
||||
continue; |
||||
|
||||
/* enumerate devices */ |
||||
for (int devidx = 0; PXC_STATUS_NO_ERROR <= sts; devidx++) |
||||
{ |
||||
PXCSmartPtr<PXCCapture::Device> device; |
||||
sts = capture->CreateDevice(devidx, &device); |
||||
if (PXC_STATUS_NO_ERROR <= sts) |
||||
{ |
||||
m_device = device.ReleasePtr(); |
||||
return true; |
||||
} |
||||
} |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
PXCSmartPtr<PXCCapture::VideoStream> m_stream; |
||||
void initStreamImpl(PXCImage::ImageType type) |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return; |
||||
|
||||
pxcStatus sts = PXC_STATUS_NO_ERROR; |
||||
/* enumerate streams */ |
||||
for (int streamidx = 0; PXC_STATUS_NO_ERROR <= sts; streamidx++) |
||||
{ |
||||
PXCCapture::Device::StreamInfo sinfo; |
||||
sts = m_device->QueryStream(streamidx, &sinfo); |
||||
if (PXC_STATUS_NO_ERROR > sts) |
||||
break; |
||||
if (PXCCapture::VideoStream::CUID != sinfo.cuid) |
||||
continue; |
||||
if (type != sinfo.imageType) |
||||
continue; |
||||
|
||||
sts = m_device->CreateStream<PXCCapture::VideoStream>(streamidx, &m_stream); |
||||
if (PXC_STATUS_NO_ERROR == sts) |
||||
break; |
||||
m_stream.ReleaseRef(); |
||||
} |
||||
} |
||||
protected: |
||||
std::vector<PXCCapture::VideoStream::ProfileInfo> m_profiles; |
||||
int m_profileIdx; |
||||
int m_frameIdx; |
||||
pxcU64 m_timeStampStartNS; |
||||
double m_timeStamp; |
||||
|
||||
virtual bool validProfile(const PXCCapture::VideoStream::ProfileInfo& /*pinfo*/) |
||||
{ |
||||
return true; |
||||
} |
||||
void enumProfiles() |
||||
{ |
||||
m_profiles.clear(); |
||||
if (!m_stream.IsValid()) |
||||
return; |
||||
pxcStatus sts = PXC_STATUS_NO_ERROR; |
||||
for (int profidx = 0; PXC_STATUS_NO_ERROR <= sts; profidx++) |
||||
{ |
||||
PXCCapture::VideoStream::ProfileInfo pinfo; |
||||
sts = m_stream->QueryProfile(profidx, &pinfo); |
||||
if (PXC_STATUS_NO_ERROR > sts) |
||||
break; |
||||
if (validProfile(pinfo)) |
||||
m_profiles.push_back(pinfo); |
||||
} |
||||
} |
||||
virtual bool prepareIplImage(PXCImage *pxcImage) = 0; |
||||
}; |
||||
|
||||
class CvIntelPerCStreamImage |
||||
: public CvIntelPerCStreamBase |
||||
{ |
||||
public: |
||||
CvIntelPerCStreamImage() |
||||
{ |
||||
} |
||||
virtual ~CvIntelPerCStreamImage() |
||||
{ |
||||
} |
||||
|
||||
virtual bool initStream(PXCSession *session) |
||||
{ |
||||
if (!initDevice(session)) |
||||
return false; |
||||
initStreamImpl(PXCImage::IMAGE_TYPE_COLOR); |
||||
if (!m_stream.IsValid()) |
||||
return false; |
||||
enumProfiles(); |
||||
return true; |
||||
} |
||||
virtual double getProperty(int propIdx) |
||||
{ |
||||
switch (propIdx) |
||||
{ |
||||
case CV_CAP_PROP_BRIGHTNESS: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_CONTRAST: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_SATURATION: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_HUE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_HUE, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_GAMMA: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_SHARPNESS: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_GAIN: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_BACKLIGHT: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_EXPOSURE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
//Add image stream specific properties
|
||||
} |
||||
return CvIntelPerCStreamBase::getProperty(propIdx); |
||||
} |
||||
virtual bool setProperty(int propIdx, double propVal) |
||||
{ |
||||
switch (propIdx) |
||||
{ |
||||
case CV_CAP_PROP_BRIGHTNESS: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_CONTRAST: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_SATURATION: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_HUE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_HUE, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_GAMMA: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_SHARPNESS: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_GAIN: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_BACKLIGHT: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_EXPOSURE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE, (float)propVal)); |
||||
} |
||||
break; |
||||
//Add image stream specific properties
|
||||
} |
||||
return CvIntelPerCStreamBase::setProperty(propIdx, propVal); |
||||
} |
||||
public: |
||||
IplImage* retrieveFrame() |
||||
{ |
||||
return m_frame.retrieveFrame(); |
||||
} |
||||
protected: |
||||
FrameInternal m_frame; |
||||
bool prepareIplImage(PXCImage *pxcImage) |
||||
{ |
||||
if (NULL == pxcImage) |
||||
return false; |
||||
PXCImage::ImageInfo info; |
||||
pxcImage->QueryInfo(&info); |
||||
|
||||
PXCImage::ImageData data; |
||||
pxcImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::COLOR_FORMAT_RGB24, &data); |
||||
|
||||
if (PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type) |
||||
return false; |
||||
|
||||
cv::Mat temp(info.height, info.width, CV_8UC3, data.planes[0], data.pitches[0]); |
||||
temp.copyTo(m_frame.m_mat); |
||||
|
||||
pxcImage->ReleaseAccess(&data); |
||||
return true; |
||||
} |
||||
}; |
||||
|
||||
class CvIntelPerCStreamDepth |
||||
: public CvIntelPerCStreamBase |
||||
{ |
||||
public: |
||||
CvIntelPerCStreamDepth() |
||||
{ |
||||
} |
||||
virtual ~CvIntelPerCStreamDepth() |
||||
{ |
||||
} |
||||
|
||||
virtual bool initStream(PXCSession *session) |
||||
{ |
||||
if (!initDevice(session)) |
||||
return false; |
||||
initStreamImpl(PXCImage::IMAGE_TYPE_DEPTH); |
||||
if (!m_stream.IsValid()) |
||||
return false; |
||||
enumProfiles(); |
||||
return true; |
||||
} |
||||
virtual double getProperty(int propIdx) |
||||
{ |
||||
switch (propIdx) |
||||
{ |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0; |
||||
float fret = 0.0f; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, &fret)) |
||||
return (double)fret; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0f; |
||||
PXCPointF32 ptf; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf)) |
||||
return (double)ptf.x; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return 0.0f; |
||||
PXCPointF32 ptf; |
||||
if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf)) |
||||
return (double)ptf.y; |
||||
return 0.0; |
||||
} |
||||
break; |
||||
//Add depth stream specific properties
||||
} |
||||
return CvIntelPerCStreamBase::getProperty(propIdx); |
||||
} |
||||
virtual bool setProperty(int propIdx, double propVal) |
||||
{ |
||||
switch (propIdx) |
||||
{ |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE, (float)propVal)); |
||||
} |
||||
break; |
||||
case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD: |
||||
{ |
||||
if (!m_device.IsValid()) |
||||
return false; |
||||
return (PXC_STATUS_NO_ERROR == m_device->SetProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, (float)propVal)); |
||||
} |
||||
break; |
||||
//Add depth stream specific properties
||||
} |
||||
return CvIntelPerCStreamBase::setProperty(propIdx, propVal); |
||||
} |
||||
public: |
||||
IplImage* retrieveDepthFrame() |
||||
{ |
||||
return m_frameDepth.retrieveFrame(); |
||||
} |
||||
IplImage* retrieveIRFrame() |
||||
{ |
||||
return m_frameIR.retrieveFrame(); |
||||
} |
||||
IplImage* retrieveUVFrame() |
||||
{ |
||||
return m_frameUV.retrieveFrame(); |
||||
} |
||||
protected: |
||||
virtual bool validProfile(const PXCCapture::VideoStream::ProfileInfo& pinfo) |
||||
{ |
||||
return (PXCImage::COLOR_FORMAT_DEPTH == pinfo.imageInfo.format); |
||||
} |
||||
protected: |
||||
FrameInternal m_frameDepth; |
||||
FrameInternal m_frameIR; |
||||
FrameInternal m_frameUV; |
||||
|
||||
bool prepareIplImage(PXCImage *pxcImage) |
||||
{ |
||||
if (NULL == pxcImage) |
||||
return false; |
||||
PXCImage::ImageInfo info; |
||||
pxcImage->QueryInfo(&info); |
||||
|
||||
PXCImage::ImageData data; |
||||
pxcImage->AcquireAccess(PXCImage::ACCESS_READ, &data); |
||||
|
||||
if (PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type) |
||||
return false; |
||||
|
||||
if (PXCImage::COLOR_FORMAT_DEPTH != data.format) |
||||
return false; |
||||
|
||||
{ |
||||
cv::Mat temp(info.height, info.width, CV_16SC1, data.planes[0], data.pitches[0]); |
||||
temp.copyTo(m_frameDepth.m_mat); |
||||
} |
||||
{ |
||||
cv::Mat temp(info.height, info.width, CV_16SC1, data.planes[1], data.pitches[1]); |
||||
temp.copyTo(m_frameIR.m_mat); |
||||
} |
||||
{ |
||||
cv::Mat temp(info.height, info.width, CV_32FC2, data.planes[2], data.pitches[2]); |
||||
temp.copyTo(m_frameUV.m_mat); |
||||
} |
||||
|
||||
pxcImage->ReleaseAccess(&data); |
||||
return true; |
||||
} |
||||
}; |
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
class CvCapture_IntelPerC : public CvCapture |
||||
{ |
||||
public: |
||||
CvCapture_IntelPerC(int /*index*/) |
||||
: m_contextOpened(false) |
||||
{ |
||||
pxcStatus sts = PXCSession_Create(&m_session); |
||||
if (PXC_STATUS_NO_ERROR > sts) |
||||
return; |
||||
m_contextOpened = m_imageStream.initStream(m_session); |
||||
m_contextOpened &= m_depthStream.initStream(m_session); |
||||
} |
||||
virtual ~CvCapture_IntelPerC(){} |
||||
|
||||
virtual double getProperty(int propIdx) |
||||
{ |
||||
double propValue = 0; |
||||
int purePropIdx = propIdx & ~CV_CAP_INTELPERC_GENERATORS_MASK; |
||||
if (CV_CAP_INTELPERC_IMAGE_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) |
||||
{ |
||||
propValue = m_imageStream.getProperty(purePropIdx); |
||||
} |
||||
else if (CV_CAP_INTELPERC_DEPTH_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) |
||||
{ |
||||
propValue = m_depthStream.getProperty(purePropIdx); |
||||
} |
||||
else |
||||
{ |
||||
propValue = m_depthStream.getProperty(purePropIdx); |
||||
} |
||||
return propValue; |
||||
} |
||||
virtual bool setProperty(int propIdx, double propVal) |
||||
{ |
||||
bool isSet = false; |
||||
int purePropIdx = propIdx & ~CV_CAP_INTELPERC_GENERATORS_MASK; |
||||
if (CV_CAP_INTELPERC_IMAGE_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) |
||||
{ |
||||
isSet = m_imageStream.setProperty(purePropIdx, propVal); |
||||
} |
||||
else if (CV_CAP_INTELPERC_DEPTH_GENERATOR == (propIdx & CV_CAP_INTELPERC_GENERATORS_MASK)) |
||||
{ |
||||
isSet = m_depthStream.setProperty(purePropIdx, propVal); |
||||
} |
||||
else |
||||
{ |
||||
isSet = m_depthStream.setProperty(purePropIdx, propVal); |
||||
} |
||||
return isSet; |
||||
} |
||||
|
||||
bool grabFrame() |
||||
{ |
||||
if (!isOpened()) |
||||
return false; |
||||
|
||||
bool isGrabbed = false; |
||||
if (m_depthStream.isValid()) |
||||
isGrabbed = m_depthStream.grabFrame(); |
||||
if ((m_imageStream.isValid()) && (-1 != m_imageStream.getProfileIDX())) |
||||
isGrabbed &= m_imageStream.grabFrame(); |
||||
|
||||
return isGrabbed; |
||||
} |
||||
|
||||
virtual IplImage* retrieveFrame(int outputType) |
||||
{ |
||||
IplImage* image = 0; |
||||
switch (outputType) |
||||
{ |
||||
case CV_CAP_INTELPERC_DEPTH_MAP: |
||||
image = m_depthStream.retrieveDepthFrame(); |
||||
break; |
||||
case CV_CAP_INTELPERC_UVDEPTH_MAP: |
||||
image = m_depthStream.retrieveUVFrame(); |
||||
break; |
||||
case CV_CAP_INTELPERC_IR_MAP: |
||||
image = m_depthStream.retrieveIRFrame(); |
||||
break; |
||||
case CV_CAP_INTELPERC_IMAGE: |
||||
image = m_imageStream.retrieveFrame(); |
||||
break; |
||||
} |
||||
CV_Assert(NULL != image); |
||||
return image; |
||||
} |
||||
|
||||
bool isOpened() const |
||||
{ |
||||
return m_contextOpened; |
||||
} |
||||
protected: |
||||
bool m_contextOpened; |
||||
|
||||
PXCSmartPtr<PXCSession> m_session; |
||||
CvIntelPerCStreamImage m_imageStream; |
||||
CvIntelPerCStreamDepth m_depthStream; |
||||
}; |
||||
|
||||
|
||||
CvCapture* cvCreateCameraCapture_IntelPerC(int index) |
||||
{ |
||||
CvCapture_IntelPerC* capture = new CvCapture_IntelPerC(index); |
||||
|
||||
if( capture->isOpened() ) |
||||
return capture; |
||||
|
||||
delete capture; |
||||
return 0; |
||||
} |
||||
|
||||
|
||||
#endif //HAVE_INTELPERC
|
File diff suppressed because it is too large
@@ -0,0 +1,185 @@
||||
/*M/////////////////////////////////////////////////////////////////////////////////////// |
||||
// |
||||
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. |
||||
// |
||||
// By downloading, copying, installing or using the software you agree to this license. |
||||
// If you do not agree to this license, do not download, install, |
||||
// copy or use the software. |
||||
// |
||||
// |
||||
// License Agreement |
||||
// For Open Source Computer Vision Library |
||||
// |
||||
// Copyright (C) 2013, Intel Corporation, all rights reserved. |
||||
// Third party copyrights are property of their respective owners. |
||||
// |
||||
// Redistribution and use in source and binary forms, with or without modification, |
||||
// are permitted provided that the following conditions are met: |
||||
// |
||||
// * Redistribution's of source code must retain the above copyright notice, |
||||
// this list of conditions and the following disclaimer. |
||||
// |
||||
// * Redistribution's in binary form must reproduce the above copyright notice, |
||||
// this list of conditions and the following disclaimer in the documentation |
||||
// and/or other materials provided with the distribution. |
||||
// |
||||
// * The name of the copyright holders may not be used to endorse or promote products |
||||
// derived from this software without specific prior written permission. |
||||
// |
||||
// This software is provided by the copyright holders and contributors "as is" and |
||||
// any express or implied warranties, including, but not limited to, the implied |
||||
// warranties of merchantability and fitness for a particular purpose are disclaimed. |
||||
// In no event shall the Intel Corporation or contributors be liable for any direct, |
||||
// indirect, incidental, special, exemplary, or consequential damages |
||||
// (including, but not limited to, procurement of substitute goods or services; |
||||
// loss of use, data, or profits; or business interruption) however caused |
||||
// and on any theory of liability, whether in contract, strict liability, |
||||
// or tort (including negligence or otherwise) arising in any way out of |
||||
// the use of this software, even if advised of the possibility of such damage. |
||||
// |
||||
//M*/ |
||||
/////////////////////////////////////////////////////////////////////////////////////////////////// |
||||
/////////////////////////////////Macro for border type//////////////////////////////////////////// |
||||
///////////////////////////////////////////////////////////////////////////////////////////////// |
||||
|
||||
#ifdef BORDER_CONSTANT |
||||
//CCCCCC|abcdefgh|CCCCCCC |
||||
#define EXTRAPOLATE(x, maxV) |
||||
#elif defined BORDER_REPLICATE |
||||
//aaaaaa|abcdefgh|hhhhhhh |
||||
#define EXTRAPOLATE(x, maxV) \ |
||||
{ \ |
||||
(x) = max(min((x), (maxV) - 1), 0); \ |
||||
} |
||||
#elif defined BORDER_WRAP |
||||
//cdefgh|abcdefgh|abcdefg |
||||
#define EXTRAPOLATE(x, maxV) \ |
||||
{ \ |
||||
(x) = ( (x) + (maxV) ) % (maxV); \ |
||||
} |
||||
#elif defined BORDER_REFLECT |
||||
//fedcba|abcdefgh|hgfedcb |
||||
#define EXTRAPOLATE(x, maxV) \ |
||||
{ \ |
||||
(x) = min(((maxV)-1)*2-(x)+1, max((x),-(x)-1) ); \ |
||||
} |
||||
#elif defined BORDER_REFLECT_101 |
||||
//gfedcb|abcdefgh|gfedcba |
||||
#define EXTRAPOLATE(x, maxV) \ |
||||
{ \ |
||||
(x) = min(((maxV)-1)*2-(x), max((x),-(x)) ); \ |
||||
} |
||||
#else |
||||
#error No extrapolation method |
||||
#endif |
||||
|
||||
#define SRC(_x,_y) CONVERT_SRCTYPE(((global SRCTYPE*)(Src+(_y)*SrcPitch))[_x]) |
||||
|
||||
#ifdef BORDER_CONSTANT |
||||
//CCCCCC|abcdefgh|CCCCCCC |
||||
#define ELEM(_x,_y,r_edge,t_edge,const_v) (_x)<0 | (_x) >= (r_edge) | (_y)<0 | (_y) >= (t_edge) ? (const_v) : SRC((_x),(_y)) |
||||
#else |
||||
#define ELEM(_x,_y,r_edge,t_edge,const_v) SRC((_x),(_y)) |
||||
#endif |
||||
|
||||
#define DST(_x,_y) (((global DSTTYPE*)(Dst+DstOffset+(_y)*DstPitch))[_x]) |
||||
|
||||
//horizontal and vertical filter kernels |
||||
//should be defined on host during compile time to avoid overhead |
||||
__constant uint mat_kernelX[] = {KERNEL_MATRIX_X}; |
||||
__constant uint mat_kernelY[] = {KERNEL_MATRIX_Y}; |
||||
|
||||
__kernel __attribute__((reqd_work_group_size(BLK_X,BLK_Y,1))) void sep_filter_singlepass |
||||
( |
||||
__global uchar* Src, |
||||
const uint SrcPitch, |
||||
const int srcOffsetX, |
||||
const int srcOffsetY, |
||||
__global uchar* Dst, |
||||
const int DstOffset, |
||||
const uint DstPitch, |
||||
int width, |
||||
int height, |
||||
int dstWidth, |
||||
int dstHeight |
||||
) |
||||
{ |
||||
//RADIUSX, RADIUSY are filter dimensions |
||||
//BLK_X, BLK_Y are local workgroup sizes
||||
//all these should be defined on host during compile time |
||||
//first lsmem array for source pixels used in first pass, |
||||
//second lsmemDy for storing first pass results |
||||
__local WORKTYPE lsmem[BLK_Y+2*RADIUSY][BLK_X+2*RADIUSX]; |
||||
__local WORKTYPE lsmemDy[BLK_Y][BLK_X+2*RADIUSX]; |
||||
|
||||
//get local and global ids - used as image and local memory array indexes |
||||
int lix = get_local_id(0); |
||||
int liy = get_local_id(1); |
||||
|
||||
int x = (int)get_global_id(0); |
||||
int y = (int)get_global_id(1); |
||||
|
||||
//calculate pixel position in source image taking image offset into account |
||||
int srcX = x + srcOffsetX - RADIUSX; |
||||
int srcY = y + srcOffsetY - RADIUSY; |
||||
int xb = srcX; |
||||
int yb = srcY; |
||||
|
||||
//extrapolate coordinates, if needed |
||||
//and read my own source pixel into local memory |
||||
//with account for extra border pixels, which will be read by starting workitems |
||||
int clocY = liy; |
||||
int cSrcY = srcY; |
||||
do |
||||
{ |
||||
int yb = cSrcY; |
||||
EXTRAPOLATE(yb, (height)); |
||||
|
||||
int clocX = lix; |
||||
int cSrcX = srcX; |
||||
do |
||||
{ |
||||
int xb = cSrcX; |
||||
EXTRAPOLATE(xb,(width)); |
||||
lsmem[clocY][clocX] = ELEM(xb, yb, (width), (height), 0 ); |
||||
|
||||
clocX += BLK_X; |
||||
cSrcX += BLK_X; |
||||
} |
||||
while(clocX < BLK_X+(RADIUSX*2)); |
||||
|
||||
clocY += BLK_Y; |
||||
cSrcY += BLK_Y; |
||||
} |
||||
while(clocY < BLK_Y+(RADIUSY*2)); |
||||
barrier(CLK_LOCAL_MEM_FENCE); |
||||
|
||||
//do vertical filter pass |
||||
//and store intermediate results to second local memory array |
||||
int i; |
||||
WORKTYPE sum = 0.0f; |
||||
int clocX = lix; |
||||
do |
||||
{ |
||||
sum = 0.0f; |
||||
for(i=0; i<=2*RADIUSY; i++) |
||||
sum = mad(lsmem[liy+i][clocX], as_float(mat_kernelY[i]), sum); |
||||
lsmemDy[liy][clocX] = sum; |
||||
clocX += BLK_X; |
||||
} |
||||
while(clocX < BLK_X+(RADIUSX*2)); |
||||
barrier(CLK_LOCAL_MEM_FENCE); |
||||
|
||||
//if this pixel happened to be out of image borders because of global size rounding, |
||||
//then just return |
||||
if( x >= dstWidth || y >=dstHeight ) return; |
||||
|
||||
//do second horizontal filter pass |
||||
//and calculate final result |
||||
sum = 0.0f; |
||||
for(i=0; i<=2*RADIUSX; i++) |
||||
sum = mad(lsmemDy[liy][lix+i], as_float(mat_kernelX[i]), sum); |
||||
|
||||
//store result into destination image |
||||
DST(x,y) = CONVERT_DSTTYPE(sum); |
||||
} |
@@ -0,0 +1,376 @@
||||
// testOpenCVCam.cpp : Defines the entry point for the console application.
|
||||
//
|
||||
|
||||
#include "opencv2/highgui/highgui.hpp" |
||||
|
||||
#include <iostream> |
||||
|
||||
using namespace cv; |
||||
using namespace std; |
||||
|
||||
static bool g_printStreamSetting = false; |
||||
static int g_imageStreamProfileIdx = -1; |
||||
static int g_depthStreamProfileIdx = -1; |
||||
static bool g_irStreamShow = false; |
||||
static double g_imageBrightness = -DBL_MAX; |
||||
static double g_imageContrast = -DBL_MAX; |
||||
static bool g_printTiming = false; |
||||
static bool g_showClosedPoint = false; |
||||
|
||||
|
||||
static int g_closedDepthPoint[2]; |
||||
|
||||
static void printUsage(const char *arg0) |
||||
{ |
||||
const char *filename = arg0; |
||||
while (*filename) |
||||
filename++; |
||||
while ((arg0 <= filename) && ('\\' != *filename) && ('/' != *filename)) |
||||
filename--; |
||||
filename++; |
||||
|
||||
cout << "This program demonstrates usage of camera supported\nby Intel Perceptual computing SDK." << endl << endl; |
||||
cout << "usage: " << filename << "[-ps] [-isp IDX] [-dsp IDX]\n [-ir] [-imb VAL] [-imc VAL]" << endl << endl; |
||||
cout << " -ps, print streams setting and profiles" << endl; |
||||
cout << " -isp IDX, set profile index of the image stream" << endl; |
||||
cout << " -dsp IDX, set profile index of the depth stream" << endl; |
||||
cout << " -ir, show data from IR stream" << endl; |
||||
cout << " -imb VAL, set brighness value for a image stream" << endl; |
||||
cout << " -imc VAL, set contrast value for a image stream" << endl; |
||||
cout << " -pts, print frame index and frame time" << endl; |
||||
cout << " --show-closed, print frame index and frame time" << endl; |
||||
cout << endl; |
||||
} |
||||
|
||||
static void parseCMDLine(int argc, char* argv[]) |
||||
{ |
||||
if( argc == 1 ) |
||||
{ |
||||
printUsage(argv[0]); |
||||
} |
||||
else |
||||
{ |
||||
for( int i = 1; i < argc; i++ ) |
||||
{ |
||||
if ((0 == strcmp(argv[i], "--help")) || (0 == strcmp( argv[i], "-h"))) |
||||
{ |
||||
printUsage(argv[0]); |
||||
exit(0); |
||||
} |
||||
else if ((0 == strcmp( argv[i], "--print-streams")) || (0 == strcmp( argv[i], "-ps"))) |
||||
{ |
||||
g_printStreamSetting = true; |
||||
} |
||||
else if ((0 == strcmp( argv[i], "--image-stream-prof")) || (0 == strcmp( argv[i], "-isp"))) |
||||
{ |
||||
g_imageStreamProfileIdx = atoi(argv[++i]); |
||||
} |
||||
else if ((0 == strcmp( argv[i], "--depth-stream-prof")) || (0 == strcmp( argv[i], "-dsp"))) |
||||
{ |
||||
g_depthStreamProfileIdx = atoi(argv[++i]); |
||||
} |
||||
else if (0 == strcmp( argv[i], "-ir")) |
||||
{ |
||||
g_irStreamShow = true; |
||||
} |
||||
else if (0 == strcmp( argv[i], "-imb")) |
||||
{ |
||||
g_imageBrightness = atof(argv[++i]); |
||||
} |
||||
else if (0 == strcmp( argv[i], "-imc")) |
||||
{ |
||||
g_imageContrast = atof(argv[++i]); |
||||
} |
||||
else if (0 == strcmp(argv[i], "-pts")) |
||||
{ |
||||
g_printTiming = true; |
||||
} |
||||
else if (0 == strcmp(argv[i], "--show-closed")) |
||||
{ |
||||
g_showClosedPoint = true; |
||||
} |
||||
else |
||||
{ |
||||
cout << "Unsupported command line argument: " << argv[i] << "." << endl; |
||||
exit(-1); |
||||
} |
||||
} |
||||
if (g_showClosedPoint && (-1 == g_depthStreamProfileIdx)) |
||||
{ |
||||
cerr << "For --show-closed depth profile has be selected" << endl; |
||||
exit(-1); |
||||
} |
||||
} |
||||
} |
||||
|
||||
static void printStreamProperties(VideoCapture &capture) |
||||
{ |
||||
size_t profilesCount = (size_t)capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_INTELPERC_PROFILE_COUNT); |
||||
cout << "Image stream." << endl; |
||||
cout << " Brightness = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_BRIGHTNESS) << endl; |
||||
cout << " Contrast = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_CONTRAST) << endl; |
||||
cout << " Saturation = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_SATURATION) << endl; |
||||
cout << " Hue = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_HUE) << endl; |
||||
cout << " Gamma = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_GAMMA) << endl; |
||||
cout << " Sharpness = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_SHARPNESS) << endl; |
||||
cout << " Gain = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_GAIN) << endl; |
||||
cout << " Backligh = " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_BACKLIGHT) << endl; |
||||
cout << "Image streams profiles:" << endl; |
||||
for (size_t i = 0; i < profilesCount; i++) |
||||
{ |
||||
capture.set(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_INTELPERC_PROFILE_IDX, (double)i); |
||||
cout << " Profile[" << i << "]: "; |
||||
cout << "width = " << |
||||
(int)capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_FRAME_WIDTH); |
||||
cout << ", height = " << |
||||
(int)capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_FRAME_HEIGHT); |
||||
cout << ", fps = " << |
||||
capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_FPS); |
||||
cout << endl; |
||||
} |
||||
|
||||
profilesCount = (size_t)capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_PROFILE_COUNT); |
||||
cout << "Depth stream." << endl; |
||||
cout << " Low confidence value = " << capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE) << endl; |
||||
cout << " Saturation value = " << capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE) << endl; |
||||
cout << " Confidence threshold = " << capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD) << endl; |
||||
cout << " Focal length = (" << capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ) << ", " |
||||
<< capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT) << ")" << endl; |
||||
cout << "Depth streams profiles:" << endl; |
||||
for (size_t i = 0; i < profilesCount; i++) |
||||
{ |
||||
capture.set(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_PROFILE_IDX, (double)i); |
||||
cout << " Profile[" << i << "]: "; |
||||
cout << "width = " << |
||||
(int)capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_FRAME_WIDTH); |
||||
cout << ", height = " << |
||||
(int)capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_FRAME_HEIGHT); |
||||
cout << ", fps = " << |
||||
capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_FPS); |
||||
cout << endl; |
||||
} |
||||
} |
||||
|
||||
static void imshowImage(const char *winname, Mat &image, VideoCapture &capture) |
||||
{ |
||||
if (g_showClosedPoint) |
||||
{ |
||||
Mat uvMap; |
||||
if (capture.retrieve(uvMap, CAP_INTELPERC_UVDEPTH_MAP)) |
||||
{ |
||||
float *uvmap = (float *)uvMap.ptr() + 2 * (g_closedDepthPoint[0] * uvMap.cols + g_closedDepthPoint[1]); |
||||
int x = (int)((*uvmap) * image.cols); uvmap++; |
||||
int y = (int)((*uvmap) * image.rows); |
||||
|
||||
if ((0 <= x) && (0 <= y)) |
||||
{ |
||||
static const int pointSize = 4; |
||||
for (int row = y; row < min(y + pointSize, image.rows); row++) |
||||
{ |
||||
uchar* ptrDst = image.ptr(row) + x * 3 + 2;//+2 -> Red
|
||||
for (int col = 0; col < min(pointSize, image.cols - x); col++, ptrDst+=3) |
||||
{ |
||||
*ptrDst = 255; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
imshow(winname, image); |
||||
} |
||||
static void imshowIR(const char *winname, Mat &ir) |
||||
{ |
||||
Mat image; |
||||
if (g_showClosedPoint) |
||||
{ |
||||
image.create(ir.rows, ir.cols, CV_8UC3); |
||||
for (int row = 0; row < ir.rows; row++) |
||||
{ |
||||
uchar* ptrDst = image.ptr(row); |
||||
short* ptrSrc = (short*)ir.ptr(row); |
||||
for (int col = 0; col < ir.cols; col++, ptrSrc++) |
||||
{ |
||||
uchar val = (uchar) ((*ptrSrc) >> 2); |
||||
*ptrDst = val; ptrDst++; |
||||
*ptrDst = val; ptrDst++; |
||||
*ptrDst = val; ptrDst++; |
||||
} |
||||
} |
||||
|
||||
static const int pointSize = 4; |
||||
for (int row = g_closedDepthPoint[0]; row < min(g_closedDepthPoint[0] + pointSize, image.rows); row++) |
||||
{ |
||||
uchar* ptrDst = image.ptr(row) + g_closedDepthPoint[1] * 3 + 2;//+2 -> Red
|
||||
for (int col = 0; col < min(pointSize, image.cols - g_closedDepthPoint[1]); col++, ptrDst+=3) |
||||
{ |
||||
*ptrDst = 255; |
||||
} |
||||
} |
||||
} |
||||
else |
||||
{ |
||||
image.create(ir.rows, ir.cols, CV_8UC1); |
||||
for (int row = 0; row < ir.rows; row++) |
||||
{ |
||||
uchar* ptrDst = image.ptr(row); |
||||
short* ptrSrc = (short*)ir.ptr(row); |
||||
for (int col = 0; col < ir.cols; col++, ptrSrc++, ptrDst++) |
||||
{ |
||||
*ptrDst = (uchar) ((*ptrSrc) >> 2); |
||||
} |
||||
} |
||||
} |
||||
|
||||
imshow(winname, image); |
||||
} |
||||
static void imshowDepth(const char *winname, Mat &depth, VideoCapture &capture) |
||||
{ |
||||
short lowValue = (short)capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE); |
||||
short saturationValue = (short)capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE); |
||||
|
||||
Mat image; |
||||
if (g_showClosedPoint) |
||||
{ |
||||
image.create(depth.rows, depth.cols, CV_8UC3); |
||||
for (int row = 0; row < depth.rows; row++) |
||||
{ |
||||
uchar* ptrDst = image.ptr(row); |
||||
short* ptrSrc = (short*)depth.ptr(row); |
||||
for (int col = 0; col < depth.cols; col++, ptrSrc++) |
||||
{ |
||||
if ((lowValue == (*ptrSrc)) || (saturationValue == (*ptrSrc))) |
||||
{ |
||||
*ptrDst = 0; ptrDst++; |
||||
*ptrDst = 0; ptrDst++; |
||||
*ptrDst = 0; ptrDst++; |
||||
} |
||||
else |
||||
{ |
||||
uchar val = (uchar) ((*ptrSrc) >> 2); |
||||
*ptrDst = val; ptrDst++; |
||||
*ptrDst = val; ptrDst++; |
||||
*ptrDst = val; ptrDst++; |
||||
} |
||||
} |
||||
} |
||||
|
||||
static const int pointSize = 4; |
||||
for (int row = g_closedDepthPoint[0]; row < min(g_closedDepthPoint[0] + pointSize, image.rows); row++) |
||||
{ |
||||
uchar* ptrDst = image.ptr(row) + g_closedDepthPoint[1] * 3 + 2;//+2 -> Red
|
||||
for (int col = 0; col < min(pointSize, image.cols - g_closedDepthPoint[1]); col++, ptrDst+=3) |
||||
{ |
||||
*ptrDst = 255; |
||||
} |
||||
} |
||||
} |
||||
else |
||||
{ |
||||
image.create(depth.rows, depth.cols, CV_8UC1); |
||||
for (int row = 0; row < depth.rows; row++) |
||||
{ |
||||
uchar* ptrDst = image.ptr(row); |
||||
short* ptrSrc = (short*)depth.ptr(row); |
||||
for (int col = 0; col < depth.cols; col++, ptrSrc++, ptrDst++) |
||||
{ |
||||
if ((lowValue == (*ptrSrc)) || (saturationValue == (*ptrSrc))) |
||||
*ptrDst = 0; |
||||
else |
||||
*ptrDst = (uchar) ((*ptrSrc) >> 2); |
||||
} |
||||
} |
||||
} |
||||
imshow(winname, image); |
||||
} |
||||
|
||||
int main(int argc, char* argv[]) |
||||
{ |
||||
parseCMDLine(argc, argv); |
||||
|
||||
VideoCapture capture; |
||||
capture.open(CAP_INTELPERC); |
||||
if (!capture.isOpened()) |
||||
{ |
||||
cerr << "Can not open a capture object." << endl; |
||||
return -1; |
||||
} |
||||
|
||||
if (g_printStreamSetting) |
||||
printStreamProperties(capture); |
||||
|
||||
if (-1 != g_imageStreamProfileIdx) |
||||
{ |
||||
if (!capture.set(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_INTELPERC_PROFILE_IDX, (double)g_imageStreamProfileIdx)) |
||||
{ |
||||
cerr << "Can not setup a image stream." << endl; |
||||
return -1; |
||||
} |
||||
} |
||||
if (-1 != g_depthStreamProfileIdx) |
||||
{ |
||||
if (!capture.set(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_PROFILE_IDX, (double)g_depthStreamProfileIdx)) |
||||
{ |
||||
cerr << "Can not setup a depth stream." << endl; |
||||
return -1; |
||||
} |
||||
} |
||||
else if (g_irStreamShow) |
||||
{ |
||||
if (!capture.set(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_INTELPERC_PROFILE_IDX, 0.0)) |
||||
{ |
||||
cerr << "Can not setup a IR stream." << endl; |
||||
return -1; |
||||
} |
||||
} |
||||
else |
||||
{ |
||||
cout << "Streams not selected" << endl; |
||||
return 0; |
||||
} |
||||
|
||||
//Set up additional properties only after the stream profile has been selected
||||
if ( (-10000.0 < g_imageBrightness) && (g_imageBrightness < 10000.0)) |
||||
capture.set(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_BRIGHTNESS, g_imageBrightness); |
||||
if ( (0 < g_imageContrast) && (g_imageContrast < 10000.0)) |
||||
capture.set(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_CONTRAST, g_imageContrast);
||||
|
||||
int frame = 0; |
||||
for(;;frame++) |
||||
{ |
||||
Mat bgrImage; |
||||
Mat depthImage; |
||||
Mat irImage; |
||||
|
||||
if (!capture.grab()) |
||||
{ |
||||
cout << "Can not grab images." << endl; |
||||
return -1; |
||||
} |
||||
|
||||
if ((-1 != g_depthStreamProfileIdx) && (capture.retrieve(depthImage, CAP_INTELPERC_DEPTH_MAP))) |
||||
{ |
||||
if (g_showClosedPoint) |
||||
{ |
||||
double minVal = 0.0; double maxVal = 0.0; |
||||
minMaxIdx(depthImage, &minVal, &maxVal, g_closedDepthPoint); |
||||
} |
||||
imshowDepth("depth image", depthImage, capture); |
||||
} |
||||
if ((g_irStreamShow) && (capture.retrieve(irImage, CAP_INTELPERC_IR_MAP))) |
||||
imshowIR("ir image", irImage); |
||||
if ((-1 != g_imageStreamProfileIdx) && (capture.retrieve(bgrImage, CAP_INTELPERC_IMAGE))) |
||||
imshowImage("color image", bgrImage, capture); |
||||
|
||||
if (g_printTiming) |
||||
{ |
||||
cout << "Image frame: " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_POS_FRAMES) |
||||
<< ", Depth(IR) frame: " << capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_POS_FRAMES) << endl; |
||||
cout << "Image frame: " << capture.get(CAP_INTELPERC_IMAGE_GENERATOR | CAP_PROP_POS_MSEC) |
||||
<< ", Depth(IR) frame: " << capture.get(CAP_INTELPERC_DEPTH_GENERATOR | CAP_PROP_POS_MSEC) << endl; |
||||
} |
||||
if( waitKey(30) >= 0 ) |
||||
break; |
||||
} |
||||
|
||||
return 0; |
||||
} |