videoio: add support for obsensor (Orbbec RGB-D Camera) (#22196)

* videoio: add support for obsensor (Orbbec RGB-D Camera )

* obsensor: code format issues fixed and some code optimized

* obsensor: fix typo and format issues

* obsensor: fix crosses initialization error
pull/22313/head
hzc 2 years ago committed by GitHub
parent ebaf8cc06c
commit fc3e393516
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 3
      CMakeLists.txt
  2. 24
      modules/videoio/CMakeLists.txt
  3. 18
      modules/videoio/cmake/detect_obsensor.cmake
  4. 1
      modules/videoio/cmake/init.cmake
  5. 30
      modules/videoio/include/opencv2/videoio.hpp
  6. 2
      modules/videoio/src/cap_interface.hpp
  7. 103
      modules/videoio/src/cap_obsensor/obsensor_stream_channel_interface.hpp
  8. 505
      modules/videoio/src/cap_obsensor/obsensor_stream_channel_msmf.cpp
  9. 180
      modules/videoio/src/cap_obsensor/obsensor_stream_channel_msmf.hpp
  10. 379
      modules/videoio/src/cap_obsensor/obsensor_stream_channel_v4l2.cpp
  11. 90
      modules/videoio/src/cap_obsensor/obsensor_stream_channel_v4l2.hpp
  12. 265
      modules/videoio/src/cap_obsensor/obsensor_uvc_stream_channel.cpp
  13. 96
      modules/videoio/src/cap_obsensor/obsensor_uvc_stream_channel.hpp
  14. 150
      modules/videoio/src/cap_obsensor_capture.cpp
  15. 66
      modules/videoio/src/cap_obsensor_capture.hpp
  16. 7
      modules/videoio/src/videoio_registry.cpp
  17. 74
      samples/cpp/videocapture_obsensor.cpp

@ -459,6 +459,9 @@ OCV_OPTION(WITH_ONNX "Include Microsoft ONNX Runtime support" OFF
OCV_OPTION(WITH_TIMVX "Include Tim-VX support" OFF
VISIBLE_IF TRUE
VERIFY HAVE_TIMVX)
OCV_OPTION(WITH_OBSENSOR "Include obsensor support (Orbbec RGB-D modules: Astra+/Femto)" ON
VISIBLE_IF (WIN32 AND NOT ARM AND NOT WINRT) OR ( UNIX AND NOT APPLE AND NOT ANDROID)
VERIFY HAVE_OBSENSOR)
# OpenCV build components
# ===================================================

@ -235,6 +235,30 @@ if(TARGET ocv.3rdparty.android_native_camera)
list(APPEND tgts ocv.3rdparty.android_native_camera)
endif()
# Register the obsensor capture backend when detect_obsensor.cmake created
# the ocv.3rdparty.obsensor external target.
if(TARGET ocv.3rdparty.obsensor)
  # Platform-independent capture front-end and the channel interface.
  list(APPEND videoio_srcs
      ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor_capture.cpp)
  list(APPEND videoio_hdrs
      ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor_capture.hpp
      ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_stream_channel_interface.hpp)
  if(HAVE_OBSENSOR_MSMF)
    # Windows: Media Foundation implementation of the UVC stream channel.
    list(APPEND videoio_srcs
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_uvc_stream_channel.cpp
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_stream_channel_msmf.cpp)
    list(APPEND videoio_hdrs
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_uvc_stream_channel.hpp
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_stream_channel_msmf.hpp)
  elseif(HAVE_OBSENSOR_V4L2)
    # Linux: V4L2 implementation of the UVC stream channel.
    list(APPEND videoio_srcs
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_uvc_stream_channel.cpp
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_stream_channel_v4l2.cpp)
    list(APPEND videoio_hdrs
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_uvc_stream_channel.hpp
        ${CMAKE_CURRENT_LIST_DIR}/src/cap_obsensor/obsensor_stream_channel_v4l2.hpp)
  endif()
  list(APPEND tgts ocv.3rdparty.obsensor)
endif()
if(tgts STREQUAL "PRIVATE")
set(tgts "")
endif()

@ -0,0 +1,18 @@
# --- obsensor ---
# Detect platform support for the obsensor (Orbbec RGB-D) backend:
#   Windows -> Media Foundation (requires mfapi.h)
#   Unix    -> V4L2 (requires linux/videodev2.h)
# On success this defines HAVE_OBSENSOR plus one backend-specific flag and
# creates the ocv.3rdparty.obsensor target consumed by modules/videoio.
if(NOT HAVE_OBSENSOR)
  if(WIN32)
    check_include_file(mfapi.h HAVE_MFAPI)
    if(HAVE_MFAPI)
      set(HAVE_OBSENSOR TRUE)
      set(HAVE_OBSENSOR_MSMF TRUE)
      ocv_add_external_target(obsensor "" "" "HAVE_OBSENSOR;HAVE_OBSENSOR_MSMF")
    endif()
  elseif(UNIX)
    check_include_file(linux/videodev2.h HAVE_CAMV4L2)
    if(HAVE_CAMV4L2)
      set(HAVE_OBSENSOR TRUE)
      set(HAVE_OBSENSOR_V4L2 TRUE)
      ocv_add_external_target(obsensor "" "" "HAVE_OBSENSOR;HAVE_OBSENSOR_V4L2")
    endif()
  endif()
endif()

@ -22,6 +22,7 @@ add_backend("realsense" WITH_LIBREALSENSE)
add_backend("ueye" WITH_UEYE)
add_backend("ximea" WITH_XIMEA)
add_backend("xine" WITH_XINE)
add_backend("obsensor" WITH_OBSENSOR)
add_backend("avfoundation" WITH_AVFOUNDATION)
add_backend("ios" WITH_CAP_IOS)

@ -123,6 +123,7 @@ enum VideoCaptureAPIs {
CAP_INTEL_MFX = 2300, //!< Intel MediaSDK
CAP_XINE = 2400, //!< XINE engine (Linux)
CAP_UEYE = 2500, //!< uEye Camera API
CAP_OBSENSOR = 2600, //!< For Orbbec 3D-Sensor device/module (Astra+, Femto)
};
/** @brief cv::VideoCapture generic properties identifier.
@ -654,6 +655,35 @@ enum { CAP_PROP_IMAGES_BASE = 18000,
//! @} Images
/** @name OBSENSOR (for Orbbec 3D-Sensor device/module )
@{
*/
//! OBSENSOR data given from image generator
enum VideoCaptureOBSensorDataType{
    CAP_OBSENSOR_DEPTH_MAP = 0, //!< Depth values in mm (CV_16UC1)
    CAP_OBSENSOR_BGR_IMAGE = 1, //!< Data given from BGR stream generator
    CAP_OBSENSOR_IR_IMAGE = 2   //!< Data given from IR stream generator(CV_16UC1)
};
//! OBSENSOR stream generator
enum VideoCaptureOBSensorGenerators{
    CAP_OBSENSOR_DEPTH_GENERATOR = 1 << 29,
    CAP_OBSENSOR_IMAGE_GENERATOR = 1 << 28,
    CAP_OBSENSOR_IR_GENERATOR    = 1 << 27,
    //! Combined mask of all generator flags. Built with bitwise OR (the flags
    //! occupy distinct bits, so the value is identical to the former '+' form,
    //! but OR states the intent of a bitmask).
    CAP_OBSENSOR_GENERATORS_MASK = CAP_OBSENSOR_DEPTH_GENERATOR | CAP_OBSENSOR_IMAGE_GENERATOR | CAP_OBSENSOR_IR_GENERATOR
};
//!OBSENSOR properties
enum VideoCaptureOBSensorProperties{
    // INTRINSIC
    CAP_PROP_OBSENSOR_INTRINSIC_FX=26001,
    CAP_PROP_OBSENSOR_INTRINSIC_FY=26002,
    CAP_PROP_OBSENSOR_INTRINSIC_CX=26003,
    CAP_PROP_OBSENSOR_INTRINSIC_CY=26004,
};
//! @} OBSENSOR
//! @} videoio_flags_others

@ -411,6 +411,8 @@ Ptr<IVideoCapture> createXINECapture(const std::string &filename);
Ptr<IVideoCapture> createAndroidCapture_cam( int index );
Ptr<IVideoCapture> createAndroidCapture_file(const std::string &filename);
Ptr<IVideoCapture> create_obsensor_capture(int index);
bool VideoCapture_V4L_waitAny(
const std::vector<VideoCapture>& streams,
CV_OUT std::vector<int>& ready,

@ -0,0 +1,103 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_INTERFACE_HPP
#define OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_INTERFACE_HPP
#ifdef HAVE_OBSENSOR
#include "../precomp.hpp" // #include "precomp.hpp : compile error on linux
#include <functional>
#include <vector>
#include <memory>
namespace cv {
namespace obsensor {
// Stream categories a channel can produce.
enum StreamType
{
    OBSENSOR_STREAM_IR = 1,
    OBSENSOR_STREAM_COLOR = 2,
    OBSENSOR_STREAM_DEPTH = 3,
};
// Pixel formats of delivered frames. The non-contiguous values presumably
// mirror device-side format codes — confirm against the UVC layer.
enum FrameFormat
{
    FRAME_FORMAT_UNKNOWN = -1,
    FRAME_FORMAT_YUYV = 0,
    FRAME_FORMAT_MJPG = 5,
    FRAME_FORMAT_Y16 = 8,
};
// Property ids passed to set/getProperty().
enum PropertyId
{
    DEPTH_TO_COLOR_ALIGN = 42,
    CAMERA_PARAM = 1001,
};
// A single delivered frame. `data` points into a buffer owned by the channel;
// it is only valid for the duration of the frame callback.
struct Frame
{
    FrameFormat format;
    uint32_t width;     // in pixels
    uint32_t height;    // in pixels
    uint32_t dataSize;  // valid bytes at `data`
    uint8_t* data;
};
// Requested stream configuration used to select a native mode.
struct StreamProfile
{
    uint32_t width;
    uint32_t height;
    uint32_t fps;
    FrameFormat format;
};
// Raw camera parameter blob read via the CAMERA_PARAM property.
// NOTE(review): field meanings are opaque here (p0..p7); presumably packed
// intrinsics/extrinsics/distortion from the device firmware — confirm against
// the protocol documentation before relying on individual fields.
struct CameraParam
{
    float p0[4];
    float p1[4];
    float p2[9];
    float p3[3];
    float p4[5];
    float p5[5];
    uint32_t p6[2];
    uint32_t p7[2];
};
// Invoked for every delivered frame; the Frame and its data are only valid
// during the call.
typedef std::function<void(Frame*)> FrameCallback;
// Abstract transport-independent stream channel (one per device stream).
class IStreamChannel
{
public:
    virtual ~IStreamChannel() noexcept {}
    // Begin streaming with the given profile; frames arrive on frameCallback.
    virtual void start(const StreamProfile& profile, FrameCallback frameCallback) = 0;
    // Stop streaming; safe to call when not streaming.
    virtual void stop() = 0;
    // Write a vendor property; returns false on transport failure.
    virtual bool setProperty(int propId, const uint8_t* data, uint32_t dataSize) = 0;
    // Read a vendor property into recvData; *recvDataSize is set to the byte count.
    virtual bool getProperty(int propId, uint8_t* recvData, uint32_t* recvDataSize) = 0;
    virtual StreamType streamType() const = 0;
};
// "StreamChannelGroup" mean a group of stream channels from same one physical device
std::vector<Ptr<IStreamChannel>> getStreamChannelGroup(uint32_t groupIdx = 0);
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR
#endif // OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_INTERFACE_HPP

@ -0,0 +1,505 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef HAVE_OBSENSOR_MSMF
#include "obsensor_stream_channel_msmf.hpp"
#include <shlwapi.h> // QISearch
#pragma warning(disable : 4503)
#pragma comment(lib, "mfplat")
#pragma comment(lib, "mf")
#pragma comment(lib, "mfuuid")
#pragma comment(lib, "Strmiids")
#pragma comment(lib, "Mfreadwrite")
#pragma comment(lib, "dxgi")
namespace cv {
namespace obsensor {
// Convert a NUL-terminated wide (UTF-16) string to a UTF-8 std::string.
// Returns an empty string when the conversion fails or the input is empty.
std::string wideCharToUTF8(const WCHAR* s)
{
    // First pass: ask for the required size (includes the terminating NUL).
    const int required = WideCharToMultiByte(CP_UTF8, 0, s, -1, nullptr, 0, nullptr, nullptr);
    if (required == 0)
        return "";
    // Second pass: convert into a string sized without the NUL terminator.
    std::string result(static_cast<size_t>(required) - 1, '\0');
    WideCharToMultiByte(CP_UTF8, 0, s, -1, &result[0], required, nullptr, nullptr);
    return result;
}
// Render an HRESULT for log output as: HResult 0x<hex>: "<system message>".
std::string hr_to_string(HRESULT hr)
{
    std::ostringstream msg;
    msg << "HResult 0x" << std::hex << hr << ": \"" << _com_error(hr).ErrorMessage() << "\"";
    return msg.str();
}
#define HR_FAILED_RETURN(x) \
if (x < 0) \
{ \
CV_LOG_INFO(NULL, "Media Foundation error return: " << hr_to_string(x)); \
return; \
}
#define HR_FAILED_LOG(x) \
if (x < 0) \
{ \
CV_LOG_INFO(NULL, "Media Foundation error return: " << hr_to_string(x)); \
}
#define HR_FAILED_EXEC(x, statement) \
if (x < 0) \
{ \
CV_LOG_INFO(NULL, "Media Foundation error return: " << hr_to_string(x)); \
statement; \
}
// Split `string` on every occurrence of `separator`; empty tokens are kept,
// so the result always has at least one element.
std::vector<std::string> stringSplit(std::string string, char separator)
{
    std::vector<std::string> tokens;
    std::string::size_type i1 = 0;
    while (true)
    {
        auto i2 = string.find(separator, i1);
        if (i2 == std::string::npos)
        {
            tokens.push_back(string.substr(i1));
            return tokens;
        }
        tokens.push_back(string.substr(i1, i2 - i1));
        i1 = i2 + 1;
    }
}
// Parse a Windows device symbolic link of the form
//   \\?\usb#vid_XXXX&pid_XXXX&mi_XX#<instance-id>#{device-guid}
// into its components. Returns false when the link is not a USB/HID video
// device or any mandatory field is malformed.
// `mi` is reset to 0xFFFF ("not present") so callers never read an
// uninitialized value when the link carries no mi_ token.
bool parseUvcDeviceSymbolicLink(const std::string& symbolicLink, uint16_t& vid, uint16_t& pid, uint16_t& mi, std::string& unique_id,
    std::string& device_guid)
{
    std::string lowerStr = symbolicLink;
    for (size_t i = 0; i < lowerStr.length(); i++)
    {
        lowerStr[i] = (char)tolower(lowerStr[i]);
    }
    auto tokens = stringSplit(lowerStr, '#');
    if (tokens.size() < 1 || (tokens[0] != R"(\\?\usb)" && tokens[0] != R"(\\?\hid)"))
        return false; // Not a USB device
    if (tokens.size() < 3)
    {
        return false;
    }
    auto ids = stringSplit(tokens[1], '&');
    // Need at least "vid_XXXX" and "pid_XXXX"; the original indexed ids[1]
    // without this check and could read out of bounds.
    if (ids.size() < 2)
    {
        return false;
    }
    if (ids[0].size() != 8 || ids[0].substr(0, 4) != "vid_" || !(std::istringstream(ids[0].substr(4, 4)) >> std::hex >> vid))
    {
        return false;
    }
    if (ids[1].size() != 8 || ids[1].substr(0, 4) != "pid_" || !(std::istringstream(ids[1].substr(4, 4)) >> std::hex >> pid))
    {
        return false;
    }
    mi = 0xFFFF; // default when the optional interface number is absent
    if (ids.size() > 2 && (ids[2].size() != 5 || ids[2].substr(0, 3) != "mi_" || !(std::istringstream(ids[2].substr(3, 2)) >> mi)))
    {
        return false;
    }
    ids = stringSplit(tokens[2], '&');
    if (ids.size() == 0)
    {
        return false;
    }
    if (ids.size() > 2)
        unique_id = ids[1];
    else
        unique_id = "";
    // The GUID is the 4th '#'-token; the original guarded tokens[3] with
    // `tokens.size() >= 3`, reading past the end when exactly 3 tokens exist.
    if (tokens.size() > 3)
        device_guid = tokens[3];
    return true;
}
#pragma pack(push, 1)
// Wrapper over a value stored in big-endian byte order (e.g. a GUID Data1
// field read as a fourcc). Conversion to T reverses the bytes into native
// (little-endian) order. Packed so it can overlay raw wire/struct bytes.
template <class T>
class big_endian
{
    T be_value;
public:
    operator T() const
    {
        T native = 0;
        const char* src = reinterpret_cast<const char*>(&be_value);
        char* dst = reinterpret_cast<char*>(&native);
        for (unsigned int i = 0; i < sizeof(T); ++i)
            dst[sizeof(T) - 1 - i] = src[i];
        return native;
    }
};
#pragma pack(pop)
// Balances the CoInitialize() performed in the constructor.
// NOTE(review): MFStartup() in the ctor has no matching MFShutdown() here —
// confirm this is intentional (process-lifetime singleton).
MFContext::~MFContext(void)
{
    CoUninitialize();
}
// Process-wide singleton; thread-safe lazy initialization via C++11 magic statics.
MFContext& MFContext::getInstance()
{
    static MFContext instance;
    return instance;
}
// Enumerate all UVC video-capture devices visible to Media Foundation and
// return those whose symbolic link parses as a USB/HID video device.
std::vector<UvcDeviceInfo> MFContext::queryUvcDeviceInfoList()
{
    std::vector<UvcDeviceInfo> uvcDevList;
    ComPtr<IMFAttributes> pAttributes = nullptr;
    MFCreateAttributes(&pAttributes, 1);
    // Restrict enumeration to video-capture sources.
    pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
    // pAttributes->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY, KSCATEGORY_SENSOR_CAMERA);
    // NOTE(review): the ppDevices array allocated by MFEnumDeviceSources is
    // never released with CoTaskMemFree — possible leak; confirm.
    IMFActivate** ppDevices;
    uint32_t numDevices;
    MFEnumDeviceSources(pAttributes.Get(), &ppDevices, &numDevices);
    for (uint32_t i = 0; i < numDevices; ++i)
    {
        ComPtr<IMFActivate> pDevice;
        pDevice = ppDevices[i];
        // Symbolic link carries vid/pid/mi/uid; strings are allocated by MF
        // and must be freed with CoTaskMemFree.
        WCHAR* wCharStr = nullptr;
        uint32_t length;
        pDevice->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &wCharStr, &length);
        auto symbolicLink = wideCharToUTF8(wCharStr);
        CoTaskMemFree(wCharStr);
        pDevice->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &wCharStr, &length);
        auto name = wideCharToUTF8(wCharStr);
        CoTaskMemFree(wCharStr);
        uint16_t vid, pid, mi;
        std::string uid, guid;
        // Skip devices whose link does not look like a USB/HID video device.
        if (!parseUvcDeviceSymbolicLink(symbolicLink, vid, pid, mi, uid, guid))
            continue;
        uvcDevList.emplace_back(UvcDeviceInfo({ symbolicLink, name, uid, vid, pid, mi }));
        CV_LOG_INFO(NULL, "UVC device found: name=" << name << ", vid=" << vid << ", pid=" << pid << ", mi=" << mi << ", uid=" << uid << ", guid=" << guid);
    }
    return uvcDevList;
}
// Factory: wrap a discovered UVC device in an MSMF-backed stream channel.
Ptr<IStreamChannel> MFContext::createStreamChannel(const UvcDeviceInfo& devInfo)
{
    return makePtr<MSMFStreamChannel>(devInfo);
}
// Initialize COM and Media Foundation for this process; MF startup failure is fatal.
MFContext::MFContext()
{
    CoInitialize(0);
    CV_Assert(SUCCEEDED(MFStartup(MF_VERSION)));
}
// Build the Media Foundation pipeline for one UVC device: device source,
// async source reader, camera-control interfaces, and the vendor-specific
// extension unit (XU) node used for custom get/setXu controls.
// On any HRESULT failure the ctor logs and returns early, leaving the
// channel partially initialized (members are ComPtr, so cleanup is safe).
MSMFStreamChannel::MSMFStreamChannel(const UvcDeviceInfo& devInfo) :
    IUvcStreamChannel(devInfo),
    mfContext_(MFContext::getInstance()),
    xuNodeId_(-1)
{
    HR_FAILED_RETURN(MFCreateAttributes(&deviceAttrs_, 2));
    HR_FAILED_RETURN(deviceAttrs_->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID));
    // Convert the UTF-8 symbolic link to UTF-16 for Media Foundation.
    // std::vector gives RAII (no delete[] needed on early-return paths) and
    // the destination size is passed in WCHARs as MultiByteToWideChar
    // requires; the previous code passed a byte count (len * sizeof(WCHAR)),
    // overstating the allocated buffer.
    std::vector<WCHAR> symLink(devInfo_.id.length() + 1, L'\0');
    MultiByteToWideChar(CP_UTF8, 0, devInfo_.id.c_str(), (int)devInfo_.id.length() + 1, symLink.data(), (int)symLink.size());
    HR_FAILED_RETURN(deviceAttrs_->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, symLink.data()));
    HR_FAILED_RETURN(MFCreateDeviceSource(deviceAttrs_.Get(), &deviceSource_));
    HR_FAILED_RETURN(deviceSource_->QueryInterface(__uuidof(IAMCameraControl), reinterpret_cast<void**>(&cameraControl_)));
    HR_FAILED_RETURN(deviceSource_->QueryInterface(__uuidof(IAMVideoProcAmp), reinterpret_cast<void**>(&videoProcAmp_)));
    // Async source reader: OnReadSample/OnEvent/OnFlush callbacks land on this object.
    HR_FAILED_RETURN(MFCreateAttributes(&readerAttrs_, 3));
    HR_FAILED_RETURN(readerAttrs_->SetUINT32(MF_SOURCE_READER_DISCONNECT_MEDIASOURCE_ON_SHUTDOWN, false));
    HR_FAILED_RETURN(readerAttrs_->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, true));
    HR_FAILED_RETURN(readerAttrs_->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, static_cast<IUnknown*>(this)));
    HR_FAILED_RETURN(MFCreateSourceReaderFromMediaSource(deviceSource_.Get(), readerAttrs_.Get(), &streamReader_));
    HR_FAILED_RETURN(streamReader_->SetStreamSelection(static_cast<DWORD>(MF_SOURCE_READER_ALL_STREAMS), true));
    // Locate the vendor-specific (device-specific) node in the KS topology;
    // it exposes the extension unit used by setXu/getXu.
    HR_FAILED_RETURN(deviceSource_->QueryInterface(__uuidof(IKsTopologyInfo), reinterpret_cast<void**>(&xuKsTopologyInfo_)));
    DWORD nNodes = 0;
    HR_FAILED_RETURN(xuKsTopologyInfo_->get_NumNodes(&nNodes));
    for (DWORD i = 0; i < nNodes; i++)
    {
        GUID nodeType;
        HR_FAILED_EXEC(xuKsTopologyInfo_->get_NodeType(i, &nodeType), { continue; })
        if (nodeType == KSNODETYPE_DEV_SPECIFIC)
        {
            xuNodeId_ = i;
        }
    }
    if (xuNodeId_ != -1)
    {
        HR_FAILED_RETURN(xuKsTopologyInfo_->CreateNodeInstance(xuNodeId_, IID_IUnknown, reinterpret_cast<LPVOID*>(&xuNodeInstance_)));
        HR_FAILED_RETURN(xuNodeInstance_->QueryInterface(__uuidof(IKsControl), reinterpret_cast<void**>(&xuKsControl_)));
    }
    // Depth channels get an extra post-processing stage (set up by the base class).
    if (streamType_ == OBSENSOR_STREAM_DEPTH)
    {
        initDepthFrameProcessor();
    }
}
// Stop any active stream, then explicitly drop every COM interface in a
// fixed order (reader before source, XU interfaces last-acquired first).
MSMFStreamChannel::~MSMFStreamChannel()
{
    stop();
    if (cameraControl_)
    {
        cameraControl_.Release();
    }
    if (videoProcAmp_)
    {
        videoProcAmp_.Release();
    }
    if (streamReader_)
    {
        streamReader_.Release();
    }
    if (readerAttrs_)
    {
        readerAttrs_.Release();
    }
    if (deviceAttrs_)
    {
        deviceAttrs_.Release();
    }
    if (deviceSource_)
    {
        deviceSource_.Release();
    }
    if (xuKsTopologyInfo_)
    {
        xuKsTopologyInfo_.Release();
    }
    if (xuNodeInstance_)
    {
        xuNodeInstance_.Release();
    }
    if (xuKsControl_)
    {
        xuKsControl_.Release();
    }
}
void MSMFStreamChannel::start(const StreamProfile& profile, FrameCallback frameCallback)
{
ComPtr<IMFMediaType> mediaType = nullptr;
unsigned int width, height, fps;
FrameRate frameRateMin, frameRateMax;
bool quit = false;
frameCallback_ = frameCallback;
currentProfile_ = profile;
currentStreamIndex_ = -1;
for (uint8_t index = 0; index <= 5; index++)
{
for (uint32_t k = 0;; k++)
{
HR_FAILED_EXEC(streamReader_->GetNativeMediaType(index, k, &mediaType), { continue; })
GUID subtype;
HR_FAILED_RETURN(mediaType->GetGUID(MF_MT_SUBTYPE, &subtype));
HR_FAILED_RETURN(MFGetAttributeSize(mediaType.Get(), MF_MT_FRAME_SIZE, &width, &height));
HR_FAILED_RETURN(MFGetAttributeRatio(mediaType.Get(), MF_MT_FRAME_RATE_RANGE_MIN, &frameRateMin.numerator, &frameRateMin.denominator));
HR_FAILED_RETURN(MFGetAttributeRatio(mediaType.Get(), MF_MT_FRAME_RATE_RANGE_MAX, &frameRateMax.numerator, &frameRateMax.denominator));
if (static_cast<float>(frameRateMax.numerator) / frameRateMax.denominator < static_cast<float>(frameRateMin.numerator) / frameRateMin.denominator)
{
std::swap(frameRateMax, frameRateMin);
}
fps = frameRateMax.numerator / frameRateMax.denominator;
uint32_t device_fourcc = reinterpret_cast<const big_endian<uint32_t> &>(subtype.Data1);
if (width == profile.width &&
height == profile.height &&
fps == profile.fps &&
frameFourccToFormat(device_fourcc) == profile.format)
{
HR_FAILED_RETURN(streamReader_->SetCurrentMediaType(index, nullptr, mediaType.Get()));
HR_FAILED_RETURN(streamReader_->SetStreamSelection(index, true));
streamReader_->ReadSample(index, 0, nullptr, nullptr, nullptr, nullptr);
streamState_ = STREAM_STARTING;
currentStreamIndex_ = index;
quit = true;
// wait for frame
std::unique_lock<std::mutex> lock(streamStateMutex_);
auto success = streamStateCv_.wait_for(lock, std::chrono::milliseconds(3000), [&]() {
return streamState_ == STREAM_STARTED;
});
if (!success)
{
stop();
}
break;
}
mediaType.Release();
}
if (quit)
{
break;
}
}
streamState_ = quit ? streamState_ : STREAM_STOPED;
}
// Stop streaming: deselect the stream, flush the reader, and wait up to 1s
// for the asynchronous OnFlush() callback to report STREAM_STOPED.
// No-op when the channel is not starting/started.
void MSMFStreamChannel::stop()
{
    if (streamState_ == STREAM_STARTING || streamState_ == STREAM_STARTED)
    {
        streamState_ = STREAM_STOPPING;
        streamReader_->SetStreamSelection(currentStreamIndex_, false);
        streamReader_->Flush(currentStreamIndex_);
        // Proceed even on timeout; the destructor releases the reader anyway.
        std::unique_lock<std::mutex> lk(streamStateMutex_);
        streamStateCv_.wait_for(lk, std::chrono::milliseconds(1000), [&]() {
            return streamState_ == STREAM_STOPED;
        });
    }
}
// Write a vendor extension-unit (XU) control through the KS node discovered
// in the constructor. Returns false when no XU node exists, the payload is
// invalid, or the KsProperty call fails.
bool MSMFStreamChannel::setXu(uint8_t ctrl, const uint8_t* data, uint32_t len)
{
    // Guard: no XU node found in the topology (xuKsControl_ is null), and
    // payloads larger than the staging buffer would overflow the memcpy below.
    if (!xuKsControl_ || data == nullptr || len > XU_MAX_DATA_LENGTH)
    {
        return false;
    }
    if (xuSendBuf_.size() < XU_MAX_DATA_LENGTH) {
        xuSendBuf_.resize(XU_MAX_DATA_LENGTH);
    }
    memcpy(xuSendBuf_.data(), data, len);
    // KSP_NODE addresses the vendor-specific extension unit by GUID + node id.
    KSP_NODE node;
    memset(&node, 0, sizeof(KSP_NODE));
    node.Property.Set = { 0xA55751A1, 0xF3C5, 0x4A5E, {0x8D, 0x5A, 0x68, 0x54, 0xB8, 0xFA, 0x27, 0x16} };
    node.Property.Id = ctrl;
    node.Property.Flags = KSPROPERTY_TYPE_SET | KSPROPERTY_TYPE_TOPOLOGY;
    node.NodeId = xuNodeId_;
    ULONG bytes_received = 0;
    HR_FAILED_EXEC(xuKsControl_->KsProperty(reinterpret_cast<PKSPROPERTY>(&node), sizeof(KSP_NODE), (void*)xuSendBuf_.data(), XU_MAX_DATA_LENGTH, &bytes_received), {
        return false;
    });
    return true;
}
// Read a vendor extension-unit (XU) control. On success *data points into the
// channel-owned receive buffer (valid until the next getXu call) and *len is
// the byte count. On failure *data is null and *len is 0.
bool MSMFStreamChannel::getXu(uint8_t ctrl, uint8_t** data, uint32_t* len)
{
    // Guard: no XU node was found in the topology.
    if (!xuKsControl_)
    {
        *data = nullptr;
        *len = 0;
        return false;
    }
    if (xuRecvBuf_.size() < XU_MAX_DATA_LENGTH) {
        xuRecvBuf_.resize(XU_MAX_DATA_LENGTH);
    }
    KSP_NODE node;
    memset(&node, 0, sizeof(KSP_NODE));
    node.Property.Set = { 0xA55751A1, 0xF3C5, 0x4A5E, {0x8D, 0x5A, 0x68, 0x54, 0xB8, 0xFA, 0x27, 0x16} };
    node.Property.Id = ctrl;
    node.Property.Flags = KSPROPERTY_TYPE_GET | KSPROPERTY_TYPE_TOPOLOGY;
    node.NodeId = xuNodeId_;
    ULONG bytes_received = 0;
    HR_FAILED_EXEC(xuKsControl_->KsProperty(reinterpret_cast<PKSPROPERTY>(&node), sizeof(node), xuRecvBuf_.data(), XU_MAX_DATA_LENGTH, &bytes_received), {
        *len = 0;
        // Fix: the original wrote `data = nullptr;`, nulling the local
        // parameter only and leaving the caller's pointer dangling.
        *data = nullptr;
        return false;
    });
    *data = xuRecvBuf_.data();
    *len = bytes_received;
    return true;
}
// IUnknown::QueryInterface via the shlwapi QISearch helper; the channel only
// exposes IMFSourceReaderCallback (plus IUnknown).
STDMETHODIMP MSMFStreamChannel::QueryInterface(REFIID iid, void** ppv)
{
#pragma warning(push)
#pragma warning(disable : 4838)
    static const QITAB qit[] = {
        QITABENT(MSMFStreamChannel, IMFSourceReaderCallback),
        {nullptr},
    };
    return QISearch(this, qit, iid, ppv);
#pragma warning(pop)
}
// Standard COM reference counting over refCount_ (starts at 1).
STDMETHODIMP_(ULONG)
MSMFStreamChannel::AddRef()
{
    return InterlockedIncrement(&refCount_);
}
STDMETHODIMP_(ULONG)
MSMFStreamChannel::Release()
{
    ULONG count = InterlockedDecrement(&refCount_);
    // NOTE(review): count is unsigned, so `<= 0` only matches 0; `== 0`
    // would state the intent more clearly.
    if (count <= 0)
    {
        delete this;
    }
    return count;
}
// Async reader callback: confirms stream startup on the first sample,
// re-queues the next read, and forwards the sample payload to frameCallback_
// (after optional depth post-processing).
// NOTE(review): streamState_ is read here outside streamStateMutex_ while
// start()/stop() write it — looks racy; confirm StreamState is at least
// atomic in the uvc header.
STDMETHODIMP MSMFStreamChannel::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD /*dwStreamFlags*/, LONGLONG /*timeStamp*/, IMFSample* sample)
{
    HR_FAILED_LOG(hrStatus);
    // First delivered sample: wake the start() call waiting on the condvar.
    if (streamState_ == STREAM_STARTING)
    {
        std::unique_lock<std::mutex> lock(streamStateMutex_);
        streamState_ = STREAM_STARTED;
        streamStateCv_.notify_all();
    }
    if (streamState_ != STREAM_STOPPING && streamState_ != STREAM_STOPED)
    {
        // Keep the pipeline running by immediately requesting the next sample.
        HR_FAILED_LOG(streamReader_->ReadSample(dwStreamIndex, 0, nullptr, nullptr, nullptr, nullptr));
        if (sample)
        {
            ComPtr<IMFMediaBuffer> buffer = nullptr;
            DWORD max_length, current_length;
            byte* byte_buffer = nullptr;
            HR_FAILED_EXEC(sample->GetBufferByIndex(0, &buffer), { return S_OK; });
            buffer->Lock(&byte_buffer, &max_length, &current_length);
            // Frame::data points at the locked MF buffer; only valid during the callback.
            Frame fo = { currentProfile_.format, currentProfile_.width, currentProfile_.height, current_length, (uint8_t*)byte_buffer };
            if (depthFrameProcessor_)
            {
                depthFrameProcessor_->process(&fo);
            }
            frameCallback_(&fo);
            buffer->Unlock();
        }
    }
    return S_OK;
}
// No media events are handled.
STDMETHODIMP MSMFStreamChannel::OnEvent(DWORD /*sidx*/, IMFMediaEvent* /*event*/)
{
    return S_OK;
}
// Flush completion: wakes stop() by marking the stream stopped.
STDMETHODIMP MSMFStreamChannel::OnFlush(DWORD)
{
    if (streamState_ == STREAM_STARTING)
    {
        std::unique_lock<std::mutex> lock(streamStateMutex_);
        streamState_ = STREAM_STOPED;
        streamStateCv_.notify_all();
    }
    return S_OK;
}
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR_MSMF

@ -0,0 +1,180 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_MSMF_HPP
#define OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_MSMF_HPP
#ifdef HAVE_OBSENSOR_MSMF
#include "obsensor_uvc_stream_channel.hpp"
#include <windows.h>
#include <guiddef.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfplay.h>
#include <mfobjects.h>
#include <mfreadwrite.h>
#include <tchar.h>
#include <strsafe.h>
#include <codecvt>
#include <ks.h>
#include <comdef.h>
#include <mutex>
#include <vidcap.h> //IKsTopologyInfo
#include <ksproxy.h> //IKsControl
#include <ksmedia.h>
namespace cv {
namespace obsensor {
// Thin COM smart-pointer wrapper over the compiler-generated _com_ptr_t
// (via _COM_SMARTPTR_TYPEDEF), which performs AddRef/Release automatically.
template <class T>
class ComPtr
{
public:
    ComPtr() {}
    ComPtr(T* lp)
    {
        p = lp;
    }
    ComPtr(_In_ const ComPtr<T>& lp)
    {
        p = lp.p;
    }
    virtual ~ComPtr() {}
    // Exchange managed pointers through a temporary ComPtr.
    // NOTE(review): the copies go through _com_ptr_t and therefore touch the
    // reference counts; confirm this is the intended semantics vs. a raw swap.
    void swap(_In_ ComPtr<T>& lp)
    {
        ComPtr<T> tmp(p);
        p = lp.p;
        lp.p = tmp.p;
        tmp = NULL;
    }
    // Address-of, for COM out-parameters. Asserts the slot is empty so a held
    // reference is never silently overwritten (and leaked).
    T** operator&()
    {
        CV_Assert(p == NULL);
        return p.operator&();
    }
    T* operator->() const
    {
        CV_Assert(p != NULL);
        return p.operator->();
    }
    operator bool()
    {
        return p.operator!=(NULL);
    }
    // Raw pointer access without affecting the reference count.
    T* Get() const
    {
        return p;
    }
    void Release()
    {
        if (p)
            p.Release();
    }
    // query for U interface
    template <typename U>
    HRESULT As(_Out_ ComPtr<U>& lp) const
    {
        lp.Release();
        return p->QueryInterface(__uuidof(U), reinterpret_cast<void**>((T**)&lp));
    }
private:
    _COM_SMARTPTR_TYPEDEF(T, __uuidof(T));
    TPtr p;
};
// Process-wide Media Foundation context: the private ctor initializes
// COM + MF, the dtor uninitializes COM. Access through getInstance().
class MFContext
{
public:
    ~MFContext(void);
    static MFContext& getInstance();
    // Enumerate UVC video-capture devices visible to Media Foundation.
    std::vector<UvcDeviceInfo> queryUvcDeviceInfoList();
    // Create an MSMF-backed stream channel for one discovered device.
    Ptr<IStreamChannel> createStreamChannel(const UvcDeviceInfo& devInfo);
private:
    MFContext(void);
};
// Frame rate as a rational (numerator/denominator), as reported by MF.
struct FrameRate
{
    unsigned int denominator;
    unsigned int numerator;
};
// Media Foundation implementation of a UVC stream channel. Also implements
// IMFSourceReaderCallback so the async source reader delivers samples
// directly to this object.
class MSMFStreamChannel : public IUvcStreamChannel, public IMFSourceReaderCallback
{
public:
    MSMFStreamChannel(const UvcDeviceInfo& devInfo);
    virtual ~MSMFStreamChannel() noexcept;
    virtual void start(const StreamProfile& profile, FrameCallback frameCallback) override;
    virtual void stop() override;
private:
    // Vendor extension-unit (XU) control transport used by the base class.
    virtual bool setXu(uint8_t ctrl, const uint8_t* data, uint32_t len) override;
    virtual bool getXu(uint8_t ctrl, uint8_t** data, uint32_t* len) override;
private:
    MFContext& mfContext_;
    ComPtr<IMFAttributes> deviceAttrs_ = nullptr;
    ComPtr<IMFMediaSource> deviceSource_ = nullptr;
    ComPtr<IMFAttributes> readerAttrs_ = nullptr;
    ComPtr<IMFSourceReader> streamReader_ = nullptr;
    ComPtr<IAMCameraControl> cameraControl_ = nullptr;
    ComPtr<IAMVideoProcAmp> videoProcAmp_ = nullptr;
    // KS topology objects used to reach the vendor-specific XU node.
    ComPtr<IKsTopologyInfo> xuKsTopologyInfo_ = nullptr;
    ComPtr<IUnknown> xuNodeInstance_ = nullptr;
    ComPtr<IKsControl> xuKsControl_ = nullptr;
    int xuNodeId_;  // -1 when no dev-specific node was found
    FrameCallback frameCallback_;
    StreamProfile currentProfile_;
    int8_t currentStreamIndex_;  // -1 until start() selects a stream
    // Stream lifecycle state, coordinated between start()/stop() and the
    // reader callbacks via the mutex + condition variable below.
    StreamState streamState_;
    std::mutex streamStateMutex_;
    std::condition_variable streamStateCv_;
    std::vector<uint8_t> xuRecvBuf_;
    std::vector<uint8_t> xuSendBuf_;
public:
    // IUnknown / IMFSourceReaderCallback
    STDMETHODIMP QueryInterface(REFIID iid, void** ppv) override;
    STDMETHODIMP_(ULONG)
    AddRef() override;
    STDMETHODIMP_(ULONG)
    Release() override;
    STDMETHODIMP OnReadSample(HRESULT /*hrStatus*/, DWORD dwStreamIndex, DWORD /*dwStreamFlags*/, LONGLONG /*llTimestamp*/, IMFSample* sample) override;
    STDMETHODIMP OnEvent(DWORD /*sidx*/, IMFMediaEvent* /*event*/) override;
    STDMETHODIMP OnFlush(DWORD) override;
private:
    long refCount_ = 1;  // COM reference count; object starts owned by its creator
};
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR_MSMF
#endif // OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_MSMF_HPP

@ -0,0 +1,379 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef HAVE_OBSENSOR_V4L2
#include "obsensor_stream_channel_v4l2.hpp"
#include <errno.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <linux/uvcvideo.h>
#include <linux/usb/video.h>
#include <fstream>
#include <map>
#include <vector>
#include "opencv2/core/utils/filesystem.hpp"
namespace cv {
namespace obsensor {
#define IOCTL_FAILED_RETURN(x) \
if (x < 0) \
{ \
CV_LOG_WARNING(NULL, "ioctl error return: " << errno); \
return; \
}
#define IOCTL_FAILED_LOG(x) \
if (x < 0) \
{ \
CV_LOG_WARNING(NULL, "ioctl error return: " << errno); \
}
#define IOCTL_FAILED_CONTINUE(x) \
if (x < 0) \
{ \
CV_LOG_WARNING(NULL, "ioctl error return: " << errno); \
continue; \
}
#define IOCTL_FAILED_EXEC(x, statement) \
if (x < 0) \
{ \
CV_LOG_WARNING(NULL, "ioctl error return: " << errno); \
statement; \
}
// ioctl wrapper that retries transient failures: EAGAIN is retried without
// limit (non-blocking fd), EBUSY up to 5 attempts. Logs and returns the last
// result (negative on failure, errno preserved).
int xioctl(int fd, int req, void* arg)
{
    int attemptsLeft = 5;
    int result;
    for (;;)
    {
        result = ioctl(fd, req, arg);
        --attemptsLeft;
        const bool shouldRetry = (result == -1) &&
            (errno == EAGAIN || (errno == EBUSY && attemptsLeft > 0));
        if (!shouldRetry)
            break;
    }
    if (result < 0)
    {
        CV_LOG_WARNING(NULL, "ioctl: fd=" << fd << ", req=" << req);
    }
    return result;
}
// Process-wide singleton; thread-safe lazy initialization via C++11 magic statics.
V4L2Context& V4L2Context::getInstance()
{
    static V4L2Context instance;
    return instance;
}
// Scan /sys/class/video4linux for UVC capture devices: resolve each video
// node to its USB parent (busnum/devpath/devnum form the uid), read vid/pid
// from the modalias, and keep only nodes reporting V4L2_CAP_VIDEO_CAPTURE.
// The interfaceRealPath-keyed map deduplicates multiple video nodes that
// belong to the same USB interface.
std::vector<UvcDeviceInfo> V4L2Context::queryUvcDeviceInfoList()
{
    std::vector<UvcDeviceInfo> uvcDevList;
    std::map<std::string, UvcDeviceInfo> uvcDevMap;
    const cv::String videosDir = "/sys/class/video4linux";
    cv::utils::Paths videos;
    if (cv::utils::fs::isDirectory(videosDir))
    {
        cv::utils::fs::glob(videosDir, "*", videos, false, true);
        for (const auto& video : videos)
        {
            UvcDeviceInfo uvcDev;
            cv::String videoName = video.substr(video.find_last_of("/") + 1);
            char buf[PATH_MAX];
            // Skip virtual (non-physical) devices.
            if (realpath(video.c_str(), buf) == nullptr || cv::String(buf).find("virtual") != std::string::npos)
            {
                continue;
            }
            cv::String videoRealPath = buf;
            cv::String interfaceRealPath = videoRealPath.substr(0, videoRealPath.find_last_of("/"));
            std::string busNum, devNum, devPath;
            // Walk up the sysfs path until a directory with busnum/devpath/devnum
            // (the USB device directory) is found; that triple identifies the
            // physical device across its interfaces.
            while (videoRealPath.find_last_of("/") != std::string::npos)
            {
                videoRealPath = videoRealPath.substr(0, videoRealPath.find_last_of("/"));
                if (!(std::ifstream(videoRealPath + "/busnum") >> busNum))
                {
                    continue;
                }
                if (!(std::ifstream(videoRealPath + "/devpath") >> devPath))
                {
                    continue;
                }
                if (!(std::ifstream(videoRealPath + "/devnum") >> devNum))
                {
                    continue;
                }
                uvcDev.uid = busNum + "-" + devPath + "-" + devNum;
                break;
            }
            uvcDev.id = cv::String("/dev/") + videoName;
            // Probe the node's capabilities; skip nodes we cannot query.
            v4l2_capability caps = {};
            int videoFd = open(uvcDev.id.c_str(), O_RDONLY);
            IOCTL_FAILED_EXEC(xioctl(videoFd, VIDIOC_QUERYCAP, &caps), {
                close(videoFd);
                continue;
            });
            close(videoFd);
            if (caps.capabilities & V4L2_CAP_VIDEO_CAPTURE)
            {
                // modalias looks like "usb:vXXXXpXXXX..."; extract vid/pid (hex).
                cv::String modalias;
                if (!(std::ifstream(video + "/device/modalias") >> modalias) ||
                    modalias.size() < 14 ||
                    modalias.substr(0, 5) != "usb:v" ||
                    modalias[9] != 'p')
                {
                    continue;
                }
                std::istringstream(modalias.substr(5, 4)) >> std::hex >> uvcDev.vid;
                std::istringstream(modalias.substr(10, 4)) >> std::hex >> uvcDev.pid;
                std::getline(std::ifstream(video + "/device/interface"), uvcDev.name);
                std::ifstream(video + "/device/bInterfaceNumber") >> uvcDev.mi;
                uvcDevMap.insert({ interfaceRealPath, uvcDev });
            }
        }
    }
    for (const auto& item : uvcDevMap)
    {
        const auto uvcDev = item.second; // alias
        CV_LOG_INFO(NULL, "UVC device found: name=" << uvcDev.name << ", vid=" << uvcDev.vid << ", pid=" << uvcDev.pid << ", mi=" << uvcDev.mi << ", uid=" << uvcDev.uid << ", id=" << uvcDev.id);
        uvcDevList.push_back(uvcDev);
    }
    return uvcDevList;
}
// Factory: wrap a discovered UVC device in a V4L2-backed stream channel.
Ptr<IStreamChannel> V4L2Context::createStreamChannel(const UvcDeviceInfo& devInfo)
{
    return makePtr<V4L2StreamChannel>(devInfo);
}
// Open the V4L2 device node non-blocking; devFd_ stays -1 on failure.
// Depth channels additionally get the depth frame post-processor (set up by
// the base class).
V4L2StreamChannel::V4L2StreamChannel(const UvcDeviceInfo &devInfo) : IUvcStreamChannel(devInfo),
    devFd_(-1),
    streamState_(STREAM_STOPED)
{
    devFd_ = open(devInfo_.id.c_str(), O_RDWR | O_NONBLOCK, 0);
    if (devFd_ < 0)
    {
        CV_LOG_ERROR(NULL, "Open " << devInfo_.id << " failed ! errno=" << errno)
    }
    else if (streamType_ == OBSENSOR_STREAM_DEPTH)
    {
        initDepthFrameProcessor();
    }
}
// Stop streaming, then release the device descriptor.
V4L2StreamChannel::~V4L2StreamChannel() noexcept
{
    stop();
    // Only close a valid descriptor: devFd_ is -1 when open() failed, and -1
    // is truthy, so the original `if (devFd_)` called close(-1). (It would
    // also have closed fd 0 incorrectly had the descriptor been 0.)
    if (devFd_ >= 0)
    {
        close(devFd_);
        devFd_ = -1;
    }
}
// Configure the device for the requested profile (format, size, fps), set up
// MAX_FRAME_BUFFER_NUM memory-mapped capture buffers, issue VIDIOC_STREAMON,
// and spawn the grab thread that dequeues frames. Early-returns (leaving
// streamState_ unchanged) on any ioctl failure before STREAMON.
void V4L2StreamChannel::start(const StreamProfile& profile, FrameCallback frameCallback)
{
    if (streamState_ != STREAM_STOPED)
    {
        CV_LOG_ERROR(NULL, devInfo_.id << ": repetitive operation!")
        return;
    }
    frameCallback_ = frameCallback;
    currentProfile_ = profile;
    // Negotiate pixel format and frame size.
    struct v4l2_format fmt = {};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = profile.width;
    fmt.fmt.pix.height = profile.height;
    fmt.fmt.pix.pixelformat = frameFormatToFourcc(profile.format);
    IOCTL_FAILED_RETURN(xioctl(devFd_, VIDIOC_S_FMT, &fmt));
    IOCTL_FAILED_RETURN(xioctl(devFd_, VIDIOC_G_FMT, &fmt));
    // Request the frame interval (1/fps).
    struct v4l2_streamparm streamParm = {};
    streamParm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    streamParm.parm.capture.timeperframe.numerator = 1;
    streamParm.parm.capture.timeperframe.denominator = profile.fps;
    IOCTL_FAILED_RETURN(xioctl(devFd_, VIDIOC_S_PARM, &streamParm));
    IOCTL_FAILED_RETURN(xioctl(devFd_, VIDIOC_G_PARM, &streamParm));
    // Allocate driver buffers for memory-mapped streaming I/O.
    struct v4l2_requestbuffers req = {};
    req.count = MAX_FRAME_BUFFER_NUM;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    IOCTL_FAILED_RETURN(xioctl(devFd_, VIDIOC_REQBUFS, &req));
    // Map each granted buffer into this process; the driver may grant fewer
    // than requested, hence the double bound on the loop.
    for (uint32_t i = 0; i < req.count && i < MAX_FRAME_BUFFER_NUM; i++)
    {
        struct v4l2_buffer buf = {};
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        IOCTL_FAILED_RETURN(xioctl(devFd_, VIDIOC_QUERYBUF, &buf));
        frameBuffList[i].ptr = (uint8_t*)mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, devFd_, buf.m.offset);
        frameBuffList[i].length = buf.length;
    }
    // stream on
    std::unique_lock<std::mutex> lk(streamStateMutex_);
    streamState_ = STREAM_STARTING;
    uint32_t type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    IOCTL_FAILED_EXEC(xioctl(devFd_, VIDIOC_STREAMON, &type), {
        // STREAMON failed: roll back state and unmap everything mapped above.
        streamState_ = STREAM_STOPED;
        for (uint32_t i = 0; i < MAX_FRAME_BUFFER_NUM; i++)
        {
            if (frameBuffList[i].ptr)
            {
                munmap(frameBuffList[i].ptr, frameBuffList[i].length);
                frameBuffList[i].ptr = nullptr;
                frameBuffList[i].length = 0;
            }
        }
        return;
    });
    grabFrameThread_ = std::thread(&V4L2StreamChannel::grabFrame, this);
}
// Worker-thread loop: queue one capture buffer, then repeatedly wait (select,
// 100 ms timeout) for a filled buffer, dequeue it, run the optional depth
// post-processing, hand the frame to the user callback, and re-queue the
// buffer. Runs until streamState_ leaves STARTING/STARTED; always finishes by
// publishing STREAM_STOPED under the state mutex and notifying waiters.
void V4L2StreamChannel::grabFrame()
{
    fd_set fds;
    FD_ZERO(&fds);
    FD_SET(devFd_, &fds);
    struct timeval tv = {};
    tv.tv_sec = 0;
    tv.tv_usec = 100000; // 100ms
    struct v4l2_buffer buf = {};
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    // buf is zero-initialized, so the first QBUF enqueues buffer index 0.
    // NOTE(review): only this single buffer is ever queued even though up to
    // MAX_FRAME_BUFFER_NUM buffers were mmapped in start() -- confirm a
    // single in-flight buffer is intended.
    IOCTL_FAILED_EXEC(xioctl(devFd_, VIDIOC_QBUF, &buf), {
        std::unique_lock<std::mutex> lk(streamStateMutex_);
        streamState_ = STREAM_STOPED;
        streamStateCv_.notify_all();
        return;
    });
    while (streamState_ == STREAM_STARTING || streamState_ == STREAM_STARTED)
    {
        IOCTL_FAILED_CONTINUE(select(devFd_ + 1, &fds, NULL, NULL, &tv));
        IOCTL_FAILED_CONTINUE(xioctl(devFd_, VIDIOC_DQBUF, &buf));
        // First successful dequeue: publish STARTING -> STARTED for cv waiters.
        if (streamState_ == STREAM_STARTING)
        {
            std::unique_lock<std::mutex> lk(streamStateMutex_);
            streamState_ = STREAM_STARTED;
            streamStateCv_.notify_all();
        }
        // Frame points directly into the mmapped buffer; valid only until QBUF.
        Frame fo = { currentProfile_.format, currentProfile_.width, currentProfile_.height, buf.length, frameBuffList[buf.index].ptr };
        if (depthFrameProcessor_)
        {
            depthFrameProcessor_->process(&fo);
        }
        frameCallback_(&fo);
        IOCTL_FAILED_CONTINUE(xioctl(devFd_, VIDIOC_QBUF, &buf));
    }
    std::unique_lock<std::mutex> lk(streamStateMutex_);
    streamState_ = STREAM_STOPED;
    streamStateCv_.notify_all();
}
bool V4L2StreamChannel::setXu(uint8_t ctrl, const uint8_t* data, uint32_t len)
{
if (xuSendBuf_.size() < XU_MAX_DATA_LENGTH) {
xuSendBuf_.resize(XU_MAX_DATA_LENGTH);
}
memcpy(xuSendBuf_.data(), data, len);
struct uvc_xu_control_query xu_ctrl_query = {
.unit = XU_UNIT_ID,
.selector = ctrl,
.query = UVC_SET_CUR,
.size = (__u16)(ctrl == 1 ? 512 : (ctrl == 2 ? 64 : 1024)),
.data = xuSendBuf_.data()
};
if (devFd_ > 0)
{
IOCTL_FAILED_EXEC(xioctl(devFd_, UVCIOC_CTRL_QUERY, &xu_ctrl_query), { return false; });
}
return true;
}
// Issue a UVC extension-unit GET_CUR request for selector `ctrl`.
// On success, *data points into the channel-owned receive buffer (valid only
// until the next getXu call) and *len is the transferred size; on failure
// *len is set to 0 and false is returned.
bool V4L2StreamChannel::getXu(uint8_t ctrl, uint8_t** data, uint32_t* len)
{
    if (xuRecvBuf_.size() < XU_MAX_DATA_LENGTH) {
        xuRecvBuf_.resize(XU_MAX_DATA_LENGTH);
    }
    struct uvc_xu_control_query xu_ctrl_query = {
        .unit = XU_UNIT_ID,
        .selector = ctrl,
        .query = UVC_GET_CUR,
        // Transfer size is fixed per selector (1 -> 512, 2 -> 64, others -> 1024).
        .size = (__u16)(ctrl == 1 ? 512 : (ctrl == 2 ? 64 : 1024)),
        .data = xuRecvBuf_.data()
    };
    IOCTL_FAILED_EXEC(xioctl(devFd_, UVCIOC_CTRL_QUERY, &xu_ctrl_query), {
        *len = 0;
        return false;
    });
    *len = xu_ctrl_query.size;
    *data = xuRecvBuf_.data();
    return true;
}
// Stop streaming: signal the grab thread, wait (up to 1 s) for it to report
// STREAM_STOPED, issue VIDIOC_STREAMOFF, join the thread and unmap buffers.
void V4L2StreamChannel::stop()
{
    if (streamState_ == STREAM_STARTING || streamState_ == STREAM_STARTED)
    {
        // Fix: take the state mutex BEFORE flipping the state so the write
        // does not race the grab thread's state transitions/notifications
        // (the old code wrote streamState_ before acquiring the lock).
        std::unique_lock<std::mutex> lk(streamStateMutex_);
        streamState_ = STREAM_STOPPING;
        streamStateCv_.wait_for(lk, std::chrono::milliseconds(1000), [&](){
            return streamState_ == STREAM_STOPED;
        });
        uint32_t type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        IOCTL_FAILED_LOG(xioctl(devFd_, VIDIOC_STREAMOFF, &type));
    }
    if (grabFrameThread_.joinable())
    {
        grabFrameThread_.join();
    }
    for (uint32_t i = 0; i < MAX_FRAME_BUFFER_NUM; i++)
    {
        if (frameBuffList[i].ptr)
        {
            munmap(frameBuffList[i].ptr, frameBuffList[i].length);
            frameBuffList[i].ptr = nullptr;
            frameBuffList[i].length = 0;
        }
    }
    // Fix: after the grab thread is joined, leave the channel in a restartable
    // state. Previously, a wait_for timeout left streamState_ stuck at
    // STREAM_STOPPING, making any later start() fail as "repetitive operation".
    streamState_ = STREAM_STOPED;
}
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR_V4L2

@ -0,0 +1,90 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_V4L2_HPP
#define OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_V4L2_HPP
#ifdef HAVE_OBSENSOR_V4L2
#include "obsensor_uvc_stream_channel.hpp"
#include <mutex>
#include <condition_variable>
#include <thread>
namespace cv {
namespace obsensor {
#define MAX_FRAME_BUFFER_NUM 4 // number of mmapped V4L2 capture buffers per channel

// One mmapped V4L2 capture buffer (mapped pointer + mapped length).
struct V4L2FrameBuffer
{
    uint32_t length = 0;
    uint8_t* ptr = nullptr;
};

// ioctl helper shared by the V4L2 channel implementation (defined in the .cpp).
int xioctl(int fd, int req, void* arg);

// Process-wide singleton: enumerates UVC sub-devices on the system and
// creates V4L2-backed stream channels for them.
class V4L2Context
{
public:
    ~V4L2Context() {}
    static V4L2Context& getInstance();

    std::vector<UvcDeviceInfo> queryUvcDeviceInfoList();
    Ptr<IStreamChannel> createStreamChannel(const UvcDeviceInfo& devInfo);

private:
    V4L2Context() noexcept {}
};
// V4L2 implementation of a UVC stream channel: drives capture on one video
// device node and delivers frames through a callback from a worker thread.
class V4L2StreamChannel : public IUvcStreamChannel
{
public:
    V4L2StreamChannel(const UvcDeviceInfo& devInfo);
    virtual ~V4L2StreamChannel() noexcept;

    virtual void start(const StreamProfile& profile, FrameCallback frameCallback) override;
    virtual void stop() override;

private:
    // Worker-thread body: dequeues buffers and invokes frameCallback_.
    void grabFrame();

    // UVC extension-unit transfers (vendor commands); see IUvcStreamChannel.
    virtual bool setXu(uint8_t ctrl, const uint8_t* data, uint32_t len) override;
    virtual bool getXu(uint8_t ctrl, uint8_t** data, uint32_t* len) override;

private:
    int devFd_;                                           // device fd, -1 when open failed
    V4L2FrameBuffer frameBuffList[MAX_FRAME_BUFFER_NUM];  // mmapped capture buffers

    StreamState streamState_;             // transitions published under streamStateMutex_
    std::mutex streamStateMutex_;
    std::condition_variable streamStateCv_;
    std::thread grabFrameThread_;

    FrameCallback frameCallback_;
    StreamProfile currentProfile_;

    std::vector<uint8_t> xuRecvBuf_;  // staging buffer for XU GET transfers
    std::vector<uint8_t> xuSendBuf_;  // staging buffer for XU SET transfers
};
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR_V4L2
#endif // OPENCV_VIDEOIO_OBSENSOR_STREAM_CHANNEL_V4L2_HPP

@ -0,0 +1,265 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if defined(HAVE_OBSENSOR_V4L2) || defined(HAVE_OBSENSOR_MSMF)
#include <map>
#include <vector>
#include <string>
#include <algorithm>
#include <iterator>
#if defined(HAVE_OBSENSOR_V4L2)
#include "obsensor_stream_channel_v4l2.hpp"
#elif defined(HAVE_OBSENSOR_MSMF)
#include "obsensor_stream_channel_msmf.hpp"
#endif // HAVE_OBSENSOR_V4L2
namespace cv {
namespace obsensor {
// Vendor extension-unit command packets (opaque request blobs). Byte 6 is an
// opcode that the device echoes in its reply; reply bytes 8/9 are a status
// that callers check against zero.
const uint8_t OB_EXT_CMD0[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0x52, 0x00, 0x5B, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 };
const uint8_t OB_EXT_CMD1[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0x54, 0x00, 0x3f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
const uint8_t OB_EXT_CMD2[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0x56, 0x00, 0x0d, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 };
const uint8_t OB_EXT_CMD3[16] = { 0x47, 0x4d, 0x04, 0x00, 0x02, 0x00, 0x58, 0x00, 0x2a, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 };
// NOTE(review): CMD4/CMD5 list only 12 initializers for a 16-byte array; the
// remaining bytes are zero-initialized by the language.
const uint8_t OB_EXT_CMD4[16] = { 0x47, 0x4d, 0x02, 0x00, 0x03, 0x00, 0x60, 0x00, 0xed, 0x03, 0x00, 0x00 };
const uint8_t OB_EXT_CMD5[16] = { 0x47, 0x4d, 0x02, 0x00, 0x03, 0x00, 0x62, 0x00, 0xe9, 0x03, 0x00, 0x00 };

// fourcc packing differs between the backends: V4L2 packs the first character
// into the low byte, MSMF into the high byte.
#if defined(HAVE_OBSENSOR_V4L2)
#define fourCc2Int(a, b, c, d) \
    ((uint32_t)(a) | ((uint32_t)(b) << 8) | ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))
#elif defined(HAVE_OBSENSOR_MSMF)
#define fourCc2Int(a, b, c, d) \
    (((uint32_t)(a) <<24) | ((uint32_t)(b) << 16) | ((uint32_t)(c) << 8) | (uint32_t)(d))
#endif // HAVE_OBSENSOR_V4L2

// fourcc -> FrameFormat for the pixel formats this backend understands.
const std::map<uint32_t, FrameFormat> fourccToOBFormat = {
    {fourCc2Int('Y', 'U', 'Y', '2'), FRAME_FORMAT_YUYV},
    {fourCc2Int('M', 'J', 'P', 'G'), FRAME_FORMAT_MJPG},
    {fourCc2Int('Y', '1', '6', ' '), FRAME_FORMAT_Y16},
};
// Classify a UVC sub-device by keywords in its name (case-insensitive):
// " depth" -> depth stream, " ir" -> IR stream, anything else -> color.
StreamType parseUvcDeviceNameToStreamType(const std::string& devName)
{
    std::string lowered(devName);
    std::transform(lowered.begin(), lowered.end(), lowered.begin(),
                   [](char c) { return (char)tolower(c); });
    if (lowered.find(" depth") != std::string::npos)
    {
        return OBSENSOR_STREAM_DEPTH;
    }
    if (lowered.find(" ir") != std::string::npos)
    {
        return OBSENSOR_STREAM_IR;
    }
    return OBSENSOR_STREAM_COLOR;
}
// Translate a fourcc code to its FrameFormat, or FRAME_FORMAT_UNKNOWN when
// the code is not in the supported set.
FrameFormat frameFourccToFormat(uint32_t fourcc)
{
    // The map is keyed by fourcc, so use the map's own lookup instead of the
    // original linear scan over all entries.
    const auto iter = fourccToOBFormat.find(fourcc);
    return iter != fourccToOBFormat.end() ? iter->second : FRAME_FORMAT_UNKNOWN;
}
// Reverse lookup: FrameFormat -> fourcc; returns 0 when the format is not mapped.
uint32_t frameFormatToFourcc(FrameFormat fmt)
{
    const auto iter = std::find_if(fourccToOBFormat.begin(), fourccToOBFormat.end(),
        [fmt](const std::pair<const uint32_t, FrameFormat>& entry) {
            return entry.second == fmt;
        });
    return iter == fourccToOBFormat.end() ? 0 : iter->first;
}
// Enumerate Orbbec UVC sub-devices, group them into physical cameras, and
// create stream channels for the group selected by groupIdx.
// Grouping rule: non-Orbbec devices (vid != OBSENSOR_CAM_VID) are dropped;
// if at most 3 sub-devices remain they are assumed to belong to one camera
// (the "default" group), otherwise sub-devices are grouped by their parent
// USB device id (uid).
std::vector<Ptr<IStreamChannel>> getStreamChannelGroup(uint32_t groupIdx)
{
    std::vector<Ptr<IStreamChannel>> streamChannelGroup;

    // Platform backend selected at build time.
#if defined(HAVE_OBSENSOR_V4L2)
    auto& ctx = V4L2Context::getInstance();
#elif defined(HAVE_OBSENSOR_MSMF)
    auto& ctx = MFContext::getInstance();
#endif // HAVE_OBSENSOR_V4L2

    auto uvcDevInfoList = ctx.queryUvcDeviceInfoList();

    std::map<std::string, std::vector<UvcDeviceInfo>> uvcDevInfoGroupMap;

    // Keep only Orbbec devices.
    auto devInfoIter = uvcDevInfoList.begin();
    while (devInfoIter != uvcDevInfoList.end())
    {
        if (devInfoIter->vid != OBSENSOR_CAM_VID)
        {
            devInfoIter = uvcDevInfoList.erase(devInfoIter); // drop it
            continue;
        }
        devInfoIter++;
    }

    if (!uvcDevInfoList.empty() && uvcDevInfoList.size() <= 3)
    {
        uvcDevInfoGroupMap.insert({ "default", uvcDevInfoList });
    }
    else {
        for (auto& devInfo : uvcDevInfoList)
        {
            // group by uid
            uvcDevInfoGroupMap[devInfo.uid].push_back(devInfo); // todo: group by sn
        }
    }

    if (uvcDevInfoGroupMap.size() > groupIdx)
    {
        // Create one channel per sub-device in the selected group.
        auto uvcDevInfoGroupIter = uvcDevInfoGroupMap.begin();
        std::advance(uvcDevInfoGroupIter, groupIdx);
        for (const auto& devInfo : uvcDevInfoGroupIter->second)
        {
            streamChannelGroup.emplace_back(ctx.createStreamChannel(devInfo));
        }
    }
    else
    {
        CV_LOG_ERROR(NULL, "Camera index out of range");
    }
    return streamChannelGroup;
}
// Precompute a 4096-entry table mapping every possible raw 12-bit depth
// sample to its converted value, so per-frame processing is a single lookup.
// Entries whose converted value falls outside [40, 10000] map to 0.
DepthFrameProcessor::DepthFrameProcessor(const OBExtensionParam& param) : param_(param)
{
    lookUpTable_ = new uint16_t[4096];
    memset(lookUpTable_, 0, 4096 * sizeof(uint16_t)); // was a magic "4096 * 2"
    // Entry 0 stays 0 (invalid sample); start at 1 instead of `continue`-ing.
    for (uint16_t oriValue = 1; oriValue < 4096; oriValue++)
    {
        const double tempValue = 200.375 - (double)oriValue / 8;
        const double rstValue = (double)param_.pd / (1 + tempValue * param_.ps / param_.bl) * 10;
        // The original also tested "rstValue < 65536", which is redundant
        // given rstValue <= 10000; dropped.
        if ((rstValue >= 40) && (rstValue <= 10000))
        {
            lookUpTable_[oriValue] = (uint16_t)rstValue;
        }
    }
}
// Release the depth lookup table.
DepthFrameProcessor::~DepthFrameProcessor()
{
    delete[] lookUpTable_;
}
// Convert a raw depth frame in place: every 16-bit sample is masked to its
// low 12 bits and replaced by its precomputed table value.
void DepthFrameProcessor::process(Frame* frame)
{
    uint16_t* pixel = (uint16_t*)frame->data;
    const uint32_t pixelCount = frame->dataSize / 2;
    for (uint32_t idx = 0; idx < pixelCount; idx++)
    {
        pixel[idx] = lookUpTable_[pixel[idx] & 0x0fff];
    }
}
// Bind the channel to one UVC sub-device and derive its stream type
// (color/depth/IR) from the device name.
IUvcStreamChannel::IUvcStreamChannel(const UvcDeviceInfo& devInfo) :
    devInfo_(devInfo),
    streamType_(parseUvcDeviceNameToStreamType(devInfo_.name))
{
}
// Stream type inferred from the device name at construction time.
StreamType IUvcStreamChannel::streamType() const {
    return streamType_;
}
// Set a device property. Currently only DEPTH_TO_COLOR_ALIGN is supported:
// it replays a fixed sequence of vendor XU commands (each SET followed by a
// GET of the reply). The `data`/`dataSize` arguments are not yet used to fill
// the command payloads. Returns false for unsupported properties or when any
// transfer fails.
bool IUvcStreamChannel::setProperty(int propId, const uint8_t* /*data*/, uint32_t /*dataSize*/)
{
    uint8_t* rcvData;
    uint32_t rcvLen;
    bool rst = true;
    switch (propId)
    {
    case DEPTH_TO_COLOR_ALIGN:
        // todo: value filling
        rst &= setXu(2, OB_EXT_CMD0, sizeof(OB_EXT_CMD0));
        rst &= getXu(2, &rcvData, &rcvLen);
        rst &= setXu(2, OB_EXT_CMD1, sizeof(OB_EXT_CMD1));
        rst &= getXu(2, &rcvData, &rcvLen);
        rst &= setXu(2, OB_EXT_CMD2, sizeof(OB_EXT_CMD2));
        rst &= getXu(2, &rcvData, &rcvLen);
        rst &= setXu(2, OB_EXT_CMD3, sizeof(OB_EXT_CMD3));
        rst &= getXu(2, &rcvData, &rcvLen);
        break;
    default:
        rst = false;
        break;
    }
    return rst;
}
// Query a device property. Currently only CAMERA_PARAM is supported: sends
// the OB_EXT_CMD5 vendor command and, when the reply echoes the opcode
// (byte 6) with a zero status (bytes 8/9), copies the payload starting at
// offset 10 into recvData and reports its size in recvDataSize.
// NOTE(review): recvData must be large enough for the reply payload -- the
// caller's buffer size is not passed in; confirm with callers.
bool IUvcStreamChannel::getProperty(int propId, uint8_t* recvData, uint32_t* recvDataSize)
{
    bool rst = true;
    uint8_t* rcvData;
    uint32_t rcvLen;
    switch (propId)
    {
    case CAMERA_PARAM:
        rst &= setXu(2, OB_EXT_CMD5, sizeof(OB_EXT_CMD5));
        rst &= getXu(2, &rcvData, &rcvLen);
        if (rst && OB_EXT_CMD5[6] == rcvData[6] && rcvData[8] == 0 && rcvData[9] == 0)
        {
            memcpy(recvData, rcvData + 10, rcvLen - 10);
            *recvDataSize = rcvLen - 10;
        }
        break;
    default:
        rst = false;
        break;
    }
    return rst;
}
// For depth channels only: send the OB_EXT_CMD4 vendor command, read the
// reply, and build the DepthFrameProcessor from the returned extension
// parameters. Returns false for non-depth channels or when the handshake fails.
bool IUvcStreamChannel::initDepthFrameProcessor()
{
    if (streamType_ == OBSENSOR_STREAM_DEPTH && setXu(2, OB_EXT_CMD4, sizeof(OB_EXT_CMD4)))
    {
        uint8_t* rcvData;
        uint32_t rcvLen;
        // Reply is accepted when it echoes the request opcode (byte 6) and
        // the status bytes (8, 9) are zero; the payload starts at offset 10.
        if (getXu(1, &rcvData, &rcvLen) && OB_EXT_CMD4[6] == rcvData[6] && rcvData[8] == 0 && rcvData[9] == 0)
        {
            depthFrameProcessor_ = makePtr<DepthFrameProcessor>(*(OBExtensionParam*)(rcvData + 10));
            return true;
        }
    }
    return false;
}
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR_V4L2 || HAVE_OBSENSOR_MSMF

@ -0,0 +1,96 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OPENCV_VIDEOIO_OBSENSOR_UVC_STREAM_CHANNEL_HPP
#define OPENCV_VIDEOIO_OBSENSOR_UVC_STREAM_CHANNEL_HPP
#include "obsensor_stream_channel_interface.hpp"
#ifdef HAVE_OBSENSOR
namespace cv {
namespace obsensor {
#define OBSENSOR_CAM_VID 0x2bc5 // usb vid (Orbbec vendor id)
#define XU_MAX_DATA_LENGTH 1024 // max payload of a UVC extension-unit transfer
#define XU_UNIT_ID 4            // extension unit id used for vendor commands

// Identity of one UVC sub-device as enumerated by the platform backend.
struct UvcDeviceInfo
{
    std::string id = ""; // uvc sub-device id
    std::string name = "";
    std::string uid = ""; // parent usb device id
    uint16_t vid = 0;
    uint16_t pid = 0;
    uint16_t mi = 0; // uvc interface index
};

// Lifecycle states of a stream channel's capture loop.
enum StreamState
{
    STREAM_STOPED = 0, // stopped or ready (identifier keeps the historical spelling)
    STREAM_STARTING = 1,
    STREAM_STARTED = 2,
    STREAM_STOPPING = 3,
};

// Map a UVC sub-device name to its stream type (color/depth/IR).
StreamType parseUvcDeviceNameToStreamType(const std::string& devName);
// fourcc <-> FrameFormat conversions (fourcc byte order is backend-dependent).
FrameFormat frameFourccToFormat(uint32_t fourcc);
uint32_t frameFormatToFourcc(FrameFormat);

// Depth-conversion parameters returned by the device extension command.
struct OBExtensionParam {
    float bl;
    float bl2;
    float pd;
    float ps;
};
// Converts raw 12-bit depth samples in place via a lookup table built once
// from the device's OBExtensionParam.
class DepthFrameProcessor {
public:
    // Fix: parameter name was misspelled "parma" in the declaration.
    DepthFrameProcessor(const OBExtensionParam& param);
    ~DepthFrameProcessor() noexcept;
    void process(Frame* frame);

private:
    const OBExtensionParam param_;
    uint16_t* lookUpTable_; // 4096-entry raw-sample -> converted-value table
};
// Platform-independent UVC behavior shared by the backends (V4L2/MSMF):
// property get/set implemented on top of vendor extension-unit transfers,
// plus depth post-processing setup. Backends supply setXu/getXu and the
// start/stop streaming machinery.
class IUvcStreamChannel : public IStreamChannel {
public:
    IUvcStreamChannel(const UvcDeviceInfo& devInfo);
    virtual ~IUvcStreamChannel() noexcept {}

    virtual bool setProperty(int propId, const uint8_t* data, uint32_t dataSize) override;
    virtual bool getProperty(int propId, uint8_t* recvData, uint32_t* recvDataSize) override;
    virtual StreamType streamType() const override;

protected:
    // Raw UVC extension-unit transfers, implemented per platform.
    virtual bool setXu(uint8_t ctrl, const uint8_t* data, uint32_t len) = 0;
    virtual bool getXu(uint8_t ctrl, uint8_t** data, uint32_t* len) = 0;

    // Query the device extension params and build depthFrameProcessor_
    // (meaningful for depth channels only).
    bool initDepthFrameProcessor();

protected:
    const UvcDeviceInfo devInfo_;
    StreamType streamType_; // derived from devInfo_.name
    Ptr<DepthFrameProcessor> depthFrameProcessor_; // set for depth channels only
};
}} // namespace cv::obsensor::
#endif // HAVE_OBSENSOR
#endif // OPENCV_VIDEOIO_OBSENSOR_UVC_STREAM_CHANNEL_HPP

@ -0,0 +1,150 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "precomp.hpp"
#include "cap_obsensor_capture.hpp"
#include "cap_obsensor/obsensor_stream_channel_interface.hpp"
#ifdef HAVE_OBSENSOR
namespace cv {
// Factory registered with the videoio backend table: construct an
// obsensor-backed capture for camera group `index`.
Ptr<IVideoCapture> create_obsensor_capture(int index)
{
    Ptr<VideoCapture_obsensor> capture = makePtr<VideoCapture_obsensor>(index);
    return capture;
}
// Open camera group `index`: start the color stream (MJPG 640x480@30) and the
// depth stream (Y16 640x480@30, device-side aligned to color), and cache the
// depth camera calibration used by getProperty().
VideoCapture_obsensor::VideoCapture_obsensor(int index) : isOpened_(false)
{
    static const obsensor::StreamProfile colorProfile = { 640, 480, 30, obsensor::FRAME_FORMAT_MJPG };
    static const obsensor::StreamProfile depthProfile = { 640, 480, 30, obsensor::FRAME_FORMAT_Y16 };

    // Fix: these members were only assigned on the depth-channel path, so a
    // group without a depth channel left them uninitialized and getProperty()
    // read garbage. Always give them defined values first.
    memset(&camParam_, 0, sizeof(camParam_));
    camParamScale_ = 1;

    streamChannelGroup_ = obsensor::getStreamChannelGroup(index);
    if (!streamChannelGroup_.empty())
    {
        for (auto& channel : streamChannelGroup_)
        {
            auto streamType = channel->streamType();
            switch (streamType)
            {
            case obsensor::OBSENSOR_STREAM_COLOR:
                // The callback stashes the raw MJPG packet as a 1xN byte Mat;
                // it is decoded lazily in retrieveFrame().
                channel->start(colorProfile, [&](obsensor::Frame* frame) {
                    std::unique_lock<std::mutex> lk(frameMutex_);
                    colorFrame_ = Mat(1, frame->dataSize, CV_8UC1, frame->data).clone();
                });
                break;
            case obsensor::OBSENSOR_STREAM_DEPTH:
            {
                // Ask the device to align depth to the color viewpoint.
                uint8_t data = 1;
                channel->setProperty(obsensor::DEPTH_TO_COLOR_ALIGN, &data, 1);
                channel->start(depthProfile, [&](obsensor::Frame* frame) {
                    std::unique_lock<std::mutex> lk(frameMutex_);
                    depthFrame_ = Mat(frame->height, frame->width, CV_16UC1, frame->data, frame->width * 2).clone();
                });
                uint32_t len = 0; // fix: was uninitialized if getProperty() failed
                channel->getProperty(obsensor::CAMERA_PARAM, (uint8_t*)&camParam_, &len);
                // Scale factor between the stored calibration resolution and
                // the active 640x480 profile (cx ~= width/2).
                camParamScale_ = (int)(camParam_.p1[2] * 2 / 640 + 0.5);
                if (camParamScale_ < 1)
                {
                    camParamScale_ = 1; // fix: avoid division by zero in getProperty()
                }
            }
            break;
            default:
                break;
            }
        }
        isOpened_ = true;
    }
}
// Atomically take ownership of the latest frames delivered by the stream
// callbacks; the staging mats are left empty afterwards. Returns true when
// at least one of the two streams had a new frame.
bool VideoCapture_obsensor::grabFrame()
{
    std::unique_lock<std::mutex> lock(frameMutex_);
    grabbedDepthFrame_ = depthFrame_;
    grabbedColorFrame_ = colorFrame_;
    depthFrame_.release();
    colorFrame_.release();
    return !(grabbedDepthFrame_.empty() && grabbedColorFrame_.empty());
}
// Deliver a frame captured by grabFrame(). outputType selects the stream:
// CAP_OBSENSOR_DEPTH_MAP yields the CV_16UC1 depth map; CAP_OBSENSOR_BGR_IMAGE
// decodes the stashed MJPG packet into a BGR image. Each grabbed frame is
// consumed (released) on retrieval; returns false when nothing is pending or
// decoding fails.
bool VideoCapture_obsensor::retrieveFrame(int outputType, OutputArray frame)
{
    std::unique_lock<std::mutex> lk(frameMutex_);
    switch (outputType)
    {
    case CAP_OBSENSOR_DEPTH_MAP:
        if (!grabbedDepthFrame_.empty())
        {
            grabbedDepthFrame_.copyTo(frame);
            grabbedDepthFrame_.release();
            return true;
        }
        break;
    case CAP_OBSENSOR_BGR_IMAGE:
        if (!grabbedColorFrame_.empty())
        {
            // Decode the raw MJPG byte row captured by the color callback.
            auto mat = imdecode(grabbedColorFrame_, IMREAD_COLOR);
            grabbedColorFrame_.release();
            if (!mat.empty())
            {
                mat.copyTo(frame);
                return true;
            }
        }
        break;
    default:
        break;
    }
    return false;
}
// Read a capture property. Only the depth-camera intrinsics (fx/fy/cx/cy)
// are exposed; each value is divided by camParamScale_ to match the active
// 640x480 stream resolution. Unknown properties return 0.0.
// NOTE(review): camParamScale_ may be 0 if the depth channel never reported
// its calibration -- confirm callers tolerate the resulting inf.
double VideoCapture_obsensor::getProperty(int propIdx) const {
    double rst = 0.0;
    propIdx = propIdx & (~CAP_OBSENSOR_GENERATORS_MASK); // strip the generator selector bits
    // int gen = propIdx & CAP_OBSENSOR_GENERATORS_MASK;
    switch (propIdx)
    {
    case CAP_PROP_OBSENSOR_INTRINSIC_FX:
        rst = camParam_.p1[0] / camParamScale_;
        break;
    case CAP_PROP_OBSENSOR_INTRINSIC_FY:
        rst = camParam_.p1[1] / camParamScale_;
        break;
    case CAP_PROP_OBSENSOR_INTRINSIC_CX:
        rst = camParam_.p1[2] / camParamScale_;
        break;
    case CAP_PROP_OBSENSOR_INTRINSIC_CY:
        rst = camParam_.p1[3] / camParamScale_;
        break;
    }
    return rst;
}
// All exposed properties are read-only; always warns and fails.
bool VideoCapture_obsensor::setProperty(int propIdx, double /*propVal*/)
{
    CV_LOG_WARNING(NULL, "Unsupported or read only property, id=" << propIdx);
    return false;
}
} // namespace cv::
#endif // HAVE_OBSENSOR

@ -0,0 +1,66 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*
* Copyright(C) 2022 by ORBBEC Technology., Inc.
* Authors:
* Huang Zhenchang <yufeng@orbbec.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef OPENCV_VIDEOIO_CAP_OBSENSOR_CAPTURE_HPP
#define OPENCV_VIDEOIO_CAP_OBSENSOR_CAPTURE_HPP
#include <map>
#include <mutex>
#include "cap_obsensor/obsensor_stream_channel_interface.hpp"
#ifdef HAVE_OBSENSOR
namespace cv {
// IVideoCapture implementation backed by an Orbbec RGB-D camera: owns one
// stream channel per sensor, caches the latest frame delivered by each
// channel callback, and hands frames out via grabFrame()/retrieveFrame().
class VideoCapture_obsensor : public IVideoCapture
{
public:
    VideoCapture_obsensor(int index);
    virtual ~VideoCapture_obsensor() {}

    // Only the depth intrinsics are readable; nothing is writable.
    virtual double getProperty(int propIdx) const CV_OVERRIDE;
    virtual bool setProperty(int propIdx, double propVal) CV_OVERRIDE;

    // grabFrame() snapshots the latest callback frames; retrieveFrame()
    // hands them out per output type (depth map / BGR image).
    virtual bool grabFrame() CV_OVERRIDE;
    virtual bool retrieveFrame(int outputType, OutputArray frame) CV_OVERRIDE;
    virtual int getCaptureDomain() CV_OVERRIDE {
        return CAP_OBSENSOR;
    }
    virtual bool isOpened() const CV_OVERRIDE {
        return isOpened_;
    }

private:
    bool isOpened_;
    std::vector<Ptr<obsensor::IStreamChannel>> streamChannelGroup_;

    std::mutex frameMutex_;  // guards all Mat members below
    Mat depthFrame_;         // latest depth frame from the callback
    Mat colorFrame_;         // latest (still MJPG-encoded) color packet
    Mat grabbedDepthFrame_;  // snapshot taken by grabFrame()
    Mat grabbedColorFrame_;  // snapshot taken by grabFrame()

    obsensor::CameraParam camParam_; // depth camera calibration block
    int camParamScale_;              // resolution scale applied to intrinsics
};
} // namespace cv::
#endif // HAVE_OBSENSOR
#endif // OPENCV_VIDEOIO_CAP_OBSENSOR_CAPTURE_HPP

@ -53,7 +53,7 @@ namespace {
/** Ordering guidelines:
- modern optimized, multi-platform libraries: ffmpeg, gstreamer, Media SDK
- platform specific universal SDK: WINRT, AVFOUNDATION, MSMF/DSHOW, V4L/V4L2
- RGB-D: OpenNI/OpenNI2, REALSENSE
- RGB-D: OpenNI/OpenNI2, REALSENSE, OBSENSOR
- special OpenCV (file-based): "images", "mjpeg"
- special camera SDKs, including stereo: other special SDKs: FIREWIRE/1394, XIMEA/ARAVIS/GIGANETIX/PVAPI(GigE)/uEye
- other: XINE, gphoto2, etc
@ -171,6 +171,11 @@ static const struct VideoBackendInfo builtin_backends[] =
#endif
0)
#endif
#ifdef HAVE_OBSENSOR
DECLARE_STATIC_BACKEND(CAP_OBSENSOR, "OBSENSOR", MODE_CAPTURE_BY_INDEX, 0, create_obsensor_capture, 0)
#endif
// dropped backends: MIL, TYZX
};

@ -0,0 +1,74 @@
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <iostream>
using namespace cv;
// Minimal demo of the obsensor (Orbbec RGB-D) capture backend: grabs the BGR
// image and the depth map, displays both, and renders a simple depth-on-color
// overlay until a key is pressed.
int main()
{
    // Index 0 selects the first detected camera group; CAP_OBSENSOR forces the backend.
    VideoCapture obsensorCapture(0, CAP_OBSENSOR);
    if(!obsensorCapture.isOpened()){
        std::cerr << "Failed to open obsensor capture! Index out of range or no response from device";
        return -1;
    }

    // Depth camera intrinsics, already scaled to the active 640x480 profile.
    double fx = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_FX);
    double fy = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_FY);
    double cx = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_CX);
    double cy = obsensorCapture.get(CAP_PROP_OBSENSOR_INTRINSIC_CY);
    std::cout << "obsensor camera intrinsic params: fx=" << fx << ", fy=" << fy << ", cx=" << cx << ", cy=" << cy << std::endl;

    Mat image;
    Mat depthMap;
    Mat adjDepthMap;
    while (true)
    {
        // Grab depth map like this:
        // obsensorCapture >> depthMap;

        // Another way to grab depth map (and bgr image).
        if (obsensorCapture.grab())
        {
            if (obsensorCapture.retrieve(image, CAP_OBSENSOR_BGR_IMAGE))
            {
                imshow("RGB", image);
            }

            if (obsensorCapture.retrieve(depthMap, CAP_OBSENSOR_DEPTH_MAP))
            {
                // Rescale 16-bit depth to 8 bits and color it for display.
                normalize(depthMap, adjDepthMap, 0, 255, NORM_MINMAX, CV_8UC1);
                applyColorMap(adjDepthMap, adjDepthMap, COLORMAP_JET);
                imshow("DEPTH", adjDepthMap);
            }

            // depth map overlay on bgr image
            static const float alpha = 0.6f;
            if (!image.empty() && !depthMap.empty())
            {
                normalize(depthMap, adjDepthMap, 0, 255, NORM_MINMAX, CV_8UC1);
                cv::resize(adjDepthMap, adjDepthMap, cv::Size(image.cols, image.rows));
                for (int i = 0; i < image.rows; i++)
                {
                    for (int j = 0; j < image.cols; j++)
                    {
                        cv::Vec3b& outRgb = image.at<cv::Vec3b>(i, j);
                        // Invert so nearer (smaller) depth is brighter; skip the
                        // extreme values 0/255 (out-of-range / missing depth).
                        uint8_t depthValue = 255 - adjDepthMap.at<uint8_t>(i, j);
                        if (depthValue != 0 && depthValue != 255)
                        {
                            // Per-channel alpha blend of depth intensity over the BGR pixel.
                            outRgb[0] = (uint8_t)(outRgb[0] * (1.0f - alpha) + depthValue * alpha);
                            outRgb[1] = (uint8_t)(outRgb[1] * (1.0f - alpha) + depthValue * alpha);
                            outRgb[2] = (uint8_t)(outRgb[2] * (1.0f - alpha) + depthValue * alpha);
                        }
                    }
                }
                imshow("DepthToColor", image);
            }
            image.release();
            depthMap.release();
        }

        // Exit on any key press.
        if (pollKey() >= 0)
            break;
    }
    return 0;
}
Loading…
Cancel
Save