@@ -1,4 +1,4 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
|
|
|
|
@@ -58,7 +58,7 @@ class HighguiAndroidCameraActivity;
class CvCapture_Android : public CvCapture
{
public:
    CvCapture_Android(int);
    virtual ~CvCapture_Android();

    virtual double getProperty(int propIdx);
|
|
|
@@ -81,66 +81,74 @@ protected:
    CameraActivity* m_activity;

    //raw from camera
    int m_width;
    int m_height;
    unsigned char *m_frameYUV420i;
    unsigned char *m_frameYUV420inext;

    void setFrame(const void* buffer, int bufferSize);

private:
    bool m_isOpened;
    bool m_CameraParamsChanged;

    //frames counter for statistics
    int m_framesGrabbed;

    //cached converted frames
    OutputMap m_frameGray;
    OutputMap m_frameColor;
    bool m_hasGray;
    bool m_hasColor;

    //synchronization
    pthread_mutex_t m_nextFrameMutex;
    pthread_cond_t m_nextFrameCond;
    volatile bool m_waitingNextFrame;

    friend class HighguiAndroidCameraActivity;

    void prepareCacheForYUV420i(int width, int height);
    static bool convertYUV420i2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat);
    static bool convertYUV420i2BGR888(int width, int height, const unsigned char* yuv, cv::Mat& resmat);
};
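//CameraActivity implementation that receives frame buffers from the native
//camera, counts them for statistics and hands them over to CvCapture_Android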
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class HighguiAndroidCameraActivity : public CameraActivity
{
public:
    HighguiAndroidCameraActivity(CvCapture_Android* capture)
    {
        m_capture = capture;
        m_framesReceived = 0;
    }

    virtual bool onFrameBuffer(void* buffer, int bufferSize)
    {
        LOGD("buffer addr:%p size:%d",buffer, bufferSize);
        if(isConnected() && buffer != 0 && bufferSize > 0)
        {
            m_framesReceived++;
            if (m_capture->m_waitingNextFrame)
            {
                m_capture->setFrame(buffer, bufferSize);
                pthread_mutex_lock(&m_capture->m_nextFrameMutex);
                m_capture->m_waitingNextFrame = false;//set flag that no more frames required at this moment
                pthread_cond_broadcast(&m_capture->m_nextFrameCond);
                pthread_mutex_unlock(&m_capture->m_nextFrameMutex);
            }
            return true;
        }
        return false;
    }

    void LogFramesRate()
    {
        LOGI("FRAMES received: %d grabbed: %d", m_framesReceived, m_capture->m_framesGrabbed);
    }

private:
    CvCapture_Android* m_capture;
    int m_framesReceived;
};
|
|
|
@@ -154,195 +162,292 @@ IplImage* CvCapture_Android::OutputMap::getIplImagePtr()
    return &iplHeader;
}
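//connect to the camera with the given id and prepare the mutex/condition pair
//used to pass frames from the camera callback to grabFrame()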
|
|
|
|
|
|
|
|
|
CvCapture_Android::CvCapture_Android(int cameraId)
{
    //defaults
    m_width = 0;
    m_height = 0;
    m_activity = 0;
    m_isOpened = false;
    m_frameYUV420i = 0;
    m_frameYUV420inext = 0;
    m_hasGray = false;
    m_hasColor = false;
    m_waitingNextFrame = false;
    m_framesGrabbed = 0;
    m_CameraParamsChanged = false;

    //try connect to camera
    m_activity = new HighguiAndroidCameraActivity(this);

    if (m_activity == 0) return;

    pthread_mutex_init(&m_nextFrameMutex, NULL);
    pthread_cond_init (&m_nextFrameCond, NULL);

    CameraActivity::ErrorCode errcode = m_activity->connect(cameraId);

    if(errcode == CameraActivity::NO_ERROR)
        m_isOpened = true;
    else
    {
        LOGE("Native_camera returned opening error: %d", errcode);
        delete m_activity;
        m_activity = 0;
    }
}

bool CvCapture_Android::isOpened() const
{
    return m_isOpened;
}
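//log frame statistics, disconnect from the camera (done inside the CameraActivity
//destructor), free both YUV buffers and destroy the synchronization primitives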
|
|
|
|
|
|
|
|
|
CvCapture_Android::~CvCapture_Android()
{
    if (m_activity)
    {
        ((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();

        //m_activity->disconnect() will be automatically called inside destructor;
        delete m_activity;
        delete[] m_frameYUV420i;     //buffers were allocated with new[]
        delete[] m_frameYUV420inext;
        m_activity = 0;
        m_frameYUV420i = 0;
        m_frameYUV420inext = 0;

        pthread_mutex_destroy(&m_nextFrameMutex);
        pthread_cond_destroy(&m_nextFrameCond);
    }
}
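//property access: CV_CAP_PROP_FRAME_WIDTH/HEIGHT are mapped to the native camera
//frame size, any other property id raises CV_Error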
|
|
|
|
|
|
|
|
|
double CvCapture_Android::getProperty( int propIdx )
{
    switch ( propIdx )
    {
    case CV_CAP_PROP_FRAME_WIDTH:
        return (double)m_activity->getFrameWidth();
    case CV_CAP_PROP_FRAME_HEIGHT:
        return (double)m_activity->getFrameHeight();
    default:
        CV_Error( CV_StsOutOfRange, "Failed attempt to GET unsupported camera property." );
        break;
    }
    return -1.0;
}
|
|
|
|
|
|
|
|
|
bool CvCapture_Android::setProperty( int propIdx, double propValue )
{
    bool res = false;
    if( isOpened() )
    {
        switch ( propIdx )
        {
        case CV_CAP_PROP_FRAME_WIDTH:
            m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FRAMEWIDTH, propValue);
            break;
        case CV_CAP_PROP_FRAME_HEIGHT:
            m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT, propValue);
            break;
        default:
            CV_Error( CV_StsOutOfRange, "Failed attempt to SET unsupported camera property." );
            break;
        }
        m_CameraParamsChanged = true;
    }

    return res;
}
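//grabFrame() applies pending property changes and then blocks on m_nextFrameCond
//until onFrameBuffer() delivers the next frame from the camera thread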
|
|
|
|
|
|
|
|
|
bool CvCapture_Android::grabFrame()
{
    if( !isOpened() )
        return false;

    pthread_mutex_lock(&m_nextFrameMutex);
    if (m_CameraParamsChanged)
    {
        m_activity->applyProperties();
        m_CameraParamsChanged = false;
    }
    m_waitingNextFrame = true;
    pthread_cond_wait(&m_nextFrameCond, &m_nextFrameMutex);
    pthread_mutex_unlock(&m_nextFrameMutex);

    m_framesGrabbed++;
    return true;
}
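//retrieveFrame() converts the last received YUV420i buffer into the requested
//output format; conversion results are cached until the next frame arrives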
|
|
|
|
|
|
|
|
|
IplImage* CvCapture_Android::retrieveFrame( int outputType )
{
    IplImage* image = 0;
    if (0 != m_frameYUV420i)
    {
        switch(outputType)
        {
        case CV_CAP_ANDROID_COLOR_FRAME:
            if (!m_hasColor)
                if (!(m_hasColor = convertYUV420i2BGR888(m_width, m_height, m_frameYUV420i, m_frameColor.mat)))
                    return 0;
            image = m_frameColor.getIplImagePtr();
            break;
        case CV_CAP_ANDROID_GREY_FRAME:
            if (!m_hasGray)
                if (!(m_hasGray = convertYUV420i2Grey(m_width, m_height, m_frameYUV420i, m_frameGray.mat)))
                    return 0;
            image = m_frameGray.getIplImagePtr();
            break;
        default:
            LOGE("Unsupported frame output format: %d", outputType);
            CV_Error( CV_StsOutOfRange, "Output frame format is not supported." );
            image = 0;
            break;
        }
    }
    return image;
}
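//setFrame() is called from the camera callback: it copies the incoming buffer
//into the spare YUV420i buffer, swaps it with the current one and invalidates
//the cached grey/color conversions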
|
|
|
|
|
|
|
|
|
void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
{
    LOGD("Buffer available: %p + %d", buffer, bufferSize);
    int width = m_activity->getFrameWidth();
    int height = m_activity->getFrameHeight();
    int expectedSize = (width * height * 3) >> 1;

    if ( expectedSize != bufferSize)
    {
        LOGE("ERROR reading YUV420i buffer: width=%d, height=%d, size=%d, receivedSize=%d", width, height, expectedSize, bufferSize);
        return;
    }

    //allocate memory if needed
    prepareCacheForYUV420i(width, height);

    //copy data
    memcpy(m_frameYUV420inext, buffer, bufferSize);

    //swap current and new frames
    unsigned char* tmp = m_frameYUV420i;
    m_frameYUV420i = m_frameYUV420inext;
    m_frameYUV420inext = tmp;

    //discard cached frames
    m_hasGray = false;
    m_hasColor = false;
}
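//(re)allocate both YUV420i buffers when the frame size changes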
|
|
|
|
|
|
|
|
|
void CvCapture_Android::prepareCacheForYUV420i(int width, int height)
{
    if (width != m_width || height != m_height)
    {
        m_width = width;
        m_height = height;
        unsigned char *tmp = m_frameYUV420inext;
        m_frameYUV420inext = new unsigned char [width * height * 3 / 2];
        delete[] tmp;

        tmp = m_frameYUV420i;
        m_frameYUV420i = new unsigned char [width * height * 3 / 2];
        delete[] tmp;
    }
}
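//saturate an intermediate value to the 0..255 range of an 8-bit channel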
|
|
|
|
|
|
|
|
|
|
|
|
|
|
inline unsigned char clamp(int value)
{
    if (value <= 0)
        return 0;
    if (value >= 255)
        return (unsigned char)255;
    return (unsigned char)value;
}
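//the grey image is the luma plane, copied directly from the head of the
//YUV420i buffer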
|
|
|
|
|
|
|
|
|
bool CvCapture_Android::convertYUV420i2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat)
{
    if (yuv == 0) return false;

    resmat.create(height, width, CV_8UC1); //Mat::create takes (rows, cols)
    unsigned char* matBuff = resmat.ptr<unsigned char> (0);
    memcpy(matBuff, yuv, width * height);
    return !resmat.empty();
}
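//fixed-point YUV420i (semi-planar, VU order) to BGR conversion: the BT.601
//coefficients from the comments below are scaled by 256 (1.164*256~298,
//1.596*256~409, 0.813*256~208, 0.391*256~100, 2.018*256~516) so each channel
//is computed with integer math and a final >>8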
|
|
|
|
|
|
|
|
|
bool CvCapture_Android::convertYUV420i2BGR888(int width, int height, const unsigned char* yuv, cv::Mat& resmat)
{
    if (yuv == 0) return false;
    CV_Assert(width % 2 == 0 && height % 2 == 0);

    resmat.create(height, width, CV_8UC3);

    unsigned char* y1 = (unsigned char*)yuv;
    unsigned char* uv = y1 + width * height;

    //B = 1.164(Y - 16) + 2.018(U - 128)
    //G = 1.164(Y - 16) - 0.813(V - 128) - 0.391(U - 128)
    //R = 1.164(Y - 16) + 1.596(V - 128)

    for (int j = 0; j < height; j+=2, y1+=width*2, uv+=width)
    {
        unsigned char* row1 = resmat.ptr<unsigned char>(j);
        unsigned char* row2 = resmat.ptr<unsigned char>(j+1);
        unsigned char* y2 = y1 + width;

        for(int i = 0; i < width; i+=2,row1+=6,row2+=6)
        {
//            unsigned char cr = uv[i];
//            unsigned char cb = uv[i+1];

//            row1[0] = y1[i];
//            row1[1] = cr;
//            row1[2] = cb;

//            row1[3] = y1[i+1];
//            row1[4] = cr;
//            row1[5] = cb;

//            row2[0] = y2[i];
//            row2[1] = cr;
//            row2[2] = cb;

//            row2[3] = y2[i+1];
//            row2[4] = cr;
//            row2[5] = cb;

            int cr = uv[i] - 128;
            int cb = uv[i+1] - 128;

            int ruv = 409 * cr + 128;
            int guv = 128 - 100 * cb - 208 * cr;
            int buv = 516 * cb + 128;

            int y00 = (y1[i] - 16) * 298;
            row1[0] = clamp((y00 + buv) >> 8);
            row1[1] = clamp((y00 + guv) >> 8);
            row1[2] = clamp((y00 + ruv) >> 8);

            int y01 = (y1[i+1] - 16) * 298;
            row1[3] = clamp((y01 + buv) >> 8);
            row1[4] = clamp((y01 + guv) >> 8);
            row1[5] = clamp((y01 + ruv) >> 8);

            int y10 = (y2[i] - 16) * 298;
            row2[0] = clamp((y10 + buv) >> 8);
            row2[1] = clamp((y10 + guv) >> 8);
            row2[2] = clamp((y10 + ruv) >> 8);

            int y11 = (y2[i+1] - 16) * 298;
            row2[3] = clamp((y11 + buv) >> 8);
            row2[4] = clamp((y11 + guv) >> 8);
            row2[5] = clamp((y11 + ruv) >> 8);
        }
    }

    return !resmat.empty();
}
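//create a capture for the native Android camera with the given id; the capture
//is returned only if the camera was opened successfully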
|
|
|
|
|
|
|
|
|
|
|
|
|
|
CvCapture* cvCreateCameraCapture_Android( int cameraId )
{
    CvCapture_Android* capture = new CvCapture_Android(cameraId);

    if( capture->isOpened() )
        return capture;
|
|
|
|