Fixed camera output format handling

pull/13383/head
Andrey Kamaev 14 years ago
parent 4ddc1a6477
commit ac7e16fdf6
  1. BIN
      3rdparty/lib/armeabi-v7a/libnative_camera_r2.2.2.so
  2. BIN
      3rdparty/lib/armeabi-v7a/libnative_camera_r2.3.3.so
  3. BIN
      3rdparty/lib/armeabi/libnative_camera_r2.2.2.so
  4. BIN
      3rdparty/lib/armeabi/libnative_camera_r2.3.3.so
  5. 2
      modules/androidcamera/camera_wrapper/camera_wrapper.cpp
  6. 6
      modules/androidcamera/src/camera_activity.cpp
  7. 3
      modules/highgui/include/opencv2/highgui/highgui_c.h
  8. 99
      modules/highgui/src/cap_android.cpp
  9. 2
      modules/imgproc/include/opencv2/imgproc/imgproc.hpp
  10. 2
      modules/imgproc/include/opencv2/imgproc/types_c.h
  11. 52
      modules/imgproc/src/color.cpp
  12. 2
      samples/android/tutorial-1-addopencv/src/org/opencv/samples/tutorial1/Sample1View.java
  13. 4
      samples/android/tutorial-4-mixed/src/org/opencv/samples/tutorial4/Sample4View.java

Binary file not shown.

Binary file not shown.

@@ -398,7 +398,7 @@ double CameraHandler::getProperty(int propIdx)
union {const char* str;double res;} u;
memset(&u.res, 0, sizeof(u.res));
u.str = cameraPropertySupportedPreviewSizesString.c_str();
u.str = cameraPropertyPreviewFormatString.c_str();
return u.res;
}

@@ -133,7 +133,7 @@ CameraActivity::ErrorCode CameraWrapperConnector::getProperty(void* camera, int
LOGE("CameraWrapperConnector::getProperty error: wrong pointer to camera object");
return CameraActivity::ERROR_WRONG_POINTER_CAMERA_WRAPPER;
}
LOGE("calling (*pGetPropertyC)(%p, %d)", camera, propIdx);
*value = (*pGetPropertyC)(camera, propIdx);
return CameraActivity::NO_ERROR;
}
@@ -260,10 +260,6 @@ std::string CameraWrapperConnector::getPathLibFolder()
LOGD("Library name: %s", dl_info.dli_fname);
LOGD("Library base address: %p", dl_info.dli_fbase);
char addrBuf[18];
sprintf(addrBuf, "%p-", dl_info.dli_fbase);
int addrLength = strlen(addrBuf);
const char* libName=dl_info.dli_fname;
while( ((*libName)=='/') || ((*libName)=='.') )
libName++;

@@ -349,7 +349,8 @@ enum
CV_CAP_PROP_WHITE_BALANCE_RED_V =26,
CV_CAP_PROP_MAX_DC1394 =27,
CV_CAP_PROP_AUTOGRAB =1024, // property for highgui class CvCapture_Android only
CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING=1025, // tricky property, returns cpnst char* indeed
CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING=1025, // readonly, tricky property, returns const char* indeed
CV_CAP_PROP_PREVIEW_FORMAT=1026, // readonly, tricky property, returns const char* indeed
// OpenNI map generators
CV_CAP_OPENNI_DEPTH_GENERATOR = 0,
CV_CAP_OPENNI_IMAGE_GENERATOR = 1 << 31,

@@ -86,8 +86,18 @@ protected:
//raw from camera
int m_width;
int m_height;
unsigned char *m_frameYUV420i;
unsigned char *m_frameYUV420inext;
unsigned char *m_frameYUV420;
unsigned char *m_frameYUV420next;
enum YUVformat
{
noformat = 0,
yuv420sp,
yuv420i,
yuvUnknown
};
YUVformat m_frameFormat;
void setFrame(const void* buffer, int bufferSize);
@@ -117,9 +127,9 @@ private:
volatile bool m_waitingNextFrame;
volatile bool m_shouldAutoGrab;
void prepareCacheForYUV420i(int width, int height);
static bool convertYUV420i2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat);
static bool convertYUV420i2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha);
void prepareCacheForYUV(int width, int height);
bool convertYUV2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat);
bool convertYUV2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha);
friend class HighguiAndroidCameraActivity;
};
@@ -179,8 +189,8 @@ CvCapture_Android::CvCapture_Android(int cameraId)
m_height = 0;
m_activity = 0;
m_isOpened = false;
m_frameYUV420i = 0;
m_frameYUV420inext = 0;
m_frameYUV420 = 0;
m_frameYUV420next = 0;
m_hasGray = false;
m_hasColor = false;
m_dataState = CVCAPTURE_ANDROID_STATE_NO_FRAME;
@@ -188,6 +198,7 @@ CvCapture_Android::CvCapture_Android(int cameraId)
m_shouldAutoGrab = false;
m_framesGrabbed = 0;
m_CameraParamsChanged = false;
m_frameFormat = noformat;
//try connect to camera
m_activity = new HighguiAndroidCameraActivity(this);
@@ -223,10 +234,10 @@ CvCapture_Android::~CvCapture_Android()
pthread_mutex_lock(&m_nextFrameMutex);
unsigned char *tmp1=m_frameYUV420i;
unsigned char *tmp2=m_frameYUV420inext;
m_frameYUV420i = 0;
m_frameYUV420inext = 0;
unsigned char *tmp1=m_frameYUV420;
unsigned char *tmp2=m_frameYUV420next;
m_frameYUV420 = 0;
m_frameYUV420next = 0;
delete tmp1;
delete tmp2;
@@ -255,6 +266,8 @@ double CvCapture_Android::getProperty( int propIdx )
case CV_CAP_PROP_SUPPORTED_PREVIEW_SIZES_STRING:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_SUPPORTED_PREVIEW_SIZES_STRING);
case CV_CAP_PROP_PREVIEW_FORMAT:
return (double)m_activity->getProperty(ANDROID_CAMERA_PROPERTY_PREVIEW_FORMAT_STRING);
default:
CV_Error( CV_StsOutOfRange, "Failed attempt to GET unsupported camera property." );
break;
@@ -318,9 +331,9 @@ bool CvCapture_Android::grabFrame()
if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) {
//LOGD("CvCapture_Android::grabFrame: get new frame");
//swap current and new frames
unsigned char* tmp = m_frameYUV420i;
m_frameYUV420i = m_frameYUV420inext;
m_frameYUV420inext = tmp;
unsigned char* tmp = m_frameYUV420;
m_frameYUV420 = m_frameYUV420next;
m_frameYUV420next = tmp;
//discard cached frames
m_hasGray = false;
@@ -348,27 +361,39 @@ IplImage* CvCapture_Android::retrieveFrame( int outputType )
{
IplImage* image = NULL;
unsigned char *current_frameYUV420i=m_frameYUV420i;
unsigned char *current_frameYUV420=m_frameYUV420;
//Attention! all the operations in this function below should occupy less time than the period between two frames from camera
if (NULL != current_frameYUV420i)
if (NULL != current_frameYUV420)
{
if (m_frameFormat == noformat)
{
union {double prop; const char* name;} u;
u.prop = getProperty(CV_CAP_PROP_PREVIEW_FORMAT);
if (0 == strcmp(u.name, "yuv420sp"))
m_frameFormat = yuv420sp;
else if (0 == strcmp(u.name, "yuv420i"))
m_frameFormat = yuv420i;
else
m_frameFormat = yuvUnknown;
}
switch(outputType)
{
case CV_CAP_ANDROID_GREY_FRAME:
if (!m_hasGray)
if (!(m_hasGray = convertYUV420i2Grey(m_width, m_height, current_frameYUV420i, m_frameGray.mat)))
if (!(m_hasGray = convertYUV2Grey(m_width, m_height, current_frameYUV420, m_frameGray.mat)))
return NULL;
image = m_frameGray.getIplImagePtr();
break;
case CV_CAP_ANDROID_COLOR_FRAME_BGR: case CV_CAP_ANDROID_COLOR_FRAME_RGB:
if (!m_hasColor)
if (!(m_hasColor = convertYUV420i2BGR(m_width, m_height, current_frameYUV420i, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGB, false)))
if (!(m_hasColor = convertYUV2BGR(m_width, m_height, current_frameYUV420, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGB, false)))
return NULL;
image = m_frameColor.getIplImagePtr();
break;
case CV_CAP_ANDROID_COLOR_FRAME_BGRA: case CV_CAP_ANDROID_COLOR_FRAME_RGBA:
if (!m_hasColor)
if (!(m_hasColor = convertYUV420i2BGR(m_width, m_height, current_frameYUV420i, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGBA, true)))
if (!(m_hasColor = convertYUV2BGR(m_width, m_height, current_frameYUV420, m_frameColor.mat, outputType == CV_CAP_ANDROID_COLOR_FRAME_RGBA, true)))
return NULL;
image = m_frameColor.getIplImagePtr();
break;
@@ -391,22 +416,22 @@ void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
if ( expectedSize != bufferSize)
{
LOGE("ERROR reading YUV420i buffer: width=%d, height=%d, size=%d, receivedSize=%d", width, height, expectedSize, bufferSize);
LOGE("ERROR reading YUV buffer: width=%d, height=%d, size=%d, receivedSize=%d", width, height, expectedSize, bufferSize);
return;
}
//allocate memory if needed
prepareCacheForYUV420i(width, height);
prepareCacheForYUV(width, height);
//copy data
memcpy(m_frameYUV420inext, buffer, bufferSize);
memcpy(m_frameYUV420next, buffer, bufferSize);
//LOGD("CvCapture_Android::setFrame -- memcpy is done");
#if 0 //moved this part of code into grabFrame
//swap current and new frames
unsigned char* tmp = m_frameYUV420i;
m_frameYUV420i = m_frameYUV420inext;
m_frameYUV420inext = tmp;
unsigned char* tmp = m_frameYUV420;
m_frameYUV420 = m_frameYUV420next;
m_frameYUV420next = tmp;
//discard cached frames
m_hasGray = false;
@@ -418,30 +443,31 @@ void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
}
//Attention: this method should be called inside pthread_mutex_lock(m_nextFrameMutex) only
void CvCapture_Android::prepareCacheForYUV420i(int width, int height)
void CvCapture_Android::prepareCacheForYUV(int width, int height)
{
if (width != m_width || height != m_height)
{
LOGD("CvCapture_Android::prepareCacheForYUV420i: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height);
LOGD("CvCapture_Android::prepareCacheForYUV: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height);
m_width = width;
m_height = height;
unsigned char *tmp = m_frameYUV420inext;
m_frameYUV420inext = new unsigned char [width * height * 3 / 2];
unsigned char *tmp = m_frameYUV420next;
m_frameYUV420next = new unsigned char [width * height * 3 / 2];
if (tmp != NULL) {
delete[] tmp;
}
tmp = m_frameYUV420i;
m_frameYUV420i = new unsigned char [width * height * 3 / 2];
tmp = m_frameYUV420;
m_frameYUV420 = new unsigned char [width * height * 3 / 2];
if (tmp != NULL) {
delete[] tmp;
}
}
}
bool CvCapture_Android::convertYUV420i2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat)
bool CvCapture_Android::convertYUV2Grey(int width, int height, const unsigned char* yuv, cv::Mat& resmat)
{
if (yuv == 0) return false;
if (m_frameFormat != yuv420sp && m_frameFormat != yuv420i) return false;
#define ALWAYS_COPY_GRAY 0
#if ALWAYS_COPY_GRAY
resmat.create(height, width, CV_8UC1);
@@ -453,14 +479,19 @@ bool CvCapture_Android::convertYUV420i2Grey(int width, int height, const unsigne
return !resmat.empty();
}
bool CvCapture_Android::convertYUV420i2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha)
bool CvCapture_Android::convertYUV2BGR(int width, int height, const unsigned char* yuv, cv::Mat& resmat, bool inRGBorder, bool withAlpha)
{
if (yuv == 0) return false;
if (m_frameFormat != yuv420sp && m_frameFormat != yuv420i) return false;
CV_Assert(width % 2 == 0 && height % 2 == 0);
cv::Mat src(height*3/2, width, CV_8UC1, (void*)yuv);
cv::cvtColor(src, resmat, inRGBorder ? CV_YUV420i2RGB : CV_YUV420i2BGR, withAlpha ? 4 : 3);
if (m_frameFormat == yuv420sp)
cv::cvtColor(src, resmat, inRGBorder ? CV_YUV420sp2RGB : CV_YUV420sp2BGR, withAlpha ? 4 : 3);
else if (m_frameFormat == yuv420i)
cv::cvtColor(src, resmat, inRGBorder ? CV_YUV420i2RGB : CV_YUV420i2BGR, withAlpha ? 4 : 3);
return !resmat.empty();
}

@@ -902,6 +902,8 @@ enum
COLOR_YUV420i2RGB = 90,
COLOR_YUV420i2BGR = 91,
COLOR_YUV420sp2RGB = 92,
COLOR_YUV420sp2BGR = 93,
COLOR_COLORCVT_MAX =100
};

@@ -228,6 +228,8 @@ enum
CV_YUV420i2RGB = 90,
CV_YUV420i2BGR = 91,
CV_YUV420sp2RGB = 92,
CV_YUV420sp2BGR = 93,
CV_COLORCVT_MAX =100
};

@@ -2648,16 +2648,16 @@ static void Bayer2RGB_VNG_8u( const Mat& srcmat, Mat& dstmat, int code )
}
}
///////////////////////////////////// YUV420i -> RGB /////////////////////////////////////
///////////////////////////////////// YUV420 -> RGB /////////////////////////////////////
template<int R>
struct YUV420i2BGR888Invoker
template<int R, int SPorI>
struct YUV4202BGR888Invoker
{
Mat* dst;
const uchar* my1, *muv;
int width;
YUV420i2BGR888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
YUV4202BGR888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
: dst(&_dst), my1(_y1), muv(_uv), width(_width) {}
void operator()(const BlockedRange& range) const
@@ -2676,8 +2676,8 @@ struct YUV420i2BGR888Invoker
for(int i = 0; i < width; i+=2,row1+=6,row2+=6)
{
int cr = uv[i] - 128;
int cb = uv[i+1] - 128;
int cr = uv[i + SPorI + 0] - 128;
int cb = uv[i - SPorI + 1] - 128;
int ruv = 409 * cr + 128;
int guv = 128 - 100 * cb - 208 * cr;
@@ -2707,14 +2707,14 @@ struct YUV420i2BGR888Invoker
}
};
template<int R>
struct YUV420i2BGRA8888Invoker
template<int R, int SPorI>
struct YUV4202BGRA8888Invoker
{
Mat* dst;
const uchar* my1, *muv;
int width;
YUV420i2BGRA8888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
YUV4202BGRA8888Invoker(Mat& _dst, int _width, const uchar* _y1, const uchar* _uv)
: dst(&_dst), my1(_y1), muv(_uv), width(_width) {}
void operator()(const BlockedRange& range) const
@@ -2733,8 +2733,8 @@ struct YUV420i2BGRA8888Invoker
for(int i = 0; i < width; i+=2,row1+=8,row2+=8)
{
int cr = uv[i] - 128;
int cb = uv[i+1] - 128;
int cr = uv[i + SPorI + 0] - 128;
int cb = uv[i - SPorI + 1] - 128;
int ruv = 409 * cr + 128;
int guv = 128 - 100 * cb - 208 * cr;
@@ -3112,7 +3112,7 @@ void cv::cvtColor( InputArray _src, OutputArray _dst, int code, int dcn )
Bayer2RGB_VNG_8u(src, dst, code);
}
break;
case CV_YUV420i2BGR: case CV_YUV420i2RGB:
case CV_YUV420sp2BGR: case CV_YUV420sp2RGB: case CV_YUV420i2BGR: case CV_YUV420i2RGB:
{
if(dcn <= 0) dcn = 3;
CV_Assert( dcn == 3 || dcn == 4 );
@@ -3126,22 +3126,36 @@ void cv::cvtColor( InputArray _src, OutputArray _dst, int code, int dcn )
const uchar* uv = y + dstSz.area();
#ifdef HAVE_TEGRA_OPTIMIZATION
if (!tegra::YUV420i2BGR(y, uv, dst, CV_YUV420i2RGB == code))
if (!tegra::YUV420i2BGR(y, uv, dst, CV_YUV420sp2RGB == code))
#endif
{
if (CV_YUV420i2RGB == code)
if (CV_YUV420sp2RGB == code)
{
if (dcn == 3)
parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGR888Invoker<2>(dst, dstSz.width, y, uv));
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<2,0>(dst, dstSz.width, y, uv));
else
parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGRA8888Invoker<2>(dst, dstSz.width, y, uv));
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<2,0>(dst, dstSz.width, y, uv));
}
else
else if (CV_YUV420sp2BGR == code)
{
if (dcn == 3)
parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGR888Invoker<0>(dst, dstSz.width, y, uv));
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<0,0>(dst, dstSz.width, y, uv));
else
parallel_for(BlockedRange(0, dstSz.height, 2), YUV420i2BGRA8888Invoker<0>(dst, dstSz.width, y, uv));
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<0,0>(dst, dstSz.width, y, uv));
}
else if (CV_YUV420i2RGB == code)
{
if (dcn == 3)
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<2,1>(dst, dstSz.width, y, uv));
else
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<2,1>(dst, dstSz.width, y, uv));
}
else if (CV_YUV420i2BGR == code)
{
if (dcn == 3)
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGR888Invoker<0,1>(dst, dstSz.width, y, uv));
else
parallel_for(BlockedRange(0, dstSz.height, 2), YUV4202BGRA8888Invoker<0,1>(dst, dstSz.width, y, uv));
}
}
}

@@ -45,7 +45,7 @@ class Sample1View extends SampleViewBase {
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample1Java.VIEW_MODE_RGBA:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
break;
case Sample1Java.VIEW_MODE_CANNY:

@@ -42,14 +42,14 @@ class Sample4View extends SampleViewBase {
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample4Mixed.VIEW_MODE_RGBA:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
break;
case Sample4Mixed.VIEW_MODE_CANNY:
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_FEATURES:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420i2RGB, 4);
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}

Loading…
Cancel
Save