Merged the trunk r8345:8376

pull/13383/head
Andrey Kamaev 13 years ago
parent 70166820ea
commit e1c4fd9e1f
  1. 3rdparty/include/MultiMon.h (502)
  2. android/android.toolchain.cmake (76)
  3. cmake/OpenCVCompilerOptions.cmake (19)
  4. cmake/OpenCVDetectCXXCompiler.cmake (2)
  5. cmake/OpenCVDetectPython.cmake (18)
  6. cmake/OpenCVFindIPP.cmake (118)
  7. modules/core/doc/basic_structures.rst (9)
  8. modules/core/src/matrix.cpp (10)
  9. modules/core/test/test_mat.cpp (11)
  10. modules/highgui/src/cap_avfoundation.mm (13)
  11. modules/highgui/src/window_w32.cpp (54)
  12. modules/imgproc/src/accum.cpp (2)
  13. modules/imgproc/src/phasecorr.cpp (2)
  14. modules/imgproc/src/utils.cpp (13)
  15. modules/imgproc/test/test_pc.cpp (4)
  16. modules/video/include/opencv2/video/tracking.hpp (15)
  17. modules/video/perf/perf_optflowpyrlk.cpp (4)
  18. modules/video/src/bgfg_gaussmix2.cpp (2)
  19. modules/video/src/lkpyramid.cpp (229)
  20. modules/video/src/precomp.hpp (2)
  21. samples/android/15-puzzle/src/org/opencv/samples/puzzle15/SampleCvViewBase.java (53)
  22. samples/android/15-puzzle/src/org/opencv/samples/puzzle15/puzzle15Activity.java (27)
  23. samples/android/15-puzzle/src/org/opencv/samples/puzzle15/puzzle15View.java (12)
  24. samples/android/color-blob-detection/src/org/opencv/samples/colorblobdetect/ColorBlobDetectionActivity.java (27)
  25. samples/android/color-blob-detection/src/org/opencv/samples/colorblobdetect/ColorBlobDetectionView.java (5)
  26. samples/android/color-blob-detection/src/org/opencv/samples/colorblobdetect/SampleCvViewBase.java (53)
  27. samples/android/face-detection/src/org/opencv/samples/fd/FdActivity.java (32)
  28. samples/android/face-detection/src/org/opencv/samples/fd/FdView.java (10)
  29. samples/android/face-detection/src/org/opencv/samples/fd/SampleCvViewBase.java (53)
  30. samples/android/image-manipulations/src/org/opencv/samples/imagemanipulations/ImageManipulationsActivity.java (35)
  31. samples/android/image-manipulations/src/org/opencv/samples/imagemanipulations/ImageManipulationsView.java (10)
  32. samples/android/image-manipulations/src/org/opencv/samples/imagemanipulations/SampleCvViewBase.java (59)
  33. samples/android/tutorial-0-androidcamera/res/values/strings.xml (2)
  34. samples/android/tutorial-0-androidcamera/src/org/opencv/samples/tutorial0/Sample0Base.java (29)
  35. samples/android/tutorial-0-androidcamera/src/org/opencv/samples/tutorial0/Sample0View.java (16)
  36. samples/android/tutorial-0-androidcamera/src/org/opencv/samples/tutorial0/SampleViewBase.java (152)
  37. samples/android/tutorial-1-addopencv/res/values/strings.xml (2)
  38. samples/android/tutorial-1-addopencv/src/org/opencv/samples/tutorial1/Sample1Java.java (27)
  39. samples/android/tutorial-1-addopencv/src/org/opencv/samples/tutorial1/Sample1View.java (7)
  40. samples/android/tutorial-1-addopencv/src/org/opencv/samples/tutorial1/SampleViewBase.java (152)
  41. samples/android/tutorial-2-opencvcamera/res/values/strings.xml (2)
  42. samples/android/tutorial-2-opencvcamera/src/org/opencv/samples/tutorial2/Sample2NativeCamera.java (32)
  43. samples/android/tutorial-2-opencvcamera/src/org/opencv/samples/tutorial2/Sample2View.java (53)
  44. samples/android/tutorial-2-opencvcamera/src/org/opencv/samples/tutorial2/SampleCvViewBase.java (53)
  45. samples/android/tutorial-3-native/res/values/strings.xml (2)
  46. samples/android/tutorial-3-native/src/org/opencv/samples/tutorial3/Sample3Native.java (31)
  47. samples/android/tutorial-3-native/src/org/opencv/samples/tutorial3/Sample3View.java (2)
  48. samples/android/tutorial-3-native/src/org/opencv/samples/tutorial3/SampleViewBase.java (153)
  49. samples/android/tutorial-4-mixed/res/values/strings.xml (2)
  50. samples/android/tutorial-4-mixed/src/org/opencv/samples/tutorial4/Sample4Mixed.java (27)
  51. samples/android/tutorial-4-mixed/src/org/opencv/samples/tutorial4/Sample4View.java (5)
  52. samples/android/tutorial-4-mixed/src/org/opencv/samples/tutorial4/SampleViewBase.java (154)

3rdparty/include/MultiMon.h (new file)
@@ -0,0 +1,502 @@
//=============================================================================
//
// multimon.h -- Stub module that fakes multiple monitor apis on Win32 OSes
// without them.
//
// By using this header your code will get back default values from
// GetSystemMetrics() for new metrics, and the new multimonitor APIs
// will act like only one display is present on a Win32 OS without
// multimonitor APIs.
//
// Exactly one source must include this with COMPILE_MULTIMON_STUBS defined.
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
//=============================================================================
#ifdef __cplusplus
extern "C" { // Assume C declarations for C++
#endif // __cplusplus
//
// If we are building with Win95/NT4 headers, we need to declare
// the multimonitor-related metrics and APIs ourselves.
//
#ifndef SM_CMONITORS
#define SM_XVIRTUALSCREEN 76
#define SM_YVIRTUALSCREEN 77
#define SM_CXVIRTUALSCREEN 78
#define SM_CYVIRTUALSCREEN 79
#define SM_CMONITORS 80
#define SM_SAMEDISPLAYFORMAT 81
// HMONITOR is already declared if WINVER >= 0x0500 in windef.h
// This is for components built with an older version number.
//
#if !defined(HMONITOR_DECLARED) && (WINVER < 0x0500)
DECLARE_HANDLE(HMONITOR);
#define HMONITOR_DECLARED
#endif
#define MONITOR_DEFAULTTONULL 0x00000000
#define MONITOR_DEFAULTTOPRIMARY 0x00000001
#define MONITOR_DEFAULTTONEAREST 0x00000002
#define MONITORINFOF_PRIMARY 0x00000001
typedef struct tagMONITORINFO
{
DWORD cbSize;
RECT rcMonitor;
RECT rcWork;
DWORD dwFlags;
} MONITORINFO, *LPMONITORINFO;
#ifndef CCHDEVICENAME
#define CCHDEVICENAME 32
#endif
#ifdef __cplusplus
typedef struct tagMONITORINFOEXA : public tagMONITORINFO
{
CHAR szDevice[CCHDEVICENAME];
} MONITORINFOEXA, *LPMONITORINFOEXA;
typedef struct tagMONITORINFOEXW : public tagMONITORINFO
{
WCHAR szDevice[CCHDEVICENAME];
} MONITORINFOEXW, *LPMONITORINFOEXW;
#ifdef UNICODE
typedef MONITORINFOEXW MONITORINFOEX;
typedef LPMONITORINFOEXW LPMONITORINFOEX;
#else
typedef MONITORINFOEXA MONITORINFOEX;
typedef LPMONITORINFOEXA LPMONITORINFOEX;
#endif // UNICODE
#else // ndef __cplusplus
typedef struct tagMONITORINFOEXA
{
MONITORINFO;
CHAR szDevice[CCHDEVICENAME];
} MONITORINFOEXA, *LPMONITORINFOEXA;
typedef struct tagMONITORINFOEXW
{
MONITORINFO;
WCHAR szDevice[CCHDEVICENAME];
} MONITORINFOEXW, *LPMONITORINFOEXW;
#ifdef UNICODE
typedef MONITORINFOEXW MONITORINFOEX;
typedef LPMONITORINFOEXW LPMONITORINFOEX;
#else
typedef MONITORINFOEXA MONITORINFOEX;
typedef LPMONITORINFOEXA LPMONITORINFOEX;
#endif // UNICODE
#endif
typedef BOOL (CALLBACK* MONITORENUMPROC)(HMONITOR, HDC, LPRECT, LPARAM);
#ifndef DISPLAY_DEVICE_ATTACHED_TO_DESKTOP
typedef struct _DISPLAY_DEVICEA {
DWORD cb;
CHAR DeviceName[32];
CHAR DeviceString[128];
DWORD StateFlags;
CHAR DeviceID[128];
CHAR DeviceKey[128];
} DISPLAY_DEVICEA, *PDISPLAY_DEVICEA, *LPDISPLAY_DEVICEA;
typedef struct _DISPLAY_DEVICEW {
DWORD cb;
WCHAR DeviceName[32];
WCHAR DeviceString[128];
DWORD StateFlags;
WCHAR DeviceID[128];
WCHAR DeviceKey[128];
} DISPLAY_DEVICEW, *PDISPLAY_DEVICEW, *LPDISPLAY_DEVICEW;
#ifdef UNICODE
typedef DISPLAY_DEVICEW DISPLAY_DEVICE;
typedef PDISPLAY_DEVICEW PDISPLAY_DEVICE;
typedef LPDISPLAY_DEVICEW LPDISPLAY_DEVICE;
#else
typedef DISPLAY_DEVICEA DISPLAY_DEVICE;
typedef PDISPLAY_DEVICEA PDISPLAY_DEVICE;
typedef LPDISPLAY_DEVICEA LPDISPLAY_DEVICE;
#endif // UNICODE
#define DISPLAY_DEVICE_ATTACHED_TO_DESKTOP 0x00000001
#define DISPLAY_DEVICE_MULTI_DRIVER 0x00000002
#define DISPLAY_DEVICE_PRIMARY_DEVICE 0x00000004
#define DISPLAY_DEVICE_MIRRORING_DRIVER 0x00000008
#define DISPLAY_DEVICE_VGA_COMPATIBLE 0x00000010
#endif
#endif // SM_CMONITORS
#undef GetMonitorInfo
#undef GetSystemMetrics
#undef MonitorFromWindow
#undef MonitorFromRect
#undef MonitorFromPoint
#undef EnumDisplayMonitors
#undef EnumDisplayDevices
//
// Define COMPILE_MULTIMON_STUBS to compile the stubs;
// otherwise, you get the declarations.
//
#ifdef COMPILE_MULTIMON_STUBS
//-----------------------------------------------------------------------------
//
// Implement the API stubs.
//
//-----------------------------------------------------------------------------
#ifndef _MULTIMON_USE_SECURE_CRT
#if defined(__GOT_SECURE_LIB__) && __GOT_SECURE_LIB__ >= 200402L
#define _MULTIMON_USE_SECURE_CRT 1
#else
#define _MULTIMON_USE_SECURE_CRT 0
#endif
#endif
#ifndef MULTIMON_FNS_DEFINED
int (WINAPI* g_pfnGetSystemMetrics)(int) = NULL;
HMONITOR (WINAPI* g_pfnMonitorFromWindow)(HWND, DWORD) = NULL;
HMONITOR (WINAPI* g_pfnMonitorFromRect)(LPCRECT, DWORD) = NULL;
HMONITOR (WINAPI* g_pfnMonitorFromPoint)(POINT, DWORD) = NULL;
BOOL (WINAPI* g_pfnGetMonitorInfo)(HMONITOR, LPMONITORINFO) = NULL;
BOOL (WINAPI* g_pfnEnumDisplayMonitors)(HDC, LPCRECT, MONITORENUMPROC, LPARAM) = NULL;
BOOL (WINAPI* g_pfnEnumDisplayDevices)(PVOID, DWORD, PDISPLAY_DEVICE,DWORD) = NULL;
BOOL g_fMultiMonInitDone = FALSE;
BOOL g_fMultimonPlatformNT = FALSE;
#endif
BOOL IsPlatformNT()
{
OSVERSIONINFOA osvi = {0};
osvi.dwOSVersionInfoSize = sizeof(osvi);
GetVersionExA((OSVERSIONINFOA*)&osvi);
return (VER_PLATFORM_WIN32_NT == osvi.dwPlatformId);
}
BOOL InitMultipleMonitorStubs(void)
{
HMODULE hUser32;
if (g_fMultiMonInitDone)
{
return g_pfnGetMonitorInfo != NULL;
}
g_fMultimonPlatformNT = IsPlatformNT();
hUser32 = GetModuleHandle(TEXT("USER32"));
if (hUser32 &&
(*(FARPROC*)&g_pfnGetSystemMetrics = GetProcAddress(hUser32,"GetSystemMetrics")) != NULL &&
(*(FARPROC*)&g_pfnMonitorFromWindow = GetProcAddress(hUser32,"MonitorFromWindow")) != NULL &&
(*(FARPROC*)&g_pfnMonitorFromRect = GetProcAddress(hUser32,"MonitorFromRect")) != NULL &&
(*(FARPROC*)&g_pfnMonitorFromPoint = GetProcAddress(hUser32,"MonitorFromPoint")) != NULL &&
(*(FARPROC*)&g_pfnEnumDisplayMonitors = GetProcAddress(hUser32,"EnumDisplayMonitors")) != NULL &&
#ifdef UNICODE
(*(FARPROC*)&g_pfnEnumDisplayDevices = GetProcAddress(hUser32,"EnumDisplayDevicesW")) != NULL &&
(*(FARPROC*)&g_pfnGetMonitorInfo = g_fMultimonPlatformNT ? GetProcAddress(hUser32,"GetMonitorInfoW") :
GetProcAddress(hUser32,"GetMonitorInfoA")) != NULL
#else
(*(FARPROC*)&g_pfnGetMonitorInfo = GetProcAddress(hUser32,"GetMonitorInfoA")) != NULL &&
(*(FARPROC*)&g_pfnEnumDisplayDevices = GetProcAddress(hUser32,"EnumDisplayDevicesA")) != NULL
#endif
) {
g_fMultiMonInitDone = TRUE;
return TRUE;
}
else
{
g_pfnGetSystemMetrics = NULL;
g_pfnMonitorFromWindow = NULL;
g_pfnMonitorFromRect = NULL;
g_pfnMonitorFromPoint = NULL;
g_pfnGetMonitorInfo = NULL;
g_pfnEnumDisplayMonitors = NULL;
g_pfnEnumDisplayDevices = NULL;
g_fMultiMonInitDone = TRUE;
return FALSE;
}
}
//-----------------------------------------------------------------------------
//
// fake implementations of Monitor APIs that work with the primary display
// no special parameter validation is made since these run in client code
//
//-----------------------------------------------------------------------------
int WINAPI
xGetSystemMetrics(int nIndex)
{
if (InitMultipleMonitorStubs())
return g_pfnGetSystemMetrics(nIndex);
switch (nIndex)
{
case SM_CMONITORS:
case SM_SAMEDISPLAYFORMAT:
return 1;
case SM_XVIRTUALSCREEN:
case SM_YVIRTUALSCREEN:
return 0;
case SM_CXVIRTUALSCREEN:
nIndex = SM_CXSCREEN;
break;
case SM_CYVIRTUALSCREEN:
nIndex = SM_CYSCREEN;
break;
}
return GetSystemMetrics(nIndex);
}
#define xPRIMARY_MONITOR ((HMONITOR)0x12340042)
HMONITOR WINAPI
xMonitorFromPoint(POINT ptScreenCoords, DWORD dwFlags)
{
if (InitMultipleMonitorStubs())
return g_pfnMonitorFromPoint(ptScreenCoords, dwFlags);
if ((dwFlags & (MONITOR_DEFAULTTOPRIMARY | MONITOR_DEFAULTTONEAREST)) ||
((ptScreenCoords.x >= 0) &&
(ptScreenCoords.x < GetSystemMetrics(SM_CXSCREEN)) &&
(ptScreenCoords.y >= 0) &&
(ptScreenCoords.y < GetSystemMetrics(SM_CYSCREEN))))
{
return xPRIMARY_MONITOR;
}
return NULL;
}
HMONITOR WINAPI
xMonitorFromRect(LPCRECT lprcScreenCoords, DWORD dwFlags)
{
if (InitMultipleMonitorStubs())
return g_pfnMonitorFromRect(lprcScreenCoords, dwFlags);
if ((dwFlags & (MONITOR_DEFAULTTOPRIMARY | MONITOR_DEFAULTTONEAREST)) ||
((lprcScreenCoords->right > 0) &&
(lprcScreenCoords->bottom > 0) &&
(lprcScreenCoords->left < GetSystemMetrics(SM_CXSCREEN)) &&
(lprcScreenCoords->top < GetSystemMetrics(SM_CYSCREEN))))
{
return xPRIMARY_MONITOR;
}
return NULL;
}
HMONITOR WINAPI
xMonitorFromWindow(HWND hWnd, DWORD dwFlags)
{
WINDOWPLACEMENT wp;
if (InitMultipleMonitorStubs())
return g_pfnMonitorFromWindow(hWnd, dwFlags);
if (dwFlags & (MONITOR_DEFAULTTOPRIMARY | MONITOR_DEFAULTTONEAREST))
return xPRIMARY_MONITOR;
if (IsIconic(hWnd) ?
GetWindowPlacement(hWnd, &wp) :
GetWindowRect(hWnd, &wp.rcNormalPosition)) {
return xMonitorFromRect(&wp.rcNormalPosition, dwFlags);
}
return NULL;
}
BOOL WINAPI
xGetMonitorInfo(HMONITOR hMonitor, __inout LPMONITORINFO lpMonitorInfo)
{
RECT rcWork;
if (InitMultipleMonitorStubs())
{
BOOL f = g_pfnGetMonitorInfo(hMonitor, lpMonitorInfo);
#ifdef UNICODE
if (f && !g_fMultimonPlatformNT && (lpMonitorInfo->cbSize >= sizeof(MONITORINFOEX)))
{
MultiByteToWideChar(CP_ACP, 0,
(LPSTR)((MONITORINFOEX*)lpMonitorInfo)->szDevice, -1,
((MONITORINFOEX*)lpMonitorInfo)->szDevice, (sizeof(((MONITORINFOEX*)lpMonitorInfo)->szDevice)/sizeof(TCHAR)));
}
#endif
return f;
}
if ((hMonitor == xPRIMARY_MONITOR) &&
lpMonitorInfo &&
(lpMonitorInfo->cbSize >= sizeof(MONITORINFO)) &&
SystemParametersInfoA(SPI_GETWORKAREA, 0, &rcWork, 0))
{
lpMonitorInfo->rcMonitor.left = 0;
lpMonitorInfo->rcMonitor.top = 0;
lpMonitorInfo->rcMonitor.right = GetSystemMetrics(SM_CXSCREEN);
lpMonitorInfo->rcMonitor.bottom = GetSystemMetrics(SM_CYSCREEN);
lpMonitorInfo->rcWork = rcWork;
lpMonitorInfo->dwFlags = MONITORINFOF_PRIMARY;
if (lpMonitorInfo->cbSize >= sizeof(MONITORINFOEX))
{
#ifdef UNICODE
MultiByteToWideChar(CP_ACP, 0, "DISPLAY", -1, ((MONITORINFOEX*)lpMonitorInfo)->szDevice, (sizeof(((MONITORINFOEX*)lpMonitorInfo)->szDevice)/sizeof(TCHAR)));
#else // UNICODE
#if _MULTIMON_USE_SECURE_CRT
strncpy_s(((MONITORINFOEX*)lpMonitorInfo)->szDevice, (sizeof(((MONITORINFOEX*)lpMonitorInfo)->szDevice)/sizeof(TCHAR)), TEXT("DISPLAY"), (sizeof(((MONITORINFOEX*)lpMonitorInfo)->szDevice)/sizeof(TCHAR)) - 1);
#else
lstrcpyn(((MONITORINFOEX*)lpMonitorInfo)->szDevice, TEXT("DISPLAY"), (sizeof(((MONITORINFOEX*)lpMonitorInfo)->szDevice)/sizeof(TCHAR)));
#endif // _MULTIMON_USE_SECURE_CRT
#endif // UNICODE
}
return TRUE;
}
return FALSE;
}
BOOL WINAPI
xEnumDisplayMonitors(
HDC hdcOptionalForPainting,
LPCRECT lprcEnumMonitorsThatIntersect,
MONITORENUMPROC lpfnEnumProc,
LPARAM dwData)
{
RECT rcLimit;
if (InitMultipleMonitorStubs()) {
return g_pfnEnumDisplayMonitors(
hdcOptionalForPainting,
lprcEnumMonitorsThatIntersect,
lpfnEnumProc,
dwData);
}
if (!lpfnEnumProc)
return FALSE;
rcLimit.left = 0;
rcLimit.top = 0;
rcLimit.right = GetSystemMetrics(SM_CXSCREEN);
rcLimit.bottom = GetSystemMetrics(SM_CYSCREEN);
if (hdcOptionalForPainting)
{
RECT rcClip;
POINT ptOrg;
switch (GetClipBox(hdcOptionalForPainting, &rcClip))
{
default:
if (!GetDCOrgEx(hdcOptionalForPainting, &ptOrg))
return FALSE;
OffsetRect(&rcLimit, -ptOrg.x, -ptOrg.y);
if (IntersectRect(&rcLimit, &rcLimit, &rcClip) &&
(!lprcEnumMonitorsThatIntersect ||
IntersectRect(&rcLimit, &rcLimit, lprcEnumMonitorsThatIntersect))) {
break;
}
//fall thru
case NULLREGION:
return TRUE;
case ERROR:
return FALSE;
}
} else {
if ( lprcEnumMonitorsThatIntersect &&
!IntersectRect(&rcLimit, &rcLimit, lprcEnumMonitorsThatIntersect)) {
return TRUE;
}
}
return lpfnEnumProc(
xPRIMARY_MONITOR,
hdcOptionalForPainting,
&rcLimit,
dwData);
}
BOOL WINAPI
xEnumDisplayDevices(
PVOID Unused,
DWORD iDevNum,
__inout PDISPLAY_DEVICE lpDisplayDevice,
DWORD dwFlags)
{
if (InitMultipleMonitorStubs())
return g_pfnEnumDisplayDevices(Unused, iDevNum, lpDisplayDevice, dwFlags);
if (Unused != NULL)
return FALSE;
if (iDevNum != 0)
return FALSE;
if (lpDisplayDevice == NULL || lpDisplayDevice->cb < sizeof(DISPLAY_DEVICE))
return FALSE;
#ifdef UNICODE
MultiByteToWideChar(CP_ACP, 0, "DISPLAY", -1, lpDisplayDevice->DeviceName, (sizeof(lpDisplayDevice->DeviceName)/sizeof(TCHAR)));
MultiByteToWideChar(CP_ACP, 0, "DISPLAY", -1, lpDisplayDevice->DeviceString, (sizeof(lpDisplayDevice->DeviceString)/sizeof(TCHAR)));
#else // UNICODE
#if _MULTIMON_USE_SECURE_CRT
strncpy_s((LPTSTR)lpDisplayDevice->DeviceName, (sizeof(lpDisplayDevice->DeviceName)/sizeof(TCHAR)), TEXT("DISPLAY"), (sizeof(lpDisplayDevice->DeviceName)/sizeof(TCHAR)) - 1);
strncpy_s((LPTSTR)lpDisplayDevice->DeviceString, (sizeof(lpDisplayDevice->DeviceString)/sizeof(TCHAR)), TEXT("DISPLAY"), (sizeof(lpDisplayDevice->DeviceString)/sizeof(TCHAR)) - 1);
#else
lstrcpyn((LPTSTR)lpDisplayDevice->DeviceName, TEXT("DISPLAY"), (sizeof(lpDisplayDevice->DeviceName)/sizeof(TCHAR)));
lstrcpyn((LPTSTR)lpDisplayDevice->DeviceString, TEXT("DISPLAY"), (sizeof(lpDisplayDevice->DeviceString)/sizeof(TCHAR)));
#endif // _MULTIMON_USE_SECURE_CRT
#endif // UNICODE
lpDisplayDevice->StateFlags = DISPLAY_DEVICE_ATTACHED_TO_DESKTOP | DISPLAY_DEVICE_PRIMARY_DEVICE;
return TRUE;
}
#undef xPRIMARY_MONITOR
#undef COMPILE_MULTIMON_STUBS
#else // COMPILE_MULTIMON_STUBS
extern int WINAPI xGetSystemMetrics(int);
extern HMONITOR WINAPI xMonitorFromWindow(HWND, DWORD);
extern HMONITOR WINAPI xMonitorFromRect(LPCRECT, DWORD);
extern HMONITOR WINAPI xMonitorFromPoint(POINT, DWORD);
extern BOOL WINAPI xGetMonitorInfo(HMONITOR, LPMONITORINFO);
extern BOOL WINAPI xEnumDisplayMonitors(HDC, LPCRECT, MONITORENUMPROC, LPARAM);
extern BOOL WINAPI xEnumDisplayDevices(PVOID, DWORD, PDISPLAY_DEVICE, DWORD);
#endif // COMPILE_MULTIMON_STUBS
//
// build defines that replace the regular APIs with our versions
//
#define GetSystemMetrics xGetSystemMetrics
#define MonitorFromWindow xMonitorFromWindow
#define MonitorFromRect xMonitorFromRect
#define MonitorFromPoint xMonitorFromPoint
#define GetMonitorInfo xGetMonitorInfo
#define EnumDisplayMonitors xEnumDisplayMonitors
#define EnumDisplayDevices xEnumDisplayDevices
#ifdef __cplusplus
}
#endif // __cplusplus
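As the header comment says, exactly one source file defines COMPILE_MULTIMON_STUBS before including it, which compiles the stub bodies; the #defines at the bottom then reroute the standard Win32 names to the x-prefixed wrappers (window_w32.cpp below does exactly this). A minimal consumer sketch, assuming a Win32 toolchain; the file and function here are hypothetical, not part of the commit:

    // monitor_count.cpp -- hypothetical consumer of the stub header
    #define COMPILE_MULTIMON_STUBS   // compile the stub bodies in this one file
    #include <windows.h>
    #include "MultiMon.h"            // redefines GetSystemMetrics -> xGetSystemMetrics

    int monitorCount()
    {
        // On systems without the multi-monitor APIs the stub returns 1;
        // otherwise the call is forwarded to the real user32 entry point.
        return GetSystemMetrics(SM_CMONITORS);
    }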

android/android.toolchain.cmake
@@ -766,30 +766,30 @@ set( CMAKE_COMPILER_IS_GNUASM 1)
 # NDK flags
 if( ARMEABI OR ARMEABI_V7A )
  # NDK also defines -ffunction-sections -funwind-tables but they result in worse OpenCV performance
-  set( CMAKE_CXX_FLAGS "-fPIC -Wno-psabi" )
-  set( CMAKE_C_FLAGS "-fPIC -Wno-psabi" )
+  set( _CMAKE_CXX_FLAGS "-fPIC -Wno-psabi" )
+  set( _CMAKE_C_FLAGS "-fPIC -Wno-psabi" )
   remove_definitions( -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__ -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__ )
   add_definitions( -D__ARM_ARCH_5__ -D__ARM_ARCH_5T__ -D__ARM_ARCH_5E__ -D__ARM_ARCH_5TE__ )
   # extra arm-specific flags
   set( ANDROID_CXX_FLAGS "${ANDROID_CXX_FLAGS} -fsigned-char" )
 elseif( X86 )
-  set( CMAKE_CXX_FLAGS "-funwind-tables" )
-  set( CMAKE_C_FLAGS "-funwind-tables" )
+  set( _CMAKE_CXX_FLAGS "-funwind-tables" )
+  set( _CMAKE_C_FLAGS "-funwind-tables" )
 elseif( MIPS )
-  set( CMAKE_CXX_FLAGS "-fpic -Wno-psabi -fno-strict-aliasing -finline-functions -ffunction-sections -funwind-tables -fmessage-length=0 -fno-inline-functions-called-once -fgcse-after-reload -frerun-cse-after-loop -frename-registers" )
-  set( CMAKE_CXX_FLAGS "-fpic -Wno-psabi -fno-strict-aliasing -finline-functions -ffunction-sections -funwind-tables -fmessage-length=0 -fno-inline-functions-called-once -fgcse-after-reload -frerun-cse-after-loop -frename-registers" )
+  set( _CMAKE_CXX_FLAGS "-fpic -Wno-psabi -fno-strict-aliasing -finline-functions -ffunction-sections -funwind-tables -fmessage-length=0 -fno-inline-functions-called-once -fgcse-after-reload -frerun-cse-after-loop -frename-registers" )
+  set( _CMAKE_CXX_FLAGS "-fpic -Wno-psabi -fno-strict-aliasing -finline-functions -ffunction-sections -funwind-tables -fmessage-length=0 -fno-inline-functions-called-once -fgcse-after-reload -frerun-cse-after-loop -frename-registers" )
   set( ANDROID_CXX_FLAGS "${ANDROID_CXX_FLAGS} -fsigned-char" )
 else()
-  set( CMAKE_CXX_FLAGS "" )
-  set( CMAKE_C_FLAGS "" )
+  set( _CMAKE_CXX_FLAGS "" )
+  set( _CMAKE_C_FLAGS "" )
 endif()
 if( ANDROID_USE_STLPORT )
-  set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti -fno-exceptions" )
-  set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fno-rtti -fno-exceptions" )
+  set( _CMAKE_CXX_FLAGS "${_CMAKE_CXX_FLAGS} -fno-rtti -fno-exceptions" )
+  set( _CMAKE_C_FLAGS "${_CMAKE_C_FLAGS} -fno-rtti -fno-exceptions" )
 else()
-  set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -frtti -fexceptions" )
-  set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fexceptions" )
+  set( _CMAKE_CXX_FLAGS "${_CMAKE_CXX_FLAGS} -frtti -fexceptions" )
+  set( _CMAKE_C_FLAGS "${_CMAKE_C_FLAGS} -fexceptions" )
 endif()
 #release and debug flags
@@ -798,33 +798,33 @@ if( ARMEABI OR ARMEABI_V7A )
 #It is recommended to use the -mthumb compiler flag to force the generation
 #of 16-bit Thumb-1 instructions (the default being 32-bit ARM ones).
 # O3 instead of O2/Os in release mode - like cmake sets for desktop gcc
-  set( CMAKE_CXX_FLAGS_RELEASE "-mthumb -O3" )
-  set( CMAKE_C_FLAGS_RELEASE "-mthumb -O3" )
-  set( CMAKE_CXX_FLAGS_DEBUG "-marm -Os -finline-limit=64" )
-  set( CMAKE_C_FLAGS_DEBUG "-marm -Os -finline-limit=64" )
+  set( _CMAKE_CXX_FLAGS_RELEASE "-mthumb -O3" )
+  set( _CMAKE_C_FLAGS_RELEASE "-mthumb -O3" )
+  set( _CMAKE_CXX_FLAGS_DEBUG "-marm -Os -finline-limit=64" )
+  set( _CMAKE_C_FLAGS_DEBUG "-marm -Os -finline-limit=64" )
 else()
 # always compile ARMEABI_V6 in arm mode; otherwise there is no difference from ARMEABI
 # O3 instead of O2/Os in release mode - like cmake sets for desktop gcc
-  set( CMAKE_CXX_FLAGS_RELEASE "-marm -O3 -fstrict-aliasing" )
-  set( CMAKE_C_FLAGS_RELEASE "-marm -O3 -fstrict-aliasing" )
-  set( CMAKE_CXX_FLAGS_DEBUG "-marm -O0 -finline-limit=300" )
-  set( CMAKE_C_FLAGS_DEBUG "-marm -O0 -finline-limit=300" )
+  set( _CMAKE_CXX_FLAGS_RELEASE "-marm -O3 -fstrict-aliasing" )
+  set( _CMAKE_C_FLAGS_RELEASE "-marm -O3 -fstrict-aliasing" )
+  set( _CMAKE_CXX_FLAGS_DEBUG "-marm -O0 -finline-limit=300" )
+  set( _CMAKE_C_FLAGS_DEBUG "-marm -O0 -finline-limit=300" )
 endif()
 elseif( X86 )
-  set( CMAKE_CXX_FLAGS_RELEASE "-O3 -fstrict-aliasing" )
-  set( CMAKE_C_FLAGS_RELEASE "-O3 -fstrict-aliasing" )
-  set( CMAKE_CXX_FLAGS_DEBUG "-O0 -finline-limit=300" )
-  set( CMAKE_C_FLAGS_DEBUG "-O0 -finline-limit=300" )
+  set( _CMAKE_CXX_FLAGS_RELEASE "-O3 -fstrict-aliasing" )
+  set( _CMAKE_C_FLAGS_RELEASE "-O3 -fstrict-aliasing" )
+  set( _CMAKE_CXX_FLAGS_DEBUG "-O0 -finline-limit=300" )
+  set( _CMAKE_C_FLAGS_DEBUG "-O0 -finline-limit=300" )
 elseif( MIPS )
-  set( CMAKE_CXX_FLAGS_RELEASE "-O3 -funswitch-loops -finline-limit=300" )
-  set( CMAKE_C_FLAGS_RELEASE "-O3 -funswitch-loops -finline-limit=300" )
-  set( CMAKE_CXX_FLAGS_DEBUG "-O0 -g" )
-  set( CMAKE_C_FLAGS_DEBUG "-O0 -g" )
+  set( _CMAKE_CXX_FLAGS_RELEASE "-O3 -funswitch-loops -finline-limit=300" )
+  set( _CMAKE_C_FLAGS_RELEASE "-O3 -funswitch-loops -finline-limit=300" )
+  set( _CMAKE_CXX_FLAGS_DEBUG "-O0 -g" )
+  set( _CMAKE_C_FLAGS_DEBUG "-O0 -g" )
 endif()
-set( CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -fomit-frame-pointer -DNDEBUG" )
-set( CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -fomit-frame-pointer -DNDEBUG" )
-set( CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -fno-strict-aliasing -fno-omit-frame-pointer -DDEBUG -D_DEBUG" )
-set( CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -fno-strict-aliasing -fno-omit-frame-pointer -DDEBUG -D_DEBUG" )
+set( _CMAKE_CXX_FLAGS_RELEASE "${_CMAKE_CXX_FLAGS_RELEASE} -fomit-frame-pointer -DNDEBUG" )
+set( _CMAKE_C_FLAGS_RELEASE "${_CMAKE_C_FLAGS_RELEASE} -fomit-frame-pointer -DNDEBUG" )
+set( _CMAKE_CXX_FLAGS_DEBUG "${_CMAKE_CXX_FLAGS_DEBUG} -fno-strict-aliasing -fno-omit-frame-pointer -DDEBUG -D_DEBUG" )
+set( _CMAKE_C_FLAGS_DEBUG "${_CMAKE_C_FLAGS_DEBUG} -fno-strict-aliasing -fno-omit-frame-pointer -DDEBUG -D_DEBUG" )
 #ABI-specific flags
 if( ARMEABI_V7A )
@@ -928,12 +928,12 @@ if( ARMEABI_V7A )
 endif()
 #cache flags
-set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}" CACHE STRING "c++ flags" )
-set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS}" CACHE STRING "c flags" )
-set( CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE}" CACHE STRING "c++ Release flags" )
-set( CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE}" CACHE STRING "c Release flags" )
-set( CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG}" CACHE STRING "c++ Debug flags" )
-set( CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG}" CACHE STRING "c Debug flags" )
+set( CMAKE_CXX_FLAGS "${_CMAKE_CXX_FLAGS}" CACHE STRING "c++ flags" )
+set( CMAKE_C_FLAGS "${_CMAKE_C_FLAGS}" CACHE STRING "c flags" )
+set( CMAKE_CXX_FLAGS_RELEASE "${_CMAKE_CXX_FLAGS_RELEASE}" CACHE STRING "c++ Release flags" )
+set( CMAKE_C_FLAGS_RELEASE "${_CMAKE_C_FLAGS_RELEASE}" CACHE STRING "c Release flags" )
+set( CMAKE_CXX_FLAGS_DEBUG "${_CMAKE_CXX_FLAGS_DEBUG}" CACHE STRING "c++ Debug flags" )
+set( CMAKE_C_FLAGS_DEBUG "${_CMAKE_C_FLAGS_DEBUG}" CACHE STRING "c Debug flags" )
 set( CMAKE_SHARED_LINKER_FLAGS "" CACHE STRING "linker flags" )
 set( CMAKE_MODULE_LINKER_FLAGS "" CACHE STRING "linker flags" )
 set( CMAKE_EXE_LINKER_FLAGS "-Wl,-z,nocopyreloc" CACHE STRING "linker flags" )

cmake/OpenCVCompilerOptions.cmake
@@ -1,7 +1,3 @@
-if (WIN32 AND CMAKE_GENERATOR MATCHES "(MinGW)|(MSYS)")
-  set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG" CACHE STRING "")
-endif()
 if(MSVC)
   if(CMAKE_CXX_FLAGS STREQUAL CMAKE_CXX_FLAGS_INIT)
     # override cmake default exception handling option
@@ -17,6 +13,21 @@ set(OPENCV_EXTRA_EXE_LINKER_FLAGS "")
 set(OPENCV_EXTRA_EXE_LINKER_FLAGS_RELEASE "")
 set(OPENCV_EXTRA_EXE_LINKER_FLAGS_DEBUG "")
+if(MINGW)
+  # mingw compiler is known to produce unstable SSE code
+  # here we are trying to workaround the problem
+  include(CheckCXXCompilerFlag)
+  CHECK_CXX_COMPILER_FLAG(-mstackrealign HAVE_STACKREALIGN_FLAG)
+  if(HAVE_STACKREALIGN_FLAG)
+    set(OPENCV_EXTRA_C_FLAGS "${OPENCV_EXTRA_C_FLAGS} -mstackrealign")
+  else()
+    CHECK_CXX_COMPILER_FLAG(-mpreferred-stack-boundary=2 HAVE_PREFERRED_STACKBOUNDARY_FLAG)
+    if(HAVE_PREFERRED_STACKBOUNDARY_FLAG)
+      set(OPENCV_EXTRA_C_FLAGS "${OPENCV_EXTRA_C_FLAGS} -mstackrealign")
+    endif()
+  endif()
+endif()
 if(CMAKE_COMPILER_IS_GNUCXX)
   # High level of warnings.
   set(OPENCV_EXTRA_C_FLAGS "${OPENCV_EXTRA_C_FLAGS} -Wall")

cmake/OpenCVDetectCXXCompiler.cmake
@@ -11,7 +11,7 @@ if(NOT APPLE)
   unset(ENABLE_PRECOMPILED_HEADERS CACHE)
 endif()
 if(CMAKE_C_COMPILER_ID STREQUAL "Clang")
-  set(CMAKE_COMPILER_IS_GNUC 1)
+  set(CMAKE_COMPILER_IS_GNUCC 1)
   unset(ENABLE_PRECOMPILED_HEADERS CACHE)
 endif()
 endif()

cmake/OpenCVDetectPython.cmake
@@ -12,12 +12,10 @@ if(MSVC AND NOT PYTHON_EXECUTABLE)
     )
   endforeach()
 endif()
 find_host_package(PythonInterp 2.0)
 unset(PYTHON_USE_NUMPY CACHE)
 unset(HAVE_SPHINX CACHE)
 if(PYTHON_EXECUTABLE)
 if(PYTHON_VERSION_STRING)
   set(PYTHON_VERSION_MAJOR_MINOR "${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}")
@@ -46,16 +44,15 @@ if(PYTHON_EXECUTABLE)
   OUTPUT_VARIABLE PYTHON_STD_PACKAGES_PATH
   OUTPUT_STRIP_TRAILING_WHITESPACE)
 if("${PYTHON_STD_PACKAGES_PATH}" MATCHES "site-packages")
-  set(PYTHON_PACKAGES_PATH "python${PYTHON_VERSION_MAJOR_MINOR}/site-packages")
+  set(_PYTHON_PACKAGES_PATH "python${PYTHON_VERSION_MAJOR_MINOR}/site-packages")
 else() #debian based assumed, install to the dist-packages.
-  set(PYTHON_PACKAGES_PATH "python${PYTHON_VERSION_MAJOR_MINOR}/dist-packages")
+  set(_PYTHON_PACKAGES_PATH "python${PYTHON_VERSION_MAJOR_MINOR}/dist-packages")
 endif()
 if(EXISTS "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/${PYTHON_PACKAGES_PATH}")
-  set(PYTHON_PACKAGES_PATH "lib${LIB_SUFFIX}/${PYTHON_PACKAGES_PATH}")
+  set(_PYTHON_PACKAGES_PATH "lib${LIB_SUFFIX}/${_PYTHON_PACKAGES_PATH}")
 else()
-  set(PYTHON_PACKAGES_PATH "lib/${PYTHON_PACKAGES_PATH}")
+  set(_PYTHON_PACKAGES_PATH "lib/${_PYTHON_PACKAGES_PATH}")
 endif()
-set(PYTHON_PACKAGES_PATH "${PYTHON_PACKAGES_PATH}" CACHE PATH "Where to install the python packages.")
 elseif(CMAKE_HOST_WIN32)
   get_filename_component(PYTHON_PATH "${PYTHON_EXECUTABLE}" PATH)
   file(TO_CMAKE_PATH "${PYTHON_PATH}" PYTHON_PATH)
@@ -67,8 +64,9 @@ if(PYTHON_EXECUTABLE)
   endif()
   file(TO_CMAKE_PATH "${PYTHON_PATH}" PYTHON_PATH)
 endif()
-set(PYTHON_PACKAGES_PATH "${PYTHON_PATH}/Lib/site-packages")
+set(_PYTHON_PACKAGES_PATH "${PYTHON_PATH}/Lib/site-packages")
 endif()
+SET(PYTHON_PACKAGES_PATH "${_PYTHON_PACKAGES_PATH}" CACHE PATH "Where to install the python packages.")
 if(NOT PYTHON_NUMPY_INCLUDE_DIR)
 # Attempt to discover the NumPy include directory. If this succeeds, then build python API with NumPy
@@ -78,8 +76,8 @@ if(PYTHON_EXECUTABLE)
   OUTPUT_STRIP_TRAILING_WHITESPACE)
 if(PYTHON_NUMPY_PROCESS EQUAL 0)
-  file(TO_CMAKE_PATH "${PYTHON_NUMPY_INCLUDE_DIR}" PYTHON_NUMPY_INCLUDE_DIR)
-  set(PYTHON_NUMPY_INCLUDE_DIR ${PYTHON_NUMPY_INCLUDE_DIR} CACHE PATH "Path to numpy headers")
+  file(TO_CMAKE_PATH "${PYTHON_NUMPY_INCLUDE_DIR}" _PYTHON_NUMPY_INCLUDE_DIR)
+  set(PYTHON_NUMPY_INCLUDE_DIR ${_PYTHON_NUMPY_INCLUDE_DIR} CACHE PATH "Path to numpy headers")
 endif()
 endif()

cmake/OpenCVFindIPP.cmake
@@ -220,45 +220,40 @@ function(set_ipp_variables _LATEST_VERSION)
 endfunction()
 # ------------------------------------------------------------------------
 # This section will look for IPP through IPPROOT env variable
 # Note, IPPROOT is not set by IPP installer, you may need to set it manually
 # ------------------------------------------------------------------------
 find_path(
   IPP_H_PATH
   NAMES ippversion.h
   PATHS $ENV{IPPROOT}
   PATH_SUFFIXES include
   DOC "The path to Intel(R) IPP header files"
   NO_DEFAULT_PATH
   NO_CMAKE_PATH)
 if(IPP_H_PATH)
   set(IPP_FOUND 1)
   # traverse up to IPPROOT level
   get_filename_component(IPP_ROOT_DIR ${IPP_H_PATH} PATH)
   # extract IPP version info
   get_ipp_version(${IPP_ROOT_DIR})
   # keep info in the same vars for auto search and search by IPPROOT
   set(IPP_LATEST_VERSION_STR ${IPP_VERSION_STR})
   set(IPP_LATEST_VERSION_MAJOR ${IPP_VERSION_MAJOR})
   set(IPP_LATEST_VERSION_MINOR ${IPP_VERSION_MINOR})
   set(IPP_LATEST_VERSION_BUILD ${IPP_VERSION_BUILD})
   # set IPP INCLUDE, LIB dirs and library names
   set_ipp_variables(${IPP_LATEST_VERSION_STR})
 endif()
-if(NOT IPP_FOUND)
+if(IPP_FOUND)
+  return()
+endif()
 # reset var from previous search
 set(IPP_H_PATH)
@@ -304,19 +299,38 @@ endfunction()
     endforeach()
   endif()
 endforeach()
-endif()
 if(IPP_FOUND)
   # set IPP INCLUDE, LIB dirs and library names
   set_ipp_variables(${IPP_LATEST_VERSION_STR})
   # set CACHE variable IPP_H_PATH,
   # path to IPP header files for the latest version
   find_path(
     IPP_H_PATH
     NAMES ippversion.h
     PATHS ${IPP_ROOT_DIR}
     PATH_SUFFIXES include
     DOC "The path to Intel(R) IPP header files"
     NO_DEFAULT_PATH
     NO_CMAKE_PATH)
 endif()
+if(WIN32 AND MINGW AND NOT IPP_LATEST_VERSION_MAJOR LESS 7)
+  # Since IPP built with Microsoft compiler and /GS option
+  # ======================================================
+  # From Windows SDK 7.1
+  # (usually in "C:\Program Files\Microsoft Visual Studio 10.0\VC\lib"),
+  # to avoid undefined reference to __security_cookie and _chkstk:
+  set(MSV_RUNTMCHK "RunTmChk")
+  set(IPP_LIBRARIES ${IPP_LIBRARIES} ${MSV_RUNTMCHK}${IPP_LIB_SUFFIX})
+  # To avoid undefined reference to _alldiv and _chkstk
+  # ===================================================
+  # NB: it may require a recompilation of w32api (after having modified
+  # the file ntdll.def) to export the required functions
+  # See http://code.opencv.org/issues/1906 for additional details
+  set(MSV_NTDLL "ntdll")
+  set(IPP_LIBRARIES ${IPP_LIBRARIES} ${MSV_NTDLL}${IPP_LIB_SUFFIX})
+endif()

modules/core/doc/basic_structures.rst
@@ -2324,6 +2324,7 @@ The class provides the following features for all derived classes:
 Here is example of SIFT use in your application via Algorithm interface: ::
     #include "opencv2/opencv.hpp"
+    #include "opencv2/nonfree/nonfree.hpp"
 ...
@@ -2334,22 +2335,22 @@ Here is example of SIFT use in your application via Algorithm interface: ::
     FileStorage fs("sift_params.xml", FileStorage::READ);
     if( fs.isOpened() ) // if we have file with parameters, read them
     {
-        sift.read(fs["sift_params"]);
+        sift->read(fs["sift_params"]);
         fs.release();
     }
     else // else modify the parameters and store them; user can later edit the file to use different parameters
     {
-        sift.set("contrastThreshold", 0.01f); // lower the contrast threshold, compared to the default value
+        sift->set("contrastThreshold", 0.01f); // lower the contrast threshold, compared to the default value
         {
             WriteStructContext ws(fs, "sift_params", CV_NODE_MAP);
-            sift.write(fs);
+            sift->write(fs);
         }
     }
     Mat image = imread("myimage.png", 0), descriptors;
     vector<KeyPoint> keypoints;
-    sift(image, noArray(), keypoints, descriptors);
+    (*sift)(image, noArray(), keypoints, descriptors);
 Algorithm::get
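The switch from "." to "->" above implies that sift is now held through a smart pointer rather than constructed directly. A minimal sketch of the setup this assumes (2.4-era Algorithm registry; the create call and module init are not shown in the hunk, so treat the names as illustrative):

    #include "opencv2/opencv.hpp"
    #include "opencv2/nonfree/nonfree.hpp"

    int main()
    {
        // register SIFT/SURF with the Algorithm factory (nonfree module)
        cv::initModule_nonfree();
        cv::Ptr<cv::Feature2D> sift =
            cv::Algorithm::create<cv::Feature2D>("Feature2D.SIFT");
        sift->set("contrastThreshold", 0.01f); // same setter the docs exercise
        return 0;
    }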

modules/core/src/matrix.cpp
@@ -2897,8 +2897,16 @@ cvKMeans2( const CvArr* _samples, int cluster_count, CvArr* _labels,
 namespace cv
 {
-Mat Mat::reshape(int, int, const int*) const
+Mat Mat::reshape(int _cn, int _newndims, const int* _newsz) const
 {
+    if(_newndims == dims)
+    {
+        if(_newsz == 0)
+            return reshape(_cn);
+        if(_newndims == 2)
+            return reshape(_cn, _newsz[0]);
+    }
     CV_Error(CV_StsNotImplemented, "");
     // TBD
     return Mat();
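Why this matters in practice (a sketch based on 2.4-era internals; the test in the next hunk covers the same case): assigning a plain Mat to a Mat_<T> routes through m.reshape(channels, m.dims, 0), the exact overload patched here, so forwarding the _newsz == 0 request to reshape(_cn) is what stops the CV_StsNotImplemented throw:

    #include "opencv2/core/core.hpp"

    void reshapeSketch()
    {
        cv::Mat A(2, 3, CV_32F);
        // Mat_ assignment internally calls reshape(1, dims, 0) on the
        // 3-channel view; with the fix that falls back to reshape(1).
        cv::Mat_<float> M = A.reshape(3);   // 3-channel view, re-flattened to 1 channel
        CV_Assert(M.channels() == 1);
    }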

modules/core/test/test_mat.cpp
@@ -861,3 +861,14 @@ TEST(Core_IOArray, submat_create)
     EXPECT_THROW( OutputArray_create1(A.row(0)), cv::Exception );
     EXPECT_THROW( OutputArray_create2(A.row(0)), cv::Exception );
 }
+
+TEST(Core_Mat, reshape_1942)
+{
+    cv::Mat A = (cv::Mat_<float>(2,3) << 3.4884074, 1.4159607, 0.78737736, 2.3456569, -0.88010466, 0.3009364);
+    int cn = 0;
+    ASSERT_NO_THROW(
+        cv::Mat_<float> M = A.reshape(3);
+        cn = M.channels();
+    );
+    ASSERT_EQ(1, cn);
+}

modules/highgui/src/cap_avfoundation.mm
@@ -381,9 +381,12 @@ int CvCaptureCAM::startCaptureDevice(int cameraNum) {
     //TODO: add new interface for setting fps and capturing resolution.
     [mCaptureDecompressedVideoOutput setVideoSettings:pixelBufferOptions];
     mCaptureDecompressedVideoOutput.alwaysDiscardsLateVideoFrames = YES;
-    mCaptureDecompressedVideoOutput.minFrameDuration = CMTimeMake(1, 30);
+#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
+    mCaptureDecompressedVideoOutput.minFrameDuration = CMTimeMake(1, 30);
+#endif
     //Slow. 1280*720 for iPhone4, iPod back camera. 640*480 for front camera
     //mCaptureSession.sessionPreset = AVCaptureSessionPresetHigh; // fps ~= 5 slow for OpenCV
@@ -1150,9 +1153,11 @@ CvVideoWriter_AVFoundation::CvVideoWriter_AVFoundation(const char* filename, int
     fileType = [AVFileTypeMPEG4 copy];
 }else if ([fileExt isEqualToString:@"m4v"]){
     fileType = [AVFileTypeAppleM4V copy];
+#if TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR
 }else if ([fileExt isEqualToString:@"3gp"] || [fileExt isEqualToString:@"3gpp"] || [fileExt isEqualToString:@"sdv"] ){
     fileType = [AVFileType3GPP copy];
-}else{
+#endif
+} else{
     fileType = [AVFileTypeMPEG4 copy]; //default mp4
 }
 [fileExt release];

modules/highgui/src/window_w32.cpp
@@ -47,6 +47,18 @@
 #pragma warning( disable: 4710 )
 #endif
+#define COMPILE_MULTIMON_STUBS // Required for multi-monitor support
+#if defined SM_CMONITORS && !defined MONITOR_DEFAULTTONEAREST
+#  define MONITOR_DEFAULTTONULL       0x00000000
+#  define MONITOR_DEFAULTTOPRIMARY    0x00000001
+#  define MONITOR_DEFAULTTONEAREST    0x00000002
+#  define MONITORINFOF_PRIMARY        0x00000001
+#endif
+#ifndef __inout
+#  define __inout
+#endif
+#include <MultiMon.h>
 #include <commctrl.h>
 #include <winuser.h>
 #include <stdlib.h>
@@ -420,7 +432,6 @@ double cvGetModeWindow_W32(const char* name)//YV
     return result;
 }
-#ifdef MONITOR_DEFAULTTONEAREST
 void cvSetModeWindow_W32( const char* name, double prop_value)//Yannick Verdie
 {
     CV_FUNCNAME( "cvSetModeWindow_W32" );
@@ -484,11 +495,6 @@ void cvSetModeWindow_W32( const char* name, double prop_value)//Yannick Verdie
     __END__;
 }
-#else
-void cvSetModeWindow_W32( const char*, double)
-{
-}
-#endif
 double cvGetPropWindowAutoSize_W32(const char* name)
 {
@@ -1065,7 +1071,7 @@ CV_IMPL int cvNamedWindow( const char* name, int flags )
     icvSetWindowLongPtr( hWnd, CV_USERDATA, window );
     icvSetWindowLongPtr( mainhWnd, CV_USERDATA, window );
-    // Recalculate window position
+    // Recalculate window pos
     icvUpdateWindowPos( window );
     result = 1;
@@ -1633,7 +1639,7 @@ MainWindowProc( HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam )
     {
         WINDOWPOS* pos = (WINDOWPOS*)lParam;
-        // Update the toolbar position/size
+        // Update the toolbar pos/size
         if(window->toolbar.toolbar)
         {
             RECT rect;
@@ -1647,6 +1653,36 @@ MainWindowProc( HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam )
         break;
     }
+    case WM_WINDOWPOSCHANGING:
+    {
+        // Snap window to screen edges with multi-monitor support. // Adi Shavit
+        LPWINDOWPOS pos = (LPWINDOWPOS)lParam;
+
+        RECT rect;
+        GetWindowRect(window->frame, &rect);
+
+        HMONITOR hMonitor;
+        hMonitor = MonitorFromRect(&rect, MONITOR_DEFAULTTONEAREST);
+
+        MONITORINFO mi;
+        mi.cbSize = sizeof(mi);
+        GetMonitorInfo(hMonitor, &mi);
+
+        const int SNAP_DISTANCE = 15;
+
+        if (abs(pos->x - mi.rcMonitor.left) <= SNAP_DISTANCE)
+            pos->x = mi.rcMonitor.left;                // snap to left edge
+        else
+            if (abs(pos->x + pos->cx - mi.rcMonitor.right) <= SNAP_DISTANCE)
+                pos->x = mi.rcMonitor.right - pos->cx; // snap to right edge
+
+        if (abs(pos->y - mi.rcMonitor.top) <= SNAP_DISTANCE)
+            pos->y = mi.rcMonitor.top;                 // snap to top edge
+        else
+            if (abs(pos->y + pos->cy - mi.rcMonitor.bottom) <= SNAP_DISTANCE)
+                pos->y = mi.rcMonitor.bottom - pos->cy; // snap to bottom edge
+    }
+
     case WM_ACTIVATE:
         if(LOWORD(wParam) == WA_ACTIVE || LOWORD(wParam) == WA_CLICKACTIVE)
             SetFocus(window->hwnd);
@@ -2199,7 +2235,7 @@ icvCreateTrackbar( const char* trackbar_name, const char* window_name,
     SendMessage(window->toolbar.toolbar, TB_SETBUTTONINFO,
         (WPARAM)tbs.idCommand, (LPARAM)&tbis);
-    /* Get button position */
+    /* Get button pos */
     SendMessage(window->toolbar.toolbar, TB_GETITEMRECT,
         (WPARAM)tbs.idCommand, (LPARAM)&rect);

modules/imgproc/src/accum.cpp
@@ -272,7 +272,7 @@ accW_( const T* src, AT* dst, const uchar* mask, int len, int cn, double alpha )
         if( mask[i] )
         {
             for( int k = 0; k < cn; k++ )
-                dst[k] += src[k]*a + dst[k]*b;
+                dst[k] = src[k]*a + dst[k]*b;
         }
     }
 }
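A note on the fix (assuming, per the surrounding 2.4 sources, that accW_ sets a = alpha and b = 1 - alpha): the corrected assignment computes the documented accumulateWeighted recurrence dst = alpha*src + (1 - alpha)*dst, while the old += form effectively computed dst = alpha*src + (2 - alpha)*dst, so the accumulator grew without bound under a mask.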

modules/imgproc/src/phasecorr.cpp
@@ -556,7 +556,7 @@ cv::Point2d cv::phaseCorrelate(InputArray _src1, InputArray _src2, InputArray _w
     t = weightedCentroid(C, peakLoc, Size(5, 5));
     // adjust shift relative to image center...
-    Point2d center((double)src1.cols / 2.0, (double)src1.rows / 2.0);
+    Point2d center((double)padded1.cols / 2.0, (double)padded1.rows / 2.0);
     return (center - t);
 }

modules/imgproc/src/utils.cpp
@@ -203,9 +203,6 @@ void cv::copyMakeBorder( InputArray _src, OutputArray _dst, int top, int bottom,
     Mat src = _src.getMat();
     CV_Assert( top >= 0 && bottom >= 0 && left >= 0 && right >= 0 );
-    _dst.create( src.rows + top + bottom, src.cols + left + right, src.type() );
-    Mat dst = _dst.getMat();
     if( src.isSubmatrix() && (borderType & BORDER_ISOLATED) == 0 )
     {
         Size wholeSize;
@@ -221,6 +218,16 @@ void cv::copyMakeBorder( InputArray _src, OutputArray _dst, int top, int bottom,
         bottom -= dbottom;
         right -= dright;
     }
+    _dst.create( src.rows + top + bottom, src.cols + left + right, src.type() );
+    Mat dst = _dst.getMat();
+
+    if(top == 0 && left == 0 && bottom == 0 && right == 0)
+    {
+        if(src.data != dst.data)
+            src.copyTo(dst);
+        return;
+    }
+
     borderType &= ~BORDER_ISOLATED;

modules/imgproc/test/test_pc.cpp
@@ -63,8 +63,8 @@ void CV_PhaseCorrelatorTest::run( int )
 {
     ts->set_failed_test_info(cvtest::TS::OK);
-    Mat r1 = Mat::ones(Size(128, 128), CV_64F);
-    Mat r2 = Mat::ones(Size(128, 128), CV_64F);
+    Mat r1 = Mat::ones(Size(129, 128), CV_64F);
+    Mat r2 = Mat::ones(Size(129, 128), CV_64F);
     double expectedShiftX = -10.0;
     double expectedShiftY = -20.0;

modules/video/include/opencv2/video/tracking.hpp
@@ -48,7 +48,7 @@
 #define __OPENCV_TRACKING_HPP__
 #include "opencv2/core/core.hpp"
-#include "opencv2/imgproc/imgproc_c.h"
+#include "opencv2/imgproc/imgproc.hpp"
 #ifdef __cplusplus
 extern "C" {
@@ -303,16 +303,19 @@ enum
     OPTFLOW_FARNEBACK_GAUSSIAN = 256
 };
+//! constructs a pyramid which can be used as input for calcOpticalFlowPyrLK
+CV_EXPORTS_W int buildOpticalFlowPyramid(InputArray _img, OutputArrayOfArrays pyramid,
+                                         Size winSize, int maxLevel, bool withDerivatives = true,
+                                         int pyrBorder = BORDER_REFLECT_101, int derivBorder = BORDER_CONSTANT,
+                                         bool tryReuseInputImage = true);
+
 //! computes sparse optical flow using multi-scale Lucas-Kanade algorithm
 CV_EXPORTS_W void calcOpticalFlowPyrLK( InputArray prevImg, InputArray nextImg,
                            InputArray prevPts, CV_OUT InputOutputArray nextPts,
                            OutputArray status, OutputArray err,
                            Size winSize=Size(21,21), int maxLevel=3,
-                           TermCriteria criteria=TermCriteria(
-                            TermCriteria::COUNT+TermCriteria::EPS,
-                            30, 0.01),
-                           int flags=0,
-                           double minEigThreshold=1e-4);
+                           TermCriteria criteria=TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 0.01),
+                           int flags=0, double minEigThreshold=1e-4);
 //! computes dense optical flow using Farneback algorithm
 CV_EXPORTS_W void calcOpticalFlowFarneback( InputArray prev, InputArray next,
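The new buildOpticalFlowPyramid entry point pairs with the lkpyramid.cpp changes further down, which let calcOpticalFlowPyrLK accept prebuilt pyramids in place of images. A hypothetical usage sketch (function name and sizes are illustrative, assuming the 2.4-era API as declared above):

    #include "opencv2/video/tracking.hpp"
    #include <vector>

    void trackWithSharedPyramids(const cv::Mat& prev, const cv::Mat& next,
                                 std::vector<cv::Point2f>& pts0,
                                 std::vector<cv::Point2f>& pts1)
    {
        const cv::Size win(21, 21);
        const int maxLevel = 3;

        // build the padded pyramids once; they can be reused across calls
        std::vector<cv::Mat> prevPyr, nextPyr;
        cv::buildOpticalFlowPyramid(prev, prevPyr, win, maxLevel);
        cv::buildOpticalFlowPyramid(next, nextPyr, win, maxLevel);

        std::vector<uchar> status;
        std::vector<float> err;
        // vectors of Mat are detected as pyramids instead of plain images
        cv::calcOpticalFlowPyrLK(prevPyr, nextPyr, pts0, pts1, status, err, win, maxLevel);
    }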

modules/video/perf/perf_optflowpyrlk.cpp
@@ -33,7 +33,7 @@ PERF_TEST_P(Path_Idx_Cn_NPoints_WSize, OpticalFlowPyrLK, testing::Combine(
     testing::Range(0, 3),
     testing::Values(1, 3, 4),
     testing::Values(make_tuple(9, 9), make_tuple(15, 15)),
-    testing::Values(11, 21, 25)
+    testing::Values(7, 11, 21, 25)
     )
 )
 {
@@ -49,7 +49,7 @@ PERF_TEST_P(Path_Idx_Cn_NPoints_WSize, OpticalFlowPyrLK, testing::Combine(
     int nPointsY = min(get<1>(get<3>(GetParam())), img1.rows);
     int winSize = get<4>(GetParam());
     int maxLevel = 2;
-    TermCriteria criteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 5, 0.01);
+    TermCriteria criteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS, 7, 0.001);
     int flags = 0;
     double minEigThreshold = 1e-4;

modules/video/src/bgfg_gaussmix2.cpp
@@ -571,7 +571,7 @@ void BackgroundSubtractorMOG2::operator()(InputArray _image, OutputArray _fgmask
               bgmodelUsedModes.data, nmixtures, (float)learningRate,
               (float)varThreshold,
               backgroundRatio, varThresholdGen,
-              fVarInit, fVarMin, fVarMax, fCT, fTau,
+              fVarInit, fVarMin, fVarMax, -learningRate*fCT, fTau,
               bShadowDetection, nShadowDetection));
 }
 }

modules/video/src/lkpyramid.cpp
@@ -493,6 +493,103 @@ struct LKTrackerInvoker
 }

+int cv::buildOpticalFlowPyramid(InputArray _img, OutputArrayOfArrays pyramid, Size winSize, int maxLevel, bool withDerivatives,
+                                int pyrBorder, int derivBorder, bool tryReuseInputImage)
+{
+    Mat img = _img.getMat();
+    CV_Assert(img.depth() == CV_8U && winSize.width > 2 && winSize.height > 2 );
+
+    int pyrstep = withDerivatives ? 2 : 1;
+
+    pyramid.create(1, (maxLevel + 1) * pyrstep, 0 /*type*/, -1, true, 0);
+
+    int derivType = CV_MAKETYPE(DataType<deriv_type>::depth, img.channels() * 2);
+
+    //level 0
+    bool lvl0IsSet = false;
+    if(tryReuseInputImage && img.isSubmatrix() && (pyrBorder & BORDER_ISOLATED) == 0)
+    {
+        Size wholeSize;
+        Point ofs;
+        img.locateROI(wholeSize, ofs);
+        if (ofs.x >= winSize.width && ofs.y >= winSize.height
+              && ofs.x + img.cols + winSize.width <= wholeSize.width
+              && ofs.y + img.rows + winSize.height <= wholeSize.height)
+        {
+            pyramid.getMatRef(0) = img;
+            lvl0IsSet = true;
+        }
+    }
+
+    if(!lvl0IsSet)
+    {
+        Mat& temp = pyramid.getMatRef(0);
+
+        if(!temp.empty())
+            temp.adjustROI(winSize.height, winSize.height, winSize.width, winSize.width);
+        if(temp.type() != img.type() || temp.cols != winSize.width*2 + img.cols || temp.rows != winSize.height * 2 + img.rows)
+            temp.create(img.rows + winSize.height*2, img.cols + winSize.width*2, img.type());
+
+        if(pyrBorder == BORDER_TRANSPARENT)
+            img.copyTo(temp(Rect(winSize.width, winSize.height, img.cols, img.rows)));
+        else
+            copyMakeBorder(img, temp, winSize.height, winSize.height, winSize.width, winSize.width, pyrBorder);
+        temp.adjustROI(-winSize.height, -winSize.height, -winSize.width, -winSize.width);
+    }
+
+    Size sz = img.size();
+    Mat prevLevel = pyramid.getMatRef(0);
+    Mat thisLevel = prevLevel;
+
+    for(int level = 0; level <= maxLevel; ++level)
+    {
+        if (level != 0)
+        {
+            Mat& temp = pyramid.getMatRef(level * pyrstep);
+
+            if(!temp.empty())
+                temp.adjustROI(winSize.height, winSize.height, winSize.width, winSize.width);
+            if(temp.type() != img.type() || temp.cols != winSize.width*2 + sz.width || temp.rows != winSize.height * 2 + sz.height)
+                temp.create(sz.height + winSize.height*2, sz.width + winSize.width*2, img.type());
+
+            thisLevel = temp(Rect(winSize.width, winSize.height, sz.width, sz.height));
+            pyrDown(prevLevel, thisLevel, sz);
+
+            if(pyrBorder != BORDER_TRANSPARENT)
+                copyMakeBorder(thisLevel, temp, winSize.height, winSize.height, winSize.width, winSize.width, pyrBorder|BORDER_ISOLATED);
+            temp.adjustROI(-winSize.height, -winSize.height, -winSize.width, -winSize.width);
+        }
+
+        if(withDerivatives)
+        {
+            Mat& deriv = pyramid.getMatRef(level * pyrstep + 1);
+
+            if(!deriv.empty())
+                deriv.adjustROI(winSize.height, winSize.height, winSize.width, winSize.width);
+            if(deriv.type() != derivType || deriv.cols != winSize.width*2 + sz.width || deriv.rows != winSize.height * 2 + sz.height)
+                deriv.create(sz.height + winSize.height*2, sz.width + winSize.width*2, derivType);
+
+            Mat derivI = deriv(Rect(winSize.width, winSize.height, sz.width, sz.height));
+            calcSharrDeriv(thisLevel, derivI);
+
+            if(derivBorder != BORDER_TRANSPARENT)
+                copyMakeBorder(derivI, deriv, winSize.height, winSize.height, winSize.width, winSize.width, derivBorder|BORDER_ISOLATED);
+            deriv.adjustROI(-winSize.height, -winSize.height, -winSize.width, -winSize.width);
+        }
+
+        sz = Size((sz.width+1)/2, (sz.height+1)/2);
+        if( sz.width <= winSize.width || sz.height <= winSize.height )
+        {
+            pyramid.create(1, (level + 1) * pyrstep, 0 /*type*/, -1, true, 0);//check this
+            return level;
+        }
+
+        prevLevel = thisLevel;
+    }
+
+    return maxLevel;
+}
+
 void cv::calcOpticalFlowPyrLK( InputArray _prevImg, InputArray _nextImg,
                                InputArray _prevPts, InputOutputArray _nextPts,
                                OutputArray _status, OutputArray _err,
@@ -504,14 +601,12 @@ void cv::calcOpticalFlowPyrLK( InputArray _prevImg, InputArray _nextImg,
     if (tegra::calcOpticalFlowPyrLK(_prevImg, _nextImg, _prevPts, _nextPts, _status, _err, winSize, maxLevel, criteria, flags, minEigThreshold))
         return;
 #endif
-    Mat prevImg = _prevImg.getMat(), nextImg = _nextImg.getMat(), prevPtsMat = _prevPts.getMat();
+    Mat prevPtsMat = _prevPts.getMat();
     const int derivDepth = DataType<deriv_type>::depth;

     CV_Assert( maxLevel >= 0 && winSize.width > 2 && winSize.height > 2 );
-    CV_Assert( prevImg.size() == nextImg.size() &&
-               prevImg.type() == nextImg.type() );

-    int level=0, i, k, npoints, cn = prevImg.channels(), cn2 = cn*2;
+    int level=0, i, npoints;
     CV_Assert( (npoints = prevPtsMat.checkVector(2, CV_32F, true)) >= 0 );

     if( npoints == 0 )
@@ -548,43 +643,73 @@ void cv::calcOpticalFlowPyrLK( InputArray _prevImg, InputArray _nextImg,
         err = (float*)errMat.data;
     }

-    vector<Mat> prevPyr(maxLevel+1), nextPyr(maxLevel+1);
-
-    // build the image pyramids.
-    // we pad each level with +/-winSize.{width|height}
-    // pixels to simplify the further patch extraction.
-    // Thanks to the reference counting, "temp" mat (the pyramid layer + border)
-    // will not be deallocated, since {prevPyr|nextPyr}[level] will be a ROI in "temp".
-    for( k = 0; k < 2; k++ )
-    {
-        Size sz = prevImg.size();
-        vector<Mat>& pyr = k == 0 ? prevPyr : nextPyr;
-        Mat& img0 = k == 0 ? prevImg : nextImg;
-
-        for( level = 0; level <= maxLevel; level++ )
-        {
-            Mat temp(sz.height + winSize.height*2,
-                     sz.width + winSize.width*2,
-                     img0.type());
-            pyr[level] = temp(Rect(winSize.width, winSize.height, sz.width, sz.height));
-            if( level == 0 )
-                img0.copyTo(pyr[level]);
-            else
-                pyrDown(pyr[level-1], pyr[level], pyr[level].size());
-            copyMakeBorder(pyr[level], temp, winSize.height, winSize.height,
-                           winSize.width, winSize.width, BORDER_REFLECT_101|BORDER_ISOLATED);
-            sz = Size((sz.width+1)/2, (sz.height+1)/2);
-            if( sz.width <= winSize.width || sz.height <= winSize.height )
-            {
-                maxLevel = level;
-                break;
-            }
-        }
-    }
-
-    // dI/dx ~ Ix, dI/dy ~ Iy
-    Mat derivIBuf((prevImg.rows + winSize.height*2),
-                  (prevImg.cols + winSize.width*2),
-                  CV_MAKETYPE(derivDepth, cn2));
+    vector<Mat> prevPyr, nextPyr;
+    int levels1 = -1;
+    int lvlStep1 = 1;
+    int levels2 = -1;
+    int lvlStep2 = 1;
+
+    if(_prevImg.kind() == _InputArray::STD_VECTOR_MAT)
+    {
+        _prevImg.getMatVector(prevPyr);
+
+        levels1 = int(prevPyr.size()) - 1;
+        CV_Assert(levels1 >= 0);
+
+        if (levels1 % 2 == 1 && prevPyr[0].channels() * 2 == prevPyr[1].channels() && prevPyr[1].depth() == derivDepth)
+        {
+            lvlStep1 = 2;
+            levels1 /= 2;
+        }
+
+        // ensure that pyramid has required padding
+        if(levels1 > 0)
+        {
+            Size fullSize;
+            Point ofs;
+            prevPyr[lvlStep1].locateROI(fullSize, ofs);
+            CV_Assert(ofs.x >= winSize.width && ofs.y >= winSize.height
+                && ofs.x + prevPyr[lvlStep1].cols + winSize.width <= fullSize.width
+                && ofs.y + prevPyr[lvlStep1].rows + winSize.height <= fullSize.height);
+        }
+    }
+
+    if(_nextImg.kind() == _InputArray::STD_VECTOR_MAT)
+    {
+        _nextImg.getMatVector(nextPyr);
+
+        levels2 = int(nextPyr.size()) - 1;
+        CV_Assert(levels2 >= 0);
+
+        if (levels2 % 2 == 1 && nextPyr[0].channels() * 2 == nextPyr[1].channels() && nextPyr[1].depth() == derivDepth)
+        {
+            lvlStep2 = 2;
+            levels2 /= 2;
+        }
+
+        // ensure that pyramid has required padding
+        if(levels2 > 0)
+        {
+            Size fullSize;
+            Point ofs;
+            nextPyr[lvlStep2].locateROI(fullSize, ofs);
+            CV_Assert(ofs.x >= winSize.width && ofs.y >= winSize.height
+                && ofs.x + nextPyr[lvlStep2].cols + winSize.width <= fullSize.width
+                && ofs.y + nextPyr[lvlStep2].rows + winSize.height <= fullSize.height);
+        }
+    }
+
+    if(levels1 >= 0 || levels2 >= 0)
+        maxLevel = std::max(levels1, levels2);
+
+    if (levels1 < 0)
+        maxLevel = levels1 = buildOpticalFlowPyramid(_prevImg, prevPyr, winSize, maxLevel, false);
+
+    if (levels2 < 0)
+        levels2 = buildOpticalFlowPyramid(_nextImg, nextPyr, winSize, maxLevel, false);
+
+    CV_Assert(levels1 == levels2);

     if( (criteria.type & TermCriteria::COUNT) == 0 )
         criteria.maxCount = 30;
@ -596,17 +721,31 @@ void cv::calcOpticalFlowPyrLK( InputArray _prevImg, InputArray _nextImg,
criteria.epsilon = std::min(std::max(criteria.epsilon, 0.), 10.); criteria.epsilon = std::min(std::max(criteria.epsilon, 0.), 10.);
criteria.epsilon *= criteria.epsilon; criteria.epsilon *= criteria.epsilon;
// dI/dx ~ Ix, dI/dy ~ Iy
Mat derivIBuf;
if(lvlStep1 == 1)
derivIBuf.create(prevPyr[0].rows + winSize.height*2, prevPyr[0].cols + winSize.width*2, CV_MAKETYPE(derivDepth, prevPyr[0].channels() * 2));
for( level = maxLevel; level >= 0; level-- ) for( level = maxLevel; level >= 0; level-- )
{ {
Size imgSize = prevPyr[level].size(); Mat derivI;
Mat _derivI( imgSize.height + winSize.height*2, if(lvlStep1 == 1)
imgSize.width + winSize.width*2, derivIBuf.type(), derivIBuf.data ); {
Mat derivI = _derivI(Rect(winSize.width, winSize.height, imgSize.width, imgSize.height)); Size imgSize = prevPyr[level * lvlStep1].size();
calcSharrDeriv(prevPyr[level], derivI); Mat _derivI( imgSize.height + winSize.height*2,
copyMakeBorder(derivI, _derivI, winSize.height, winSize.height, winSize.width, winSize.width, BORDER_CONSTANT|BORDER_ISOLATED); imgSize.width + winSize.width*2, derivIBuf.type(), derivIBuf.data );
derivI = _derivI(Rect(winSize.width, winSize.height, imgSize.width, imgSize.height));
calcSharrDeriv(prevPyr[level * lvlStep1], derivI);
copyMakeBorder(derivI, _derivI, winSize.height, winSize.height, winSize.width, winSize.width, BORDER_CONSTANT|BORDER_ISOLATED);
}
else
derivI = prevPyr[level * lvlStep1 + 1];
parallel_for(BlockedRange(0, npoints), LKTrackerInvoker(prevPyr[level], derivI, CV_Assert(prevPyr[level * lvlStep1].size() == nextPyr[level * lvlStep2].size());
nextPyr[level], prevPts, nextPts, CV_Assert(prevPyr[level * lvlStep1].type() == nextPyr[level * lvlStep2].type());
parallel_for(BlockedRange(0, npoints), LKTrackerInvoker(prevPyr[level * lvlStep1], derivI,
nextPyr[level * lvlStep2], prevPts, nextPts,
status, err, status, err,
winSize, criteria, level, maxLevel, winSize, criteria, level, maxLevel,
flags, (float)minEigThreshold)); flags, (float)minEigThreshold));

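Note: the hunk above lets cv::calcOpticalFlowPyrLK accept precomputed, padded pyramids (input kind STD_VECTOR_MAT, as produced by buildOpticalFlowPyramid) so the previous frame's pyramid can be reused. The Java bindings expose only the plain-image overload, so the sketch below shows the conventional call. This is a minimal sketch, assuming the OpenCV 2.4 Java API; LkSketch and track are illustrative names, not part of the commit.

import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.video.Video;

public class LkSketch {
    // Tracks prevPts from prevGray to nextGray; status.toArray()[i] == 1
    // means point i was found. The image pyramids are built internally here;
    // the patched C++ overload above can instead take them precomputed.
    static MatOfPoint2f track(Mat prevGray, Mat nextGray, MatOfPoint2f prevPts) {
        MatOfPoint2f nextPts = new MatOfPoint2f();
        MatOfByte status = new MatOfByte();
        MatOfFloat err = new MatOfFloat();
        Video.calcOpticalFlowPyrLK(prevGray, nextGray, prevPts, nextPts, status, err);
        return nextPts;
    }
}
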
@@ -53,7 +53,7 @@
 #include "opencv2/video/tracking.hpp"
 #include "opencv2/video/background_segm.hpp"
-#include "opencv2/imgproc/imgproc.hpp"
+#include "opencv2/imgproc/imgproc_c.h"
 #include "opencv2/core/internal.hpp"

 #ifdef HAVE_TEGRA_OPTIMIZATION

@@ -26,13 +26,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        synchronized (this) {
+            releaseCamera();
+            mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
+            if (!mCamera.isOpened()) {
+                mCamera.release();
+                mCamera = null;
+                Log.e(TAG, "Failed to open native camera");
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                mCamera.release();
+                mCamera = null;
+            }
+        }
+    }
+
+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera("+width+", "+height+")");
         synchronized (this) {
             if (mCamera != null && mCamera.isOpened()) {
+                Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
                 List<Size> sizes = mCamera.getSupportedPreviewSizes();
+                Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
                 int mFrameWidth = width;
                 int mFrameHeight = height;
@@ -52,28 +75,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
                 mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
             }
         }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
     }

     public void surfaceCreated(SurfaceHolder holder) {
         Log.i(TAG, "surfaceCreated");
-        mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
-        if (mCamera.isOpened()) {
-            (new Thread(this)).start();
-        } else {
-            mCamera.release();
-            mCamera = null;
-            Log.e(TAG, "Failed to open native camera");
-        }
+        (new Thread(this)).start();
     }

     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.i(TAG, "surfaceDestroyed");
-        if (mCamera != null) {
-            synchronized (this) {
-                mCamera.release();
-                mCamera = null;
-            }
-        }
+        releaseCamera();
     }

     protected abstract Bitmap processFrame(VideoCapture capture);

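Note: after this refactor the base class owns the camera through openCamera()/releaseCamera()/setupCamera(), and a subclass only implements processFrame(). A minimal sketch of such a subclass, assuming the base class above in the same package; PassThroughView is an invented name, while the retrieve/matToBitmap calls are the ones the samples themselves use.

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import android.content.Context;
import android.graphics.Bitmap;

class PassThroughView extends SampleCvViewBase {
    private Mat mRgba = new Mat();

    public PassThroughView(Context context) {
        super(context);
    }

    // Called from the base class's processing thread for every grabbed frame:
    // pull the RGBA frame out of the capture and hand back a Bitmap to draw.
    @Override
    protected Bitmap processFrame(VideoCapture capture) {
        capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
        Utils.matToBitmap(mRgba, bmp);
        return bmp;
    }
}
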
@@ -1,6 +1,8 @@
 package org.opencv.samples.puzzle15;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;
@@ -18,6 +20,31 @@ public class puzzle15Activity extends Activity {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {

@@ -18,7 +18,7 @@ import android.view.View;
 import android.view.View.OnTouchListener;

 public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
     private Mat mRgba;
     private Mat mRgba15;
     private Mat[] mCells;
     private Mat[] mCells15;
@@ -45,13 +45,13 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
     }

     @Override
-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        super.surfaceChanged(_holder, format, width, height);
+    public void surfaceCreated(SurfaceHolder holder) {
         synchronized (this) {
             // initialize Mat before usage
             mRgba = new Mat();
         }
-    }
+        super.surfaceCreated(holder);
+    }

     public static void shuffle(int[] array) {
         for (int i = array.length; i > 1; i--) {
@@ -185,7 +185,9 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
     }

     public boolean onTouch(View v, MotionEvent event) {
+        if(mRgba==null) return false;
+
         int cols = mRgba.cols();
         int rows = mRgba.rows();

         float xoffset = (getWidth() - cols) / 2;
         float yoffset = (getHeight() - rows) / 2;

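Note: the new null guard matters because onTouch can fire before the first frame has initialized mRgba. The offset arithmetic above reflects that the frame is drawn centered in the view; a standalone sketch of the same mapping, with invented helper names (TouchMap, viewToFrameX/Y):

final class TouchMap {
    // The frame is centered, so a view coordinate maps to a frame coordinate
    // by subtracting half of the view/frame size difference (mirrors the
    // xoffset/yoffset computation in onTouch above).
    static int viewToFrameX(float viewX, int viewWidth, int frameCols) {
        float xoffset = (viewWidth - frameCols) / 2f;
        return (int) (viewX - xoffset);
    }

    static int viewToFrameY(float viewY, int viewHeight, int frameRows) {
        float yoffset = (viewHeight - frameRows) / 2f;
        return (int) (viewY - yoffset);
    }
}
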
@@ -1,6 +1,8 @@
 package org.opencv.samples.colorblobdetect;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Window;
@@ -15,6 +17,31 @@ public class ColorBlobDetectionActivity extends Activity {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {

@@ -56,12 +56,13 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
     }

     @Override
-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        super.surfaceChanged(_holder, format, width, height);
+    public void surfaceCreated(SurfaceHolder holder) {
         synchronized (this) {
             // initialize Mat before usage
             mRgba = new Mat();
         }
+        super.surfaceCreated(holder);
     }

     public boolean onTouch(View v, MotionEvent event)

@@ -26,13 +26,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        synchronized (this) {
+            releaseCamera();
+            mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
+            if (!mCamera.isOpened()) {
+                mCamera.release();
+                mCamera = null;
+                Log.e(TAG, "Failed to open native camera");
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                mCamera.release();
+                mCamera = null;
+            }
+        }
+    }
+
+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera("+width+", "+height+")");
         synchronized (this) {
             if (mCamera != null && mCamera.isOpened()) {
+                Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
                 List<Size> sizes = mCamera.getSupportedPreviewSizes();
+                Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
                 int mFrameWidth = width;
                 int mFrameHeight = height;
@@ -52,28 +75,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
                 mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
             }
         }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
     }

     public void surfaceCreated(SurfaceHolder holder) {
         Log.i(TAG, "surfaceCreated");
-        mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
-        if (mCamera.isOpened()) {
-            (new Thread(this)).start();
-        } else {
-            mCamera.release();
-            mCamera = null;
-            Log.e(TAG, "Failed to open native camera");
-        }
+        (new Thread(this)).start();
     }

     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.i(TAG, "surfaceDestroyed");
-        if (mCamera != null) {
-            synchronized (this) {
-                mCamera.release();
-                mCamera = null;
-            }
-        }
+        releaseCamera();
     }

     protected abstract Bitmap processFrame(VideoCapture capture);

@@ -1,6 +1,8 @@
 package org.opencv.samples.fd;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;
@@ -14,6 +16,8 @@ public class FdActivity extends Activity {
     private MenuItem mItemFace40;
     private MenuItem mItemFace30;
     private MenuItem mItemFace20;
+
+    private FdView mView;

     public static float minFaceSize = 0.5f;
@@ -21,13 +25,39 @@ public class FdActivity extends Activity {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {
         Log.i(TAG, "onCreate");
         super.onCreate(savedInstanceState);
         requestWindowFeature(Window.FEATURE_NO_TITLE);
-        setContentView(new FdView(this));
+        mView = new FdView(this);
+        setContentView(mView);
     }

     @Override

@@ -62,17 +62,17 @@ class FdView extends SampleCvViewBase {
     }

     @Override
-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        super.surfaceChanged(_holder, format, width, height);
+    public void surfaceCreated(SurfaceHolder holder) {
         synchronized (this) {
             // initialize Mats before usage
             mGray = new Mat();
             mRgba = new Mat();
         }
-    }
-
-    @Override
+        super.surfaceCreated(holder);
+    }
+
+    @Override
     protected Bitmap processFrame(VideoCapture capture) {
         capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
         capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

@@ -28,13 +28,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        synchronized (this) {
+            releaseCamera();
+            mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
+            if (!mCamera.isOpened()) {
+                mCamera.release();
+                mCamera = null;
+                Log.e(TAG, "Failed to open native camera");
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                mCamera.release();
+                mCamera = null;
+            }
+        }
+    }
+
+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera("+width+", "+height+")");
         synchronized (this) {
             if (mCamera != null && mCamera.isOpened()) {
+                Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
                 List<Size> sizes = mCamera.getSupportedPreviewSizes();
+                Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
                 int mFrameWidth = width;
                 int mFrameHeight = height;
@@ -54,28 +77,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
                 mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
             }
         }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
     }

     public void surfaceCreated(SurfaceHolder holder) {
         Log.i(TAG, "surfaceCreated");
-        mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
-        if (mCamera.isOpened()) {
-            (new Thread(this)).start();
-        } else {
-            mCamera.release();
-            mCamera = null;
-            Log.e(TAG, "Failed to open native camera");
-        }
+        (new Thread(this)).start();
     }

     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.i(TAG, "surfaceDestroyed");
-        if (mCamera != null) {
-            synchronized (this) {
-                mCamera.release();
-                mCamera = null;
-            }
-        }
+        releaseCamera();
     }

     protected abstract Bitmap processFrame(VideoCapture capture);

@@ -1,6 +1,8 @@
 package org.opencv.samples.imagemanipulations;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;
@@ -8,7 +10,8 @@ import android.view.MenuItem;
 import android.view.Window;

 public class ImageManipulationsActivity extends Activity {
-    private static final String TAG = "Sample::Activity";
+    private static final String TAG = "Sample-ImageManipulations::Activity";

     public static final int VIEW_MODE_RGBA = 0;
     public static final int VIEW_MODE_HIST = 1;
@@ -29,18 +32,46 @@ public class ImageManipulationsActivity extends Activity {
     private MenuItem mItemPreviewPosterize;
     public static int viewMode = VIEW_MODE_RGBA;

+    private ImageManipulationsView mView;
+
     public ImageManipulationsActivity() {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {
         Log.i(TAG, "onCreate");
         super.onCreate(savedInstanceState);
         requestWindowFeature(Window.FEATURE_NO_TITLE);
-        setContentView(new ImageManipulationsView(this));
+        mView = new ImageManipulationsView(this);
+        setContentView(mView);
     }

     @Override

@@ -55,9 +55,7 @@ class ImageManipulationsView extends SampleCvViewBase {
     }

     @Override
-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        super.surfaceChanged(_holder, format, width, height);
+    public void surfaceCreated(SurfaceHolder holder) {
         synchronized (this) {
             // initialize Mats before usage
             mGray = new Mat();
@@ -83,9 +81,11 @@ class ImageManipulationsView extends SampleCvViewBase {
             mP1 = new Point();
             mP2 = new Point();
         }
-    }
-
-    private void CreateAuxiliaryMats() {
+        super.surfaceCreated(holder);
+    }
+
+    private void CreateAuxiliaryMats() {
         if (mRgba.empty())
             return;
@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
 import android.view.SurfaceView;

 public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
-    private static final String TAG = "Sample::SurfaceView";
+    private static final String TAG = "Sample-ImageManipulations::SurfaceView";

     private SurfaceHolder mHolder;
     private VideoCapture mCamera;
@@ -28,13 +28,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        synchronized (this) {
+            releaseCamera();
+            mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
+            if (!mCamera.isOpened()) {
+                mCamera.release();
+                mCamera = null;
+                Log.e(TAG, "Failed to open native camera");
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                mCamera.release();
+                mCamera = null;
+            }
+        }
+    }
+
+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera("+width+", "+height+")");
         synchronized (this) {
             if (mCamera != null && mCamera.isOpened()) {
+                Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
                 List<Size> sizes = mCamera.getSupportedPreviewSizes();
+                Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
                 int mFrameWidth = width;
                 int mFrameHeight = height;
@@ -54,28 +77,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
                 mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
             }
         }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
     }

     public void surfaceCreated(SurfaceHolder holder) {
         Log.i(TAG, "surfaceCreated");
-        mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
-        if (mCamera.isOpened()) {
-            (new Thread(this)).start();
-        } else {
-            mCamera.release();
-            mCamera = null;
-            Log.e(TAG, "Failed to open native camera");
-        }
+        (new Thread(this)).start();
     }

     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.i(TAG, "surfaceDestroyed");
-        if (mCamera != null) {
-            synchronized (this) {
-                mCamera.release();
-                mCamera = null;
-            }
-        }
+        releaseCamera();
     }

     protected abstract Bitmap processFrame(VideoCapture capture);
@@ -88,8 +105,10 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
             Bitmap bmp = null;

             synchronized (this) {
-                if (mCamera == null)
+                if (mCamera == null) {
+                    Log.i(TAG, "mCamera == null");
                     break;
+                }

                 if (!mCamera.grab()) {
                     Log.e(TAG, "mCamera.grab() failed");

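Note: the last hunk above touches the base class's processing thread. For orientation, a condensed sketch of that run() loop, reconstructed from the visible grab/processFrame/log lines rather than copied verbatim from the file:

    public void run() {
        while (true) {
            Bitmap bmp = null;

            synchronized (this) {
                // Camera was released (e.g. in onPause) -> leave the loop.
                if (mCamera == null) {
                    Log.i(TAG, "mCamera == null");
                    break;
                }

                if (!mCamera.grab()) {
                    Log.e(TAG, "mCamera.grab() failed");
                    break;
                }

                bmp = processFrame(mCamera); // subclass turns the frame into a Bitmap
            }

            if (bmp != null) {
                Canvas canvas = getHolder().lockCanvas();
                if (canvas != null) {
                    // draw the frame centered in the surface
                    canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2,
                                      (canvas.getHeight() - bmp.getHeight()) / 2, null);
                    getHolder().unlockCanvasAndPost(canvas);
                }
                bmp.recycle();
            }
        }
        Log.i(TAG, "Finishing processing thread");
    }
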
@@ -1,4 +1,4 @@
 <?xml version="1.0" encoding="utf-8"?>
 <resources>
-    <string name="app_name">Tutorial 1 Basic - 0. Android Camera</string>
+    <string name="app_name">Tutorial 0 (Basic) - Android Camera</string>
 </resources>

@@ -1,6 +1,8 @@
 package org.opencv.samples.tutorial0;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;
@@ -19,7 +21,32 @@ public class Sample0Base extends Activity {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

-    /** Called when the activity is first created. */
+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
+    /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {
         Log.i(TAG, "onCreate");

@@ -62,7 +62,8 @@ class Sample0View extends SampleViewBase {
     }

     @Override
-    protected void onPreviewStared(int previewWidth, int previewHeight) {
+    protected void onPreviewStarted(int previewWidth, int previewHeight) {
+        Log.i(TAG, "onPreviewStarted("+previewWidth+", "+previewHeight+")");
         /* Create a bitmap that will be used through to calculate the image to */
         mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
         mRGBA = new int[previewWidth * previewHeight];
@@ -70,12 +71,19 @@ class Sample0View extends SampleViewBase {

     @Override
     protected void onPreviewStopped() {
-        mBitmap.recycle();
-        mBitmap = null;
-        mRGBA = null;
+        Log.i(TAG, "onPreviewStopped");
+        if(mBitmap != null) {
+            mBitmap.recycle();
+            mBitmap = null;
+        }
+        if(mRGBA != null) {
+            mRGBA = null;
+        }
     }

     public void setViewMode(int viewMode) {
+        Log.i(TAG, "setViewMode("+viewMode+")");
         mViewMode = viewMode;
     }
 }

@@ -50,62 +50,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
-        if (mCamera != null) {
-            Camera.Parameters params = mCamera.getParameters();
-            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
-            mFrameWidth = width;
-            mFrameHeight = height;
-
-            // selecting optimal camera preview size
-            {
-                int minDiff = Integer.MAX_VALUE;
-                for (Camera.Size size : sizes) {
-                    if (Math.abs(size.height - height) < minDiff) {
-                        mFrameWidth = size.width;
-                        mFrameHeight = size.height;
-                        minDiff = Math.abs(size.height - height);
-                    }
-                }
-            }
-
-            params.setPreviewSize(getFrameWidth(), getFrameHeight());
-
-            List<String> FocusModes = params.getSupportedFocusModes();
-            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
-            {
-                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
-            }
-
-            mCamera.setParameters(params);
-
-            /* Now allocate the buffer */
-            params = mCamera.getParameters();
-            int size = params.getPreviewSize().width * params.getPreviewSize().height;
-            size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
-            mBuffer = new byte[size];
-            /* The buffer where the current frame will be coppied */
-            mFrame = new byte [size];
-            mCamera.addCallbackBuffer(mBuffer);
-
-            try {
-                setPreview();
-            } catch (IOException e) {
-                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
-            }
-
-            /* Notify that the preview is about to be started and deliver preview size */
-            onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
-
-            /* Now we can start a preview */
-            mCamera.startPreview();
-        }
-    }
-
-    public void surfaceCreated(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        releaseCamera();
         mCamera = Camera.open();
+        if(mCamera == null) {
+            Log.e(TAG, "Can't open camera!");
+            return false;
+        }

         mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
             public void onPreviewFrame(byte[] data, Camera camera) {
@@ -116,15 +68,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
                 camera.addCallbackBuffer(mBuffer);
             }
         });
-
-        (new Thread(this)).start();
+        return true;
     }

-    public void surfaceDestroyed(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceDestroyed");
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
         mThreadRun = false;
-        if (mCamera != null) {
-            synchronized (this) {
+        synchronized (this) {
+            if (mCamera != null) {
                 mCamera.stopPreview();
                 mCamera.setPreviewCallback(null);
                 mCamera.release();
@@ -133,22 +84,92 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
             }
         }
         onPreviewStopped();
     }

+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                Camera.Parameters params = mCamera.getParameters();
+                List<Camera.Size> sizes = params.getSupportedPreviewSizes();
+                mFrameWidth = width;
+                mFrameHeight = height;
+
+                // selecting optimal camera preview size
+                {
+                    int minDiff = Integer.MAX_VALUE;
+                    for (Camera.Size size : sizes) {
+                        if (Math.abs(size.height - height) < minDiff) {
+                            mFrameWidth = size.width;
+                            mFrameHeight = size.height;
+                            minDiff = Math.abs(size.height - height);
+                        }
+                    }
+                }
+
+                params.setPreviewSize(getFrameWidth(), getFrameHeight());
+
+                List<String> FocusModes = params.getSupportedFocusModes();
+                if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
+                {
+                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+                }
+
+                mCamera.setParameters(params);
+
+                /* Now allocate the buffer */
+                params = mCamera.getParameters();
+                int size = params.getPreviewSize().width * params.getPreviewSize().height;
+                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
+                mBuffer = new byte[size];
+                /* The buffer where the current frame will be copied */
+                mFrame = new byte [size];
+                mCamera.addCallbackBuffer(mBuffer);
+
+                try {
+                    setPreview();
+                } catch (IOException e) {
+                    Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
+                }
+
+                /* Notify that the preview is about to be started and deliver preview size */
+                onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
+
+                /* Now we can start a preview */
+                mCamera.startPreview();
+            }
+        }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceCreated");
+        (new Thread(this)).start();
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceDestroyed");
+        releaseCamera();
+    }
+
     /* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
     protected abstract Bitmap processFrame(byte[] data);

     /**
-     * This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
+     * This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called
      * It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
      * @param previewWidth - the width of the preview frames that will be delivered via processFrame
      * @param previewHeight - the height of the preview frames that will be delivered via processFrame
      */
-    protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
+    protected abstract void onPreviewStarted(int previewWidtd, int previewHeight);

     /**
      * This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
      * If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
-     * Any other resourcses used during the preview can be released.
+     * Any other resources used during the preview can be released.
      */
     protected abstract void onPreviewStopped();
@@ -175,5 +196,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
                 }
             }
         }
+        Log.i(TAG, "Finishing processing thread");
     }
 }

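Note: setupCamera() above sizes the preview callback buffer from the negotiated preview format: bytes = width * height * bitsPerPixel(format) / 8, which for the default NV21 preview format means 12 bits, i.e. 1.5 bytes per pixel. The same rule as a standalone sketch; PreviewBuffers/previewBufferSize are invented names, while ImageFormat.getBitsPerPixel is the standard Android API used above.

import android.graphics.ImageFormat;

final class PreviewBuffers {
    // Mirrors the buffer sizing in setupCamera(); for ImageFormat.NV21
    // getBitsPerPixel returns 12, so a 640x480 preview needs 460800 bytes.
    static int previewBufferSize(int width, int height, int previewFormat) {
        return width * height * ImageFormat.getBitsPerPixel(previewFormat) / 8;
    }
}
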
@@ -1,4 +1,4 @@
 <?xml version="1.0" encoding="utf-8"?>
 <resources>
-    <string name="app_name">Tutorial 1 Basic - 1. Add OpenCV</string>
+    <string name="app_name">Tutorial 1 (Basic) - Add OpenCV</string>
 </resources>

@@ -1,6 +1,8 @@
 package org.opencv.samples.tutorial1;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;
@@ -19,6 +21,31 @@ public class Sample1Java extends Activity {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {

@@ -2,16 +2,15 @@ package org.opencv.samples.tutorial1;

 import org.opencv.android.Utils;
 import org.opencv.core.Core;
+import org.opencv.core.CvType;
 import org.opencv.core.Mat;
 import org.opencv.core.Point;
 import org.opencv.core.Scalar;
-import org.opencv.core.CvType;
 import org.opencv.imgproc.Imgproc;

 import android.content.Context;
 import android.graphics.Bitmap;
 import android.util.Log;
-import android.view.SurfaceHolder;

 class Sample1View extends SampleViewBase {
@@ -32,7 +31,7 @@ class Sample1View extends SampleViewBase {
     }

     @Override
-    protected void onPreviewStared(int previewWidth, int previewHeight) {
+    protected void onPreviewStarted(int previewWidth, int previewHeight) {
         synchronized (this) {
             // initialize Mats before usage
             mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
@@ -94,7 +93,7 @@ class Sample1View extends SampleViewBase {
         try {
             Utils.matToBitmap(mRgba, bmp);
         } catch(Exception e) {
-            Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
+            Log.e("org.opencv.samples.tutorial1", "Utils.matToBitmap() throws an exception: " + e.getMessage());
             bmp.recycle();
             bmp = null;
         }

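Note: the mYuv Mat above uses the classic NV21 layout trick: the camera's byte[] preview frame is wrapped as a single-channel Mat with height*3/2 rows before color conversion. A sketch of that step under the same assumption; YuvConverter/yuvToRgba are invented names, and the exact cvtColor call in the sample is assumed, not shown in this hunk.

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

final class YuvConverter {
    // Wrap an NV21 frame (Y plane followed by interleaved VU) in a Mat and
    // convert it to a 4-channel RGBA image.
    static Mat yuvToRgba(byte[] frame, int width, int height) {
        Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
        yuv.put(0, 0, frame);
        Mat rgba = new Mat();
        Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGB, 4);
        return rgba;
    }
}
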
@@ -49,63 +49,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
         mCamera.setPreviewDisplay(null);
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
-        if (mCamera != null) {
-            Camera.Parameters params = mCamera.getParameters();
-            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
-            mFrameWidth = width;
-            mFrameHeight = height;
-
-            // selecting optimal camera preview size
-            {
-                int minDiff = Integer.MAX_VALUE;
-                for (Camera.Size size : sizes) {
-                    if (Math.abs(size.height - height) < minDiff) {
-                        mFrameWidth = size.width;
-                        mFrameHeight = size.height;
-                        minDiff = Math.abs(size.height - height);
-                    }
-                }
-            }
-
-            params.setPreviewSize(getFrameWidth(), getFrameHeight());
-
-            List<String> FocusModes = params.getSupportedFocusModes();
-            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
-            {
-                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
-            }
-
-            mCamera.setParameters(params);
-
-            /* Now allocate the buffer */
-            params = mCamera.getParameters();
-            int size = params.getPreviewSize().width * params.getPreviewSize().height;
-            size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
-            mBuffer = new byte[size];
-            /* The buffer where the current frame will be coppied */
-            mFrame = new byte [size];
-            mCamera.addCallbackBuffer(mBuffer);
-
-            try {
-                setPreview();
-            } catch (IOException e) {
-                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
-            }
-
-            /* Notify that the preview is about to be started and deliver preview size */
-            onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
-
-            /* Now we can start a preview */
-            mCamera.startPreview();
-        }
-    }
-
-    public void surfaceCreated(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        releaseCamera();
         mCamera = Camera.open();
+        if(mCamera == null) {
+            Log.e(TAG, "Can't open camera!");
+            return false;
+        }

         mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
             public void onPreviewFrame(byte[] data, Camera camera) {
@@ -116,15 +67,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
                 camera.addCallbackBuffer(mBuffer);
             }
         });
-
-        (new Thread(this)).start();
+        return true;
     }

-    public void surfaceDestroyed(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceDestroyed");
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
         mThreadRun = false;
-        if (mCamera != null) {
-            synchronized (this) {
+        synchronized (this) {
+            if (mCamera != null) {
                 mCamera.stopPreview();
                 mCamera.setPreviewCallback(null);
                 mCamera.release();
@@ -133,22 +83,92 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
             }
         }
         onPreviewStopped();
     }

+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                Camera.Parameters params = mCamera.getParameters();
+                List<Camera.Size> sizes = params.getSupportedPreviewSizes();
+                mFrameWidth = width;
+                mFrameHeight = height;
+
+                // selecting optimal camera preview size
+                {
+                    int minDiff = Integer.MAX_VALUE;
+                    for (Camera.Size size : sizes) {
+                        if (Math.abs(size.height - height) < minDiff) {
+                            mFrameWidth = size.width;
+                            mFrameHeight = size.height;
+                            minDiff = Math.abs(size.height - height);
+                        }
+                    }
+                }
+
+                params.setPreviewSize(getFrameWidth(), getFrameHeight());
+
+                List<String> FocusModes = params.getSupportedFocusModes();
+                if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
+                {
+                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+                }
+
+                mCamera.setParameters(params);
+
+                /* Now allocate the buffer */
+                params = mCamera.getParameters();
+                int size = params.getPreviewSize().width * params.getPreviewSize().height;
+                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
+                mBuffer = new byte[size];
+                /* The buffer where the current frame will be copied */
+                mFrame = new byte [size];
+                mCamera.addCallbackBuffer(mBuffer);
+
+                try {
+                    setPreview();
+                } catch (IOException e) {
+                    Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
+                }
+
+                /* Notify that the preview is about to be started and deliver preview size */
+                onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
+
+                /* Now we can start a preview */
+                mCamera.startPreview();
+            }
+        }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceCreated");
+        (new Thread(this)).start();
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceDestroyed");
+        releaseCamera();
+    }
+
     /* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
     protected abstract Bitmap processFrame(byte[] data);

     /**
-     * This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
+     * This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called
      * It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
      * @param previewWidth - the width of the preview frames that will be delivered via processFrame
      * @param previewHeight - the height of the preview frames that will be delivered via processFrame
      */
-    protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
+    protected abstract void onPreviewStarted(int previewWidtd, int previewHeight);

     /**
      * This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
      * If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
-     * Any other resourcses used during the preview can be released.
+     * Any other resources used during the preview can be released.
      */
     protected abstract void onPreviewStopped();

@@ -1,4 +1,4 @@
 <?xml version="1.0" encoding="utf-8"?>
 <resources>
-    <string name="app_name">Tutorial 1 Basic - 2. Use OpenCV Camera</string>
+    <string name="app_name">Tutorial 2 (Basic) - Use OpenCV Camera</string>
 </resources>

@@ -1,6 +1,8 @@
 package org.opencv.samples.tutorial2;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;
@@ -19,18 +21,46 @@ public class Sample2NativeCamera extends Activity {
     private MenuItem mItemPreviewCanny;
     public static int viewMode = VIEW_MODE_RGBA;

+    private Sample2View mView;
+
     public Sample2NativeCamera() {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {
         Log.i(TAG, "onCreate");
         super.onCreate(savedInstanceState);
         requestWindowFeature(Window.FEATURE_NO_TITLE);
-        setContentView(new Sample2View(this));
+        mView = new Sample2View(this);
+        setContentView(mView);
     }

     @Override

@@ -1,12 +1,8 @@
 package org.opencv.samples.tutorial2;

-import java.util.ArrayList;
-import java.util.List;
-
 import org.opencv.android.Utils;
 import org.opencv.core.Core;
 import org.opencv.core.Mat;
-import org.opencv.core.MatOfPoint;
 import org.opencv.core.Point;
 import org.opencv.core.Scalar;
 import org.opencv.highgui.Highgui;
@@ -22,36 +18,25 @@ class Sample2View extends SampleCvViewBase {
     private Mat mRgba;
     private Mat mGray;
     private Mat mIntermediateMat;
-    private Mat mIntermediateMat2;
-    private Mat mEmpty;
-    private Scalar lo, hi;
-    private Scalar bl, wh;

     public Sample2View(Context context) {
         super(context);
     }

     @Override
-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        super.surfaceChanged(_holder, format, width, height);
+    public void surfaceCreated(SurfaceHolder holder) {
         synchronized (this) {
             // initialize Mats before usage
             mGray = new Mat();
             mRgba = new Mat();
             mIntermediateMat = new Mat();
-            mIntermediateMat2 = new Mat();
-            mEmpty = new Mat();
-            lo = new Scalar(85, 100, 30);
-            hi = new Scalar(130, 255, 255);
-            bl = new Scalar(0, 0, 0, 255);
-            wh = new Scalar(255, 255, 255, 255);
         }
+        super.surfaceCreated(holder);
     }

     @Override
     protected Bitmap processFrame(VideoCapture capture) {
-        /**/
         switch (Sample2NativeCamera.viewMode) {
         case Sample2NativeCamera.VIEW_MODE_GRAY:
             capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
@@ -62,36 +47,11 @@ class Sample2View extends SampleCvViewBase {
             Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3, 2, new Scalar(255, 0, 0, 255), 3);
             break;
         case Sample2NativeCamera.VIEW_MODE_CANNY:
-            /*capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
+            capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
             Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
             Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
-            */
-            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
-            Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
-            Core.inRange(mIntermediateMat, lo, hi, mIntermediateMat2); // green
-            Imgproc.dilate(mIntermediateMat2, mIntermediateMat2, mEmpty);
-
-            List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
-            Mat hierarchy = new Mat();
-            Imgproc.findContours(mIntermediateMat2, contours, hierarchy,Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
-            Log.d("processFrame", "contours.size()" + contours.size());
-            double maxArea = 0;
-            int indexMaxArea = -1;
-            for (int i = 0; i < contours.size(); i++) {
-                double s = Imgproc.contourArea(contours.get(i));
-                if(s > maxArea){
-                    indexMaxArea = i;
-                    maxArea = s;
-                }
-            }
-            mRgba.setTo(bl);
-            Imgproc.drawContours(mRgba, contours, indexMaxArea, wh);
-
-            //Imgproc.cvtColor(mIntermediateMat2, mRgba, Imgproc.COLOR_GRAY2RGBA);
             break;
         }
-        /**/

         Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
@@ -99,7 +59,7 @@ class Sample2View extends SampleCvViewBase {
             Utils.matToBitmap(mRgba, bmp);
             return bmp;
         } catch(Exception e) {
-            Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
+            Log.e("org.opencv.samples.tutorial2", "Utils.matToBitmap() throws an exception: " + e.getMessage());
             bmp.recycle();
             return null;
         }
@@ -118,9 +78,6 @@ class Sample2View extends SampleCvViewBase {
         if (mIntermediateMat != null)
             mIntermediateMat.release();
-        if (mIntermediateMat2 != null)
-            mIntermediateMat2.release();

         mRgba = null;
         mGray = null;
         mIntermediateMat = null;

@@ -26,13 +26,36 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        synchronized (this) {
+            releaseCamera();
+            mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
+            if (!mCamera.isOpened()) {
+                mCamera.release();
+                mCamera = null;
+                Log.e(TAG, "Failed to open native camera");
+                return false;
+            }
+        }
+        return true;
+    }
+
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                mCamera.release();
+                mCamera = null;
+            }
+        }
+    }
+
+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera("+width+", "+height+")");
         synchronized (this) {
             if (mCamera != null && mCamera.isOpened()) {
+                Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
                 List<Size> sizes = mCamera.getSupportedPreviewSizes();
+                Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
                 int mFrameWidth = width;
                 int mFrameHeight = height;
@@ -52,28 +75,22 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
                 mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
             }
         }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
     }

     public void surfaceCreated(SurfaceHolder holder) {
         Log.i(TAG, "surfaceCreated");
-        mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
-        if (mCamera.isOpened()) {
-            (new Thread(this)).start();
-        } else {
-            mCamera.release();
-            mCamera = null;
-            Log.e(TAG, "Failed to open native camera");
-        }
+        (new Thread(this)).start();
     }

     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.i(TAG, "surfaceDestroyed");
-        if (mCamera != null) {
-            synchronized (this) {
-                mCamera.release();
-                mCamera = null;
-            }
-        }
+        releaseCamera();
     }

     protected abstract Bitmap processFrame(VideoCapture capture);

@@ -1,4 +1,4 @@
 <?xml version="1.0" encoding="utf-8"?>
 <resources>
-    <string name="app_name">Tutorial 2 Advanced - 1. Add Native OpenCV</string>
+    <string name="app_name">Tutorial 3 (Advanced) - Add Native OpenCV</string>
 </resources>

@@ -1,23 +1,52 @@
 package org.opencv.samples.tutorial3;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Window;

 public class Sample3Native extends Activity {
     private static final String TAG = "Sample::Activity";

+    private Sample3View mView;
+
     public Sample3Native() {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {
         Log.i(TAG, "onCreate");
         super.onCreate(savedInstanceState);
         requestWindowFeature(Window.FEATURE_NO_TITLE);
-        setContentView(new Sample3View(this));
+        mView = new Sample3View(this);
+        setContentView(mView);
     }
 }

@@ -14,7 +14,7 @@ class Sample3View extends SampleViewBase {
     }

     @Override
-    protected void onPreviewStared(int previewWidtd, int previewHeight) {
+    protected void onPreviewStarted(int previewWidtd, int previewHeight) {
         mFrameSize = previewWidtd * previewHeight;
         mRGBA = new int[mFrameSize];
         mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);

@@ -2,7 +2,6 @@ package org.opencv.samples.tutorial3;
 import java.io.IOException;
 import java.util.List;

 import android.content.Context;
 import android.graphics.Bitmap;
 import android.graphics.Canvas;

@@ -49,62 +48,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
         mCamera.setPreviewDisplay(null);
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
-        if (mCamera != null) {
-            Camera.Parameters params = mCamera.getParameters();
-            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
-            mFrameWidth = width;
-            mFrameHeight = height;
-
-            // selecting optimal camera preview size
-            {
-                int minDiff = Integer.MAX_VALUE;
-                for (Camera.Size size : sizes) {
-                    if (Math.abs(size.height - height) < minDiff) {
-                        mFrameWidth = size.width;
-                        mFrameHeight = size.height;
-                        minDiff = Math.abs(size.height - height);
-                    }
-                }
-            }
-
-            params.setPreviewSize(getFrameWidth(), getFrameHeight());
-
-            List<String> FocusModes = params.getSupportedFocusModes();
-            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
-            {
-                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
-            }
-
-            mCamera.setParameters(params);
-
-            /* Now allocate the buffer */
-            params = mCamera.getParameters();
-            int size = params.getPreviewSize().width * params.getPreviewSize().height;
-            size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
-            mBuffer = new byte[size];
-            /* The buffer where the current frame will be coppied */
-            mFrame = new byte [size];
-            mCamera.addCallbackBuffer(mBuffer);
-
-            try {
-                setPreview();
-            } catch (IOException e) {
-                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
-            }
-
-            /* Notify that the preview is about to be started and deliver preview size */
-            onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
-
-            /* Now we can start a preview */
-            mCamera.startPreview();
-        }
-    }
-
-    public void surfaceCreated(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        releaseCamera();
         mCamera = Camera.open();
+        if(mCamera == null) {
+            Log.e(TAG, "Can't open camera!");
+            return false;
+        }

         mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
             public void onPreviewFrame(byte[] data, Camera camera) {
@@ -115,15 +66,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
                 camera.addCallbackBuffer(mBuffer);
             }
         });
-
-        (new Thread(this)).start();
+        return true;
     }

-    public void surfaceDestroyed(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceDestroyed");
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
         mThreadRun = false;
-        if (mCamera != null) {
-            synchronized (this) {
+        synchronized (this) {
+            if (mCamera != null) {
                 mCamera.stopPreview();
                 mCamera.setPreviewCallback(null);
                 mCamera.release();
@@ -132,22 +82,93 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
             }
         }
         onPreviewStopped();
     }

+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                Camera.Parameters params = mCamera.getParameters();
+                List<Camera.Size> sizes = params.getSupportedPreviewSizes();
+                mFrameWidth = width;
+                mFrameHeight = height;
+
+                // selecting optimal camera preview size
+                {
+                    int minDiff = Integer.MAX_VALUE;
+                    for (Camera.Size size : sizes) {
+                        if (Math.abs(size.height - height) < minDiff) {
+                            mFrameWidth = size.width;
+                            mFrameHeight = size.height;
+                            minDiff = Math.abs(size.height - height);
+                        }
+                    }
+                }
+
+                params.setPreviewSize(getFrameWidth(), getFrameHeight());
+
+                List<String> FocusModes = params.getSupportedFocusModes();
+                if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
+                {
+                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+                }
+
+                mCamera.setParameters(params);
+
+                /* Now allocate the buffer */
+                params = mCamera.getParameters();
+                int size = params.getPreviewSize().width * params.getPreviewSize().height;
+                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
+                mBuffer = new byte[size];
+                /* The buffer where the current frame will be copied */
+                mFrame = new byte [size];
+                mCamera.addCallbackBuffer(mBuffer);
+
+                try {
+                    setPreview();
+                } catch (IOException e) {
+                    Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
+                }
+
+                /* Notify that the preview is about to be started and deliver preview size */
+                onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
+
+                /* Now we can start a preview */
+                mCamera.startPreview();
+            }
+        }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceCreated");
+        (new Thread(this)).start();
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceDestroyed");
+        releaseCamera();
+    }
+
     /* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
     protected abstract Bitmap processFrame(byte[] data);

     /**
-     * This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
+     * This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called
      * It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
      * @param previewWidth - the width of the preview frames that will be delivered via processFrame
      * @param previewHeight - the height of the preview frames that will be delivered via processFrame
      */
-    protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
+    protected abstract void onPreviewStarted(int previewWidtd, int previewHeight);

     /**
      * This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
      * If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
-     * Any other resourcses used during the preview can be released.
+     * Any other resources used during the preview can be released.
      */
     protected abstract void onPreviewStopped();

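The three abstract methods above are the whole contract a sample view must implement: prepare resources in onPreviewStarted(), convert each frame in processFrame(), and release in onPreviewStopped(). A hypothetical minimal subclass (not part of this commit; it assumes the usual android.content.Context and android.graphics.Bitmap imports) that renders the luma plane of each NV21 frame as grayscale:

    class GrayView extends SampleViewBase {
        private Bitmap mBitmap;
        private int[] mPixels;

        public GrayView(Context context) {
            super(context);
        }

        @Override
        protected void onPreviewStarted(int previewWidtd, int previewHeight) {
            // Allocate per-preview resources once the frame size is known.
            mPixels = new int[previewWidtd * previewHeight];
            mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
        }

        @Override
        protected Bitmap processFrame(byte[] data) {
            // An NV21 frame starts with the Y (luma) plane, one byte per pixel.
            for (int i = 0; i < mPixels.length; i++) {
                int y = data[i] & 0xFF;
                mPixels[i] = 0xFF000000 | (y << 16) | (y << 8) | y;
            }
            mBitmap.setPixels(mPixels, 0, getFrameWidth(), 0, 0, getFrameWidth(), getFrameHeight());
            return mBitmap;
        }

        @Override
        protected void onPreviewStopped() {
            // The returned Bitmap is owned by the subclass, so recycle it here.
            if (mBitmap != null) {
                mBitmap.recycle();
                mBitmap = null;
            }
        }
    }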
@@ -1,4 +1,4 @@
 <?xml version="1.0" encoding="utf-8"?>
 <resources>
-    <string name="app_name">Tutorial 2 Advanced - 2. Mix Java+Native OpenCV</string>
+    <string name="app_name">Tutorial 4 (Advanced) - Mix Java+Native OpenCV</string>
 </resources>

@@ -1,6 +1,8 @@
 package org.opencv.samples.tutorial4;

 import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.DialogInterface;
 import android.os.Bundle;
 import android.util.Log;
 import android.view.Menu;

@@ -21,6 +23,31 @@ public class Sample4Mixed extends Activity {
         Log.i(TAG, "Instantiated new " + this.getClass());
     }

+    @Override
+    protected void onPause() {
+        Log.i(TAG, "onPause");
+        super.onPause();
+        mView.releaseCamera();
+    }
+
+    @Override
+    protected void onResume() {
+        Log.i(TAG, "onResume");
+        super.onResume();
+        if( !mView.openCamera() ) {
+            AlertDialog ad = new AlertDialog.Builder(this).create();
+            ad.setCancelable(false); // This blocks the 'BACK' button
+            ad.setMessage("Fatal error: can't open camera!");
+            ad.setButton("OK", new DialogInterface.OnClickListener() {
+                public void onClick(DialogInterface dialog, int which) {
+                    dialog.dismiss();
+                    finish();
+                }
+            });
+            ad.show();
+        }
+    }
+
     /** Called when the activity is first created. */
     @Override
     public void onCreate(Bundle savedInstanceState) {

@@ -1,14 +1,13 @@
 package org.opencv.samples.tutorial4;

 import org.opencv.android.Utils;
-import org.opencv.core.Mat;
 import org.opencv.core.CvType;
+import org.opencv.core.Mat;
 import org.opencv.imgproc.Imgproc;

 import android.content.Context;
 import android.graphics.Bitmap;
 import android.util.Log;
-import android.view.SurfaceHolder;

 class Sample4View extends SampleViewBase {

@@ -30,7 +29,7 @@ class Sample4View extends SampleViewBase {
     }

     @Override
-    protected void onPreviewStared(int previewWidtd, int previewHeight) {
+    protected void onPreviewStarted(int previewWidtd, int previewHeight) {
         // initialize Mats before usage
         mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
         mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());

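The mYuv allocation above is worth a note: an NV21 preview frame is a full-resolution Y (luma) plane followed by a half-height plane of interleaved V/U samples, so the whole frame fits a single-channel Mat of (height + height/2) rows, and the luma plane can be viewed without copying via submat(). A short illustration with example sizes (assuming the Mat, CvType and Imgproc imports from the hunk above):

    int w = 640, h = 480;                              // example preview size
    Mat yuv  = new Mat(h + h / 2, w, CvType.CV_8UC1);  // 640 * 480 * 3/2 = 460800 bytes
    Mat gray = yuv.submat(0, h, 0, w);                 // zero-copy view of the luma plane
    Mat rgba = new Mat();
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGB, 4); // 4-channel RGBA output, as in the samples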
@@ -2,7 +2,6 @@ package org.opencv.samples.tutorial4;
 import java.io.IOException;
 import java.util.List;

 import android.content.Context;
 import android.graphics.Bitmap;
 import android.graphics.Canvas;

@@ -49,62 +48,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
         mCamera.setPreviewDisplay(null);
     }

-    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
-        Log.i(TAG, "surfaceCreated");
-        if (mCamera != null) {
-            Camera.Parameters params = mCamera.getParameters();
-            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
-            mFrameWidth = width;
-            mFrameHeight = height;
-
-            // selecting optimal camera preview size
-            {
-                int minDiff = Integer.MAX_VALUE;
-                for (Camera.Size size : sizes) {
-                    if (Math.abs(size.height - height) < minDiff) {
-                        mFrameWidth = size.width;
-                        mFrameHeight = size.height;
-                        minDiff = Math.abs(size.height - height);
-                    }
-                }
-            }
-
-            params.setPreviewSize(getFrameWidth(), getFrameHeight());
-
-            List<String> FocusModes = params.getSupportedFocusModes();
-            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
-            {
-                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
-            }
-
-            mCamera.setParameters(params);
-
-            /* Now allocate the buffer */
-            params = mCamera.getParameters();
-            int size = params.getPreviewSize().width * params.getPreviewSize().height;
-            size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
-            mBuffer = new byte[size];
-            /* The buffer where the current frame will be coppied */
-            mFrame = new byte [size];
-            mCamera.addCallbackBuffer(mBuffer);
-
-            try {
-                setPreview();
-            } catch (IOException e) {
-                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
-            }
-
-            /* Notify that the preview is about to be started and deliver preview size */
-            onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
-
-            /* Now we can start a preview */
-            mCamera.startPreview();
-        }
-    }
-
-    public void surfaceCreated(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceCreated");
+    public boolean openCamera() {
+        Log.i(TAG, "openCamera");
+        releaseCamera();
         mCamera = Camera.open();
+        if(mCamera == null) {
+            Log.e(TAG, "Can't open camera!");
+            return false;
+        }

         mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
             public void onPreviewFrame(byte[] data, Camera camera) {
@@ -115,15 +66,14 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
                 camera.addCallbackBuffer(mBuffer);
             }
         });
-
-        (new Thread(this)).start();
+        return true;
     }

-    public void surfaceDestroyed(SurfaceHolder holder) {
-        Log.i(TAG, "surfaceDestroyed");
+    public void releaseCamera() {
+        Log.i(TAG, "releaseCamera");
         mThreadRun = false;
-        if (mCamera != null) {
-            synchronized (this) {
+        synchronized (this) {
+            if (mCamera != null) {
                 mCamera.stopPreview();
                 mCamera.setPreviewCallback(null);
                 mCamera.release();
@@ -132,22 +82,94 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
             }
         }
         onPreviewStopped();
     }

+    public void setupCamera(int width, int height) {
+        Log.i(TAG, "setupCamera");
+        synchronized (this) {
+            if (mCamera != null) {
+                Camera.Parameters params = mCamera.getParameters();
+                List<Camera.Size> sizes = params.getSupportedPreviewSizes();
+                mFrameWidth = width;
+                mFrameHeight = height;
+
+                // selecting optimal camera preview size
+                {
+                    int minDiff = Integer.MAX_VALUE;
+                    for (Camera.Size size : sizes) {
+                        if (Math.abs(size.height - height) < minDiff) {
+                            mFrameWidth = size.width;
+                            mFrameHeight = size.height;
+                            minDiff = Math.abs(size.height - height);
+                        }
+                    }
+                }
+
+                params.setPreviewSize(getFrameWidth(), getFrameHeight());
+
+                List<String> FocusModes = params.getSupportedFocusModes();
+                if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
+                {
+                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+                }
+
+                mCamera.setParameters(params);
+
+                /* Now allocate the buffer */
+                params = mCamera.getParameters();
+                int size = params.getPreviewSize().width * params.getPreviewSize().height;
+                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
+                mBuffer = new byte[size];
+                /* The buffer where the current frame will be copied */
+                mFrame = new byte [size];
+                mCamera.addCallbackBuffer(mBuffer);
+
+                try {
+                    setPreview();
+                } catch (IOException e) {
+                    Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
+                }
+
+                /* Notify that the preview is about to be started and deliver preview size */
+                onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
+
+                /* Now we can start a preview */
+                mCamera.startPreview();
+            }
+        }
+    }
+
+    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
+        Log.i(TAG, "surfaceChanged");
+        setupCamera(width, height);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceCreated");
+        (new Thread(this)).start();
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Log.i(TAG, "surfaceDestroyed");
+        releaseCamera();
+    }
+
     /* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
     protected abstract Bitmap processFrame(byte[] data);

     /**
-     * This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
+     * This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called
      * It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
      * @param previewWidth - the width of the preview frames that will be delivered via processFrame
      * @param previewHeight - the height of the preview frames that will be delivered via processFrame
      */
-    protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
+    protected abstract void onPreviewStarted(int previewWidtd, int previewHeight);

     /**
      * This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
      * If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
-     * Any other resourcses used during the preview can be released.
+     * Any other resources used during the preview can be released.
      */
     protected abstract void onPreviewStopped();

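Both copies of setupCamera() above perform two small computations: choose the supported preview size whose height is closest to the incoming surface height, and size the callback buffer as width * height * bitsPerPixel / 8. Restated as a plain-Java sketch (a hypothetical helper, not part of the commit; Camera.Size values are modeled as int pairs so the sketch runs off-device):

    import java.util.List;

    final class PreviewMath {

        /** Pick the supported {width, height} pair whose height is closest to surfaceHeight. */
        static int[] bestPreviewSize(List<int[]> supported, int surfaceHeight) {
            int bestW = 0, bestH = 0, minDiff = Integer.MAX_VALUE;
            for (int[] s : supported) {
                int diff = Math.abs(s[1] - surfaceHeight);
                if (diff < minDiff) {
                    bestW = s[0];
                    bestH = s[1];
                    minDiff = diff;
                }
            }
            return new int[] { bestW, bestH };
        }

        /** Callback-buffer size in bytes: width * height * bitsPerPixel / 8. */
        static int bufferSize(int width, int height, int bitsPerPixel) {
            return width * height * bitsPerPixel / 8;
        }

        public static void main(String[] args) {
            // NV21, the default preview format, is 12 bits per pixel,
            // so a 640x480 frame needs 640 * 480 * 12 / 8 = 460800 bytes.
            System.out.println(bufferSize(640, 480, 12)); // prints 460800
        }
    }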