mirror of https://github.com/opencv/opencv.git
Merge pull request #80 from apavlenko/android-samples-refactor
commit
080b9bbcd3
56 changed files with 2046 additions and 3171 deletions
@ -0,0 +1,335 @@ |
||||
package org.opencv.android; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.Highgui; |
||||
|
||||
import android.app.Activity; |
||||
import android.app.AlertDialog; |
||||
import android.content.Context; |
||||
import android.content.DialogInterface; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.util.AttributeSet; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
/** |
||||
* This is a basic class, implementing the interaction with Camera and OpenCV library. |
||||
* The main responsibility of it - is to control when camera can be enabled, process the frame, |
||||
* call external listener to make any adjustments to the frame and then draw the resulting |
||||
* frame to the screen. |
||||
* The clients shall implement CvCameraViewListener |
||||
* TODO: add method to control the format in which the frames will be delivered to CvCameraViewListener |
||||
*/ |
||||
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback { |
||||
|
||||
private static final int MAX_UNSPECIFIED = -1; |
||||
|
||||
protected int mFrameWidth; |
||||
protected int mFrameHeight; |
||||
|
||||
protected int mMaxHeight; |
||||
protected int mMaxWidth; |
||||
|
||||
protected int mPreviewFormat = Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA; |
||||
|
||||
private Bitmap mCacheBitmap; |
||||
|
||||
public CameraBridgeViewBase(Context context, AttributeSet attrs) { |
||||
super(context, attrs); |
||||
getHolder().addCallback(this); |
||||
mMaxWidth = MAX_UNSPECIFIED; |
||||
mMaxHeight = MAX_UNSPECIFIED; |
||||
} |
||||
|
||||
public interface CvCameraViewListener { |
||||
/** |
||||
* This method is invoked when camera preview has started. After this method is invoked |
||||
* the frames will start to be delivered to client via the onCameraFrame() callback. |
||||
* @param width - the width of the frames that will be delivered |
||||
* @param height - the height of the frames that will be delivered |
||||
*/ |
||||
public void onCameraViewStarted(int width, int height); |
||||
|
||||
/** |
||||
* This method is invoked when camera preview has been stopped for some reason. |
||||
* No frames will be delivered via onCameraFrame() callback after this method is called. |
||||
*/ |
||||
public void onCameraViewStopped(); |
||||
|
||||
/** |
||||
* This method is invoked when delivery of the frame needs to be done. |
||||
* The returned values - is a modified frame which needs to be displayed on the screen. |
||||
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc) |
||||
*/ |
||||
public Mat onCameraFrame(Mat inputFrame); |
||||
|
||||
} |
||||
|
||||
private static final int STOPPED = 0; |
||||
private static final int STARTED = 1; |
||||
|
||||
private static final String TAG = "CameraBridge"; |
||||
|
||||
private CvCameraViewListener mListener; |
||||
private int mState = STOPPED; |
||||
|
||||
private boolean mEnabled; |
||||
private boolean mSurfaceExist; |
||||
|
||||
private Object mSyncObject = new Object(); |
||||
|
||||
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) { |
||||
synchronized(mSyncObject) { |
||||
if (!mSurfaceExist) { |
||||
mSurfaceExist = true; |
||||
checkCurrentState(); |
||||
} else { |
||||
/** Surface changed. We need to stop camera and restart with new parameters */ |
||||
/* Pretend that old surface has been destroyed */ |
||||
mSurfaceExist = false; |
||||
checkCurrentState(); |
||||
/* Now use new surface. Say we have it now */ |
||||
mSurfaceExist = true; |
||||
checkCurrentState(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
/* Do nothing. Wait until surfaceChanged delivered */ |
||||
} |
||||
|
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
synchronized(mSyncObject) { |
||||
mSurfaceExist = false; |
||||
checkCurrentState(); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* This method is provided for clients, so they can enable the camera connection. |
||||
* The actual onCameraViewStarted callback will be delivered only after both this method is called and surface is available |
||||
*/ |
||||
public void enableView() { |
||||
synchronized(mSyncObject) { |
||||
mEnabled = true; |
||||
checkCurrentState(); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* This method is provided for clients, so they can disable camera connection and stop |
||||
* the delivery of frames even though the surface view itself is not destroyed and still stays on the scren |
||||
*/ |
||||
public void disableView() { |
||||
synchronized(mSyncObject) { |
||||
mEnabled = false; |
||||
checkCurrentState(); |
||||
} |
||||
} |
||||
|
||||
public void setCvCameraViewListener(CvCameraViewListener listener) { |
||||
mListener = listener; |
||||
} |
||||
|
||||
/** |
||||
* This method sets the maximum size that camera frame is allowed to be. When selecting |
||||
* size - the biggest size which less or equal the size set will be selected. |
||||
* As an example - we set setMaxFrameSize(200,200) and we have 176x152 and 320x240 sizes. The |
||||
* preview frame will be selected with 176x152 size. |
||||
* This method is useful when need to restrict the size of preview frame for some reason (for example for video recording) |
||||
* @param maxWidth - the maximum width allowed for camera frame. |
||||
* @param maxHeight - the maximum height allowed for camera frame |
||||
*/ |
||||
public void setMaxFrameSize(int maxWidth, int maxHeight) { |
||||
mMaxWidth = maxWidth; |
||||
mMaxHeight = maxHeight; |
||||
} |
||||
|
||||
public void SetCaptureFormat(int format) |
||||
{ |
||||
mPreviewFormat = format; |
||||
} |
||||
|
||||
/** |
||||
* Called when mSyncObject lock is held |
||||
*/ |
||||
private void checkCurrentState() { |
||||
int targetState; |
||||
|
||||
if (mEnabled && mSurfaceExist) { |
||||
targetState = STARTED; |
||||
} else { |
||||
targetState = STOPPED; |
||||
} |
||||
|
||||
if (targetState != mState) { |
||||
/* The state change detected. Need to exit the current state and enter target state */ |
||||
processExitState(mState); |
||||
mState = targetState; |
||||
processEnterState(mState); |
||||
} |
||||
} |
||||
|
||||
private void processEnterState(int state) { |
||||
switch(state) { |
||||
case STARTED: |
||||
onEnterStartedState(); |
||||
if (mListener != null) { |
||||
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight); |
||||
} |
||||
break; |
||||
case STOPPED: |
||||
onEnterStoppedState(); |
||||
if (mListener != null) { |
||||
mListener.onCameraViewStopped(); |
||||
} |
||||
break; |
||||
}; |
||||
} |
||||
|
||||
private void processExitState(int state) { |
||||
switch(state) { |
||||
case STARTED: |
||||
onExitStartedState(); |
||||
break; |
||||
case STOPPED: |
||||
onExitStoppedState(); |
||||
break; |
||||
}; |
||||
} |
||||
|
||||
private void onEnterStoppedState() { |
||||
/* nothing to do */ |
||||
} |
||||
|
||||
private void onExitStoppedState() { |
||||
/* nothing to do */ |
||||
} |
||||
|
||||
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
|
||||
// Bitmap must be constructed before surface
|
||||
private void onEnterStartedState() { |
||||
/* Connect camera */ |
||||
if (!connectCamera(getWidth(), getHeight())) { |
||||
AlertDialog ad = new AlertDialog.Builder(getContext()).create(); |
||||
ad.setCancelable(false); // This blocks the 'BACK' button
|
||||
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed."); |
||||
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() { |
||||
public void onClick(DialogInterface dialog, int which) { |
||||
dialog.dismiss(); |
||||
((Activity) getContext()).finish(); |
||||
} |
||||
}); |
||||
ad.show(); |
||||
|
||||
} |
||||
} |
||||
|
||||
private void onExitStartedState() { |
||||
disconnectCamera(); |
||||
if (mCacheBitmap != null) { |
||||
mCacheBitmap.recycle(); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* This method shall be called by the subclasses when they have valid |
||||
* object and want it to be delivered to external client (via callback) and |
||||
* then displayed on the screen. |
||||
* @param frame - the current frame to be delivered |
||||
*/ |
||||
protected void deliverAndDrawFrame(Mat frame) { |
||||
Mat modified; |
||||
|
||||
if (mListener != null) { |
||||
modified = mListener.onCameraFrame(frame); |
||||
} else { |
||||
modified = frame; |
||||
} |
||||
|
||||
boolean bmpValid = true; |
||||
if (modified != null) { |
||||
try { |
||||
Utils.matToBitmap(modified, mCacheBitmap); |
||||
} catch(Exception e) { |
||||
Log.e(TAG, "Mat type: " + modified); |
||||
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight()); |
||||
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage()); |
||||
bmpValid = false; |
||||
} |
||||
} |
||||
|
||||
if (bmpValid && mCacheBitmap != null) { |
||||
Canvas canvas = getHolder().lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(mCacheBitmap, (canvas.getWidth() - mCacheBitmap.getWidth()) / 2, (canvas.getHeight() - mCacheBitmap.getHeight()) / 2, null); |
||||
getHolder().unlockCanvasAndPost(canvas); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* This method is invoked shall perform concrete operation to initialize the camera. |
||||
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be |
||||
* initialized with the size of the Camera frames that will be delivered to external processor. |
||||
* @param width - the width of this SurfaceView |
||||
* @param height - the height of this SurfaceView |
||||
*/ |
||||
protected abstract boolean connectCamera(int width, int height); |
||||
|
||||
/** |
||||
* Disconnects and release the particular camera object being connected to this surface view. |
||||
* Called when syncObject lock is held |
||||
*/ |
||||
protected abstract void disconnectCamera(); |
||||
|
||||
// NOTE: On Android 4.1.x the function must be called before SurfaceTextre constructor!
|
||||
protected void AllocateCache() |
||||
{ |
||||
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888); |
||||
} |
||||
|
||||
public interface ListItemAccessor { |
||||
public int getWidth(Object obj); |
||||
public int getHeight(Object obj); |
||||
}; |
||||
|
||||
/** |
||||
* This helper method can be called by subclasses to select camera preview size. |
||||
* It goes over the list of the supported preview sizes and selects the maximum one which |
||||
* fits both values set via setMaxFrameSize() and surface frame allocated for this view |
||||
* @param supportedSizes |
||||
* @param surfaceWidth |
||||
* @param surfaceHeight |
||||
* @return |
||||
*/ |
||||
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) { |
||||
int calcWidth = 0; |
||||
int calcHeight = 0; |
||||
|
||||
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth; |
||||
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight; |
||||
|
||||
for (Object size : supportedSizes) { |
||||
int width = accessor.getWidth(size); |
||||
int height = accessor.getHeight(size); |
||||
|
||||
if (width <= maxAllowedWidth && height <= maxAllowedHeight) { |
||||
if (width >= calcWidth && height >= calcHeight) { |
||||
calcWidth = (int) width; |
||||
calcHeight = (int) height; |
||||
} |
||||
} |
||||
} |
||||
|
||||
return new Size(calcWidth, calcHeight); |
||||
} |
||||
} |
@ -0,0 +1,242 @@ |
||||
package org.opencv.android; |
||||
|
||||
import java.io.IOException; |
||||
import java.util.List; |
||||
|
||||
import android.annotation.TargetApi; |
||||
import android.content.Context; |
||||
import android.graphics.ImageFormat; |
||||
import android.graphics.SurfaceTexture; |
||||
import android.hardware.Camera; |
||||
import android.hardware.Camera.PreviewCallback; |
||||
import android.os.Build; |
||||
import android.util.AttributeSet; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
|
||||
import org.opencv.core.CvType; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.Highgui; |
||||
import org.opencv.imgproc.Imgproc; |
||||
|
||||
/** |
||||
* This class is an implementation of the Bridge View between OpenCv and JAVA Camera. |
||||
* This class relays on the functionality available in base class and only implements |
||||
* required functions: |
||||
* connectCamera - opens Java camera and sets the PreviewCallback to be delivered. |
||||
* disconnectCamera - closes the camera and stops preview. |
||||
* When frame is delivered via callback from Camera - it processed via OpenCV to be |
||||
* converted to RGBA32 and then passed to the external callback for modifications if required. |
||||
*/ |
||||
public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback { |
||||
|
||||
private static final int MAGIC_TEXTURE_ID = 10; |
||||
private static final String TAG = "JavaCameraView"; |
||||
|
||||
private Mat mBaseMat; |
||||
private byte mBuffer[]; |
||||
|
||||
private Thread mThread; |
||||
private boolean mStopThread; |
||||
|
||||
public static class JavaCameraSizeAccessor implements ListItemAccessor { |
||||
|
||||
public int getWidth(Object obj) { |
||||
Camera.Size size = (Camera.Size) obj; |
||||
return size.width; |
||||
} |
||||
|
||||
public int getHeight(Object obj) { |
||||
Camera.Size size = (Camera.Size) obj; |
||||
return size.height; |
||||
} |
||||
} |
||||
|
||||
private Camera mCamera; |
||||
|
||||
public JavaCameraView(Context context, AttributeSet attrs) { |
||||
super(context, attrs); |
||||
Log.d(TAG, "Java camera view ctor"); |
||||
} |
||||
|
||||
@TargetApi(11) |
||||
protected boolean initializeCamera(int width, int height) { |
||||
Log.d(TAG, "Initialize java camera"); |
||||
synchronized (this) { |
||||
mCamera = null; |
||||
|
||||
Log.d(TAG, "Trying to open camera with old open()"); |
||||
try { |
||||
mCamera = Camera.open(); |
||||
} |
||||
catch (Exception e){ |
||||
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage()); |
||||
} |
||||
|
||||
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { |
||||
boolean connected = false; |
||||
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { |
||||
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")"); |
||||
try { |
||||
mCamera = Camera.open(camIdx); |
||||
connected = true; |
||||
} catch (RuntimeException e) { |
||||
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage()); |
||||
} |
||||
if (connected) break; |
||||
} |
||||
} |
||||
|
||||
if (mCamera == null) |
||||
return false; |
||||
|
||||
/* Now set camera parameters */ |
||||
try { |
||||
Camera.Parameters params = mCamera.getParameters(); |
||||
Log.d(TAG, "getSupportedPreviewSizes()"); |
||||
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes(); |
||||
|
||||
/* Select the size that fits surface considering maximum size allowed */ |
||||
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height); |
||||
|
||||
params.setPreviewFormat(ImageFormat.NV21); |
||||
params.setPreviewSize((int)frameSize.width, (int)frameSize.height); |
||||
|
||||
List<String> FocusModes = params.getSupportedFocusModes(); |
||||
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) |
||||
{ |
||||
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); |
||||
} |
||||
|
||||
mCamera.setParameters(params); |
||||
params = mCamera.getParameters(); |
||||
|
||||
mFrameWidth = params.getPreviewSize().width; |
||||
mFrameHeight = params.getPreviewSize().height; |
||||
|
||||
int size = mFrameWidth * mFrameHeight; |
||||
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8; |
||||
mBuffer = new byte[size]; |
||||
|
||||
mCamera.addCallbackBuffer(mBuffer); |
||||
mCamera.setPreviewCallbackWithBuffer(this); |
||||
|
||||
mBaseMat = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1); |
||||
|
||||
AllocateCache(); |
||||
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { |
||||
SurfaceTexture tex = new SurfaceTexture(MAGIC_TEXTURE_ID); |
||||
getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); |
||||
mCamera.setPreviewTexture(tex); |
||||
} else |
||||
mCamera.setPreviewDisplay(null); |
||||
} catch (IOException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
|
||||
/* Finally we are ready to start the preview */ |
||||
Log.d(TAG, "startPreview"); |
||||
mCamera.startPreview(); |
||||
} |
||||
|
||||
return true; |
||||
} |
||||
|
||||
protected void releaseCamera() { |
||||
synchronized (this) { |
||||
mCamera.stopPreview(); |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
protected boolean connectCamera(int width, int height) { |
||||
|
||||
/* 1. We need to instantiate camera |
||||
* 2. We need to start thread which will be getting frames |
||||
*/ |
||||
/* First step - initialize camera connection */ |
||||
Log.d(TAG, "Connecting to camera"); |
||||
if (!initializeCamera(getWidth(), getHeight())) |
||||
return false; |
||||
|
||||
/* now we can start update thread */ |
||||
Log.d(TAG, "Starting processing thread"); |
||||
mStopThread = false; |
||||
mThread = new Thread(new CameraWorker()); |
||||
mThread.start(); |
||||
|
||||
return true; |
||||
} |
||||
|
||||
protected void disconnectCamera() { |
||||
/* 1. We need to stop thread which updating the frames |
||||
* 2. Stop camera and release it |
||||
*/ |
||||
Log.d(TAG, "Disconnecting from camera"); |
||||
try { |
||||
mStopThread = true; |
||||
Log.d(TAG, "Notify thread"); |
||||
synchronized (this) { |
||||
this.notify(); |
||||
} |
||||
Log.d(TAG, "Wating for thread"); |
||||
mThread.join(); |
||||
} catch (InterruptedException e) { |
||||
e.printStackTrace(); |
||||
} finally { |
||||
mThread = null; |
||||
} |
||||
|
||||
/* Now release camera */ |
||||
releaseCamera(); |
||||
} |
||||
|
||||
public void onPreviewFrame(byte[] frame, Camera arg1) { |
||||
Log.i(TAG, "Preview Frame received. Need to create MAT and deliver it to clients"); |
||||
Log.i(TAG, "Frame size is " + frame.length); |
||||
synchronized (this) |
||||
{ |
||||
mBaseMat.put(0, 0, frame); |
||||
this.notify(); |
||||
} |
||||
if (mCamera != null) |
||||
mCamera.addCallbackBuffer(mBuffer); |
||||
} |
||||
|
||||
private class CameraWorker implements Runnable { |
||||
|
||||
public void run() { |
||||
do { |
||||
synchronized (JavaCameraView.this) { |
||||
try { |
||||
JavaCameraView.this.wait(); |
||||
} catch (InterruptedException e) { |
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
if (!mStopThread) { |
||||
Mat frameMat = new Mat(); |
||||
switch (mPreviewFormat) { |
||||
case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA: |
||||
Imgproc.cvtColor(mBaseMat, frameMat, Imgproc.COLOR_YUV2RGBA_NV21, 4); |
||||
break; |
||||
case Highgui.CV_CAP_ANDROID_GREY_FRAME: |
||||
frameMat = mBaseMat.submat(0, mFrameHeight, 0, mFrameWidth); |
||||
break; |
||||
default: |
||||
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!"); |
||||
}; |
||||
deliverAndDrawFrame(frameMat); |
||||
frameMat.release(); |
||||
} |
||||
} while (!mStopThread); |
||||
Log.d(TAG, "Finish processing thread"); |
||||
} |
||||
} |
||||
} |
@ -0,0 +1,145 @@ |
||||
package org.opencv.android; |
||||
|
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.Highgui; |
||||
import org.opencv.highgui.VideoCapture; |
||||
|
||||
import android.content.Context; |
||||
import android.util.AttributeSet; |
||||
import android.util.Log; |
||||
|
||||
/** |
||||
* This class is an implementation of a bridge between SurfaceView and native OpenCV camera. |
||||
* Due to the big amount of work done, by the base class this child is only responsible |
||||
* for creating camera, destroying camera and delivering frames while camera is enabled |
||||
*/ |
||||
public class NativeCameraView extends CameraBridgeViewBase { |
||||
|
||||
public static final String TAG = "NativeCameraView"; |
||||
private boolean mStopThread; |
||||
private Thread mThread; |
||||
private VideoCapture mCamera; |
||||
|
||||
public NativeCameraView(Context context, AttributeSet attrs) { |
||||
super(context, attrs); |
||||
} |
||||
|
||||
@Override |
||||
protected boolean connectCamera(int width, int height) { |
||||
|
||||
/* 1. We need to instantiate camera |
||||
* 2. We need to start thread which will be getting frames |
||||
*/ |
||||
/* First step - initialize camera connection */ |
||||
if (!initializeCamera(getWidth(), getHeight())) |
||||
return false; |
||||
|
||||
/* now we can start update thread */ |
||||
mThread = new Thread(new CameraWorker()); |
||||
mThread.start(); |
||||
|
||||
return true; |
||||
} |
||||
|
||||
@Override |
||||
protected void disconnectCamera() { |
||||
/* 1. We need to stop thread which updating the frames |
||||
* 2. Stop camera and release it |
||||
*/ |
||||
try { |
||||
mStopThread = true; |
||||
mThread.join(); |
||||
} catch (InterruptedException e) { |
||||
e.printStackTrace(); |
||||
} finally { |
||||
mThread = null; |
||||
mStopThread = false; |
||||
} |
||||
|
||||
/* Now release camera */ |
||||
releaseCamera(); |
||||
} |
||||
|
||||
public static class OpenCvSizeAccessor implements ListItemAccessor { |
||||
|
||||
public int getWidth(Object obj) { |
||||
Size size = (Size)obj; |
||||
return (int)size.width; |
||||
} |
||||
|
||||
public int getHeight(Object obj) { |
||||
Size size = (Size)obj; |
||||
return (int)size.height; |
||||
} |
||||
|
||||
} |
||||
|
||||
private boolean initializeCamera(int width, int height) { |
||||
synchronized (this) { |
||||
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); |
||||
|
||||
if (mCamera == null) |
||||
return false; |
||||
|
||||
//TODO: improve error handling
|
||||
|
||||
java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes(); |
||||
|
||||
/* Select the size that fits surface considering maximum size allowed */ |
||||
Size frameSize = calculateCameraFrameSize(sizes, new OpenCvSizeAccessor(), width, height); |
||||
|
||||
mFrameWidth = (int)frameSize.width; |
||||
mFrameHeight = (int)frameSize.height; |
||||
|
||||
AllocateCache(); |
||||
|
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width); |
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height); |
||||
} |
||||
|
||||
Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")"); |
||||
|
||||
return true; |
||||
} |
||||
|
||||
private void releaseCamera() { |
||||
synchronized (this) { |
||||
if (mCamera != null) { |
||||
mCamera.release(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
private class CameraWorker implements Runnable { |
||||
|
||||
private Mat mRgba = new Mat(); |
||||
private Mat mGray = new Mat(); |
||||
|
||||
public void run() { |
||||
do { |
||||
if (!mCamera.grab()) { |
||||
Log.e(TAG, "Camera frame grab failed"); |
||||
break; |
||||
} |
||||
|
||||
switch (mPreviewFormat) { |
||||
case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA: |
||||
{ |
||||
mCamera.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); |
||||
deliverAndDrawFrame(mRgba); |
||||
} break; |
||||
case Highgui.CV_CAP_ANDROID_GREY_FRAME: |
||||
mCamera.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME); |
||||
deliverAndDrawFrame(mGray); |
||||
break; |
||||
default: |
||||
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!"); |
||||
} |
||||
|
||||
} while (!mStopThread); |
||||
|
||||
} |
||||
} |
||||
|
||||
} |
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.JavaCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/puzzle_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -0,0 +1,6 @@ |
||||
<menu xmlns:android="http://schemas.android.com/apk/res/android"> |
||||
<item android:id="@+id/menu_start_new_game" |
||||
android:title="@string/menu_start_new_game" |
||||
android:orderInCategory="100" /> |
||||
<item android:id="@+id/menu_toggle_tile_numbers" android:title="@string/menu_toggle_tile_numbers"></item> |
||||
</menu> |
@ -1,4 +1,6 @@ |
||||
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">OCV 15 Puzzle</string>
    <string name="menu_toggle_tile_numbers">Show/hide tile numbers</string>
    <string name="menu_start_new_game">Start new game</string>
</resources>
@ -0,0 +1,130 @@ |
||||
package org.opencv.samples.puzzle15; |
||||
|
||||
import org.opencv.android.BaseLoaderCallback; |
||||
import org.opencv.android.LoaderCallbackInterface; |
||||
import org.opencv.android.OpenCVLoader; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener; |
||||
import org.opencv.android.JavaCameraView; |
||||
|
||||
import android.os.Bundle; |
||||
import android.app.Activity; |
||||
import android.util.Log; |
||||
import android.view.Menu; |
||||
import android.view.MenuItem; |
||||
import android.view.MotionEvent; |
||||
import android.view.Window; |
||||
import android.view.View; |
||||
|
||||
public class Puzzle15Activity extends Activity implements CvCameraViewListener, View.OnTouchListener { |
||||
|
||||
private static final String TAG = "Sample::Puzzle15::Activity"; |
||||
|
||||
private JavaCameraView mOpenCvCameraView; |
||||
private Puzzle15Processor mPuzzle15; |
||||
|
||||
private int mGameWidth; |
||||
private int mGameHeight; |
||||
|
||||
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) { |
||||
|
||||
@Override |
||||
public void onManagerConnected(int status) { |
||||
switch (status) { |
||||
case LoaderCallbackInterface.SUCCESS: |
||||
{ |
||||
Log.i(TAG, "OpenCV loaded successfully"); |
||||
|
||||
/* Now enable camera view to start receiving frames */ |
||||
mOpenCvCameraView.setOnTouchListener(Puzzle15Activity.this); |
||||
mOpenCvCameraView.enableView(); |
||||
} break; |
||||
default: |
||||
{ |
||||
super.onManagerConnected(status); |
||||
} break; |
||||
} |
||||
} |
||||
}; |
||||
|
||||
@Override |
||||
public void onCreate(Bundle savedInstanceState) { |
||||
super.onCreate(savedInstanceState); |
||||
requestWindowFeature(Window.FEATURE_NO_TITLE); |
||||
|
||||
setContentView(R.layout.activity_puzzle15); |
||||
|
||||
mOpenCvCameraView = (JavaCameraView) findViewById(R.id.puzzle_activity_surface_view); |
||||
mOpenCvCameraView.setCvCameraViewListener(this); |
||||
mPuzzle15 = new Puzzle15Processor(); |
||||
mPuzzle15.prepareNewGame(); |
||||
} |
||||
|
||||
@Override |
||||
public void onPause() |
||||
{ |
||||
mOpenCvCameraView.disableView(); |
||||
super.onPause(); |
||||
} |
||||
|
||||
@Override |
||||
public void onResume() |
||||
{ |
||||
super.onResume(); |
||||
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mLoaderCallback); |
||||
} |
||||
|
||||
public void onDestroy() { |
||||
super.onDestroy(); |
||||
mOpenCvCameraView.disableView(); |
||||
} |
||||
|
||||
@Override |
||||
public boolean onCreateOptionsMenu(Menu menu) { |
||||
getMenuInflater().inflate(R.menu.activity_puzzle15, menu); |
||||
return true; |
||||
} |
||||
|
||||
@Override |
||||
public boolean onOptionsItemSelected(MenuItem item) { |
||||
Log.i(TAG, "Menu Item selected " + item); |
||||
if (item.getItemId() == R.id.menu_start_new_game) { |
||||
/* We need to start new game */ |
||||
mPuzzle15.prepareNewGame(); |
||||
} else if (item.getItemId() == R.id.menu_toggle_tile_numbers) { |
||||
/* We need to enable or disable drawing of the tile numbers */ |
||||
mPuzzle15.toggleTileNumbers(); |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
public void onCameraViewStarted(int width, int height) { |
||||
mGameWidth = width; |
||||
mGameHeight = height; |
||||
mPuzzle15.prepareGameSize(width, height); |
||||
} |
||||
|
||||
public void onCameraViewStopped() { |
||||
} |
||||
|
||||
public Mat onCameraFrame(Mat inputFrame) { |
||||
return mPuzzle15.puzzleFrame(inputFrame); |
||||
} |
||||
|
||||
public boolean onTouch(View view, MotionEvent event) { |
||||
int xpos, ypos; |
||||
|
||||
xpos = (view.getWidth() - mGameWidth) / 2; |
||||
xpos = (int)event.getX() - xpos; |
||||
|
||||
ypos = (view.getHeight() - mGameHeight) / 2; |
||||
ypos = (int)event.getY() - ypos; |
||||
|
||||
if (xpos >=0 && xpos <= mGameWidth && ypos >=0 && ypos <= mGameHeight) { |
||||
/* click is inside the picture. Deliver this event to processor */ |
||||
mPuzzle15.deliverTouchEvent(xpos, ypos); |
||||
} |
||||
|
||||
return false; |
||||
} |
||||
} |
@ -0,0 +1,193 @@ |
||||
package org.opencv.samples.puzzle15; |
||||
|
||||
import org.opencv.core.Core; |
||||
import org.opencv.core.CvType; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.Scalar; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.core.Point; |
||||
|
||||
import android.util.Log; |
||||
|
||||
|
||||
/** |
||||
* This class is a controller for puzzle game. |
||||
* It converts the image from Camera into the shuffled image |
||||
*/ |
||||
public class Puzzle15Processor { |
||||
|
||||
private static final int GRID_SIZE = 4; |
||||
private static final int GRID_AREA = GRID_SIZE * GRID_SIZE; |
||||
private static final int GRID_EMPTY_INDEX = GRID_AREA - 1; |
||||
private static final String TAG = "Puzzle15Processor"; |
||||
|
||||
private int[] mIndexes; |
||||
private int[] mTextWidths; |
||||
private int[] mTextHeights; |
||||
|
||||
private Mat mRgba15; |
||||
|
||||
private Mat[] mCells; |
||||
private Mat[] mCells15; |
||||
private boolean mShowTileNumbers = true; |
||||
|
||||
public Puzzle15Processor() { |
||||
mTextWidths = new int[GRID_AREA]; |
||||
mTextHeights = new int[GRID_AREA]; |
||||
|
||||
mIndexes = new int [GRID_AREA]; |
||||
|
||||
for (int i = 0; i < GRID_AREA; i++) |
||||
mIndexes[i] = i; |
||||
} |
||||
|
||||
/* this method is intended to make processor prepared for a new game */ |
||||
public synchronized void prepareNewGame() { |
||||
do { |
||||
shuffle(mIndexes); |
||||
} while (!isPuzzleSolvable()); |
||||
} |
||||
|
||||
/* This method is to make the processor know the size of the frames that |
||||
* will be delivered via puzzleFrame. |
||||
* If the frames will be different size - then the result is unpredictable |
||||
*/ |
||||
public synchronized void prepareGameSize(int width, int height) { |
||||
mRgba15 = new Mat(height, width, CvType.CV_8UC4); |
||||
|
||||
mCells = new Mat[GRID_AREA]; |
||||
mCells15 = new Mat[GRID_AREA]; |
||||
|
||||
for (int i = 0; i < GRID_SIZE; i++) { |
||||
for (int j = 0; j < GRID_SIZE; j++) { |
||||
int k = i * GRID_SIZE + j; |
||||
mCells15[k] = mRgba15.submat(i * height / GRID_SIZE, (i + 1) * height / GRID_SIZE, j * width / GRID_SIZE, (j + 1) * width / GRID_SIZE); |
||||
} |
||||
} |
||||
|
||||
for (int i = 0; i < GRID_AREA; i++) { |
||||
Size s = Core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null); |
||||
mTextHeights[i] = (int) s.height; |
||||
mTextWidths[i] = (int) s.width; |
||||
} |
||||
} |
||||
|
||||
/* this method to be called from the outside. it processes the frame and shuffles |
||||
* the tiles as specified by mIndexes array |
||||
*/ |
||||
public synchronized Mat puzzleFrame(Mat inputPicture) { |
||||
int rows = inputPicture.rows(); |
||||
int cols = inputPicture.cols(); |
||||
|
||||
rows = rows - rows%4; |
||||
cols = cols - cols%4; |
||||
|
||||
for (int i = 0; i < GRID_SIZE; i++) { |
||||
for (int j = 0; j < GRID_SIZE; j++) { |
||||
int k = i * GRID_SIZE + j; |
||||
mCells[k] = inputPicture.submat(i * inputPicture.rows() / GRID_SIZE, (i + 1) * inputPicture.rows() / GRID_SIZE, j * inputPicture.cols()/ GRID_SIZE, (j + 1) * inputPicture.cols() / GRID_SIZE); |
||||
} |
||||
} |
||||
|
||||
rows = rows - rows%4; |
||||
cols = cols - cols%4; |
||||
|
||||
// copy shuffled tiles
|
||||
for (int i = 0; i < GRID_AREA; i++) { |
||||
int idx = mIndexes[i]; |
||||
if (idx == GRID_EMPTY_INDEX) |
||||
mCells15[i].setTo(new Scalar(0x33, 0x33, 0x33, 0xFF)); |
||||
else { |
||||
mCells[idx].copyTo(mCells15[i]); |
||||
if (mShowTileNumbers) { |
||||
Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / GRID_SIZE - mTextWidths[idx]) / 2, |
||||
(rows / GRID_SIZE + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2); |
||||
} |
||||
} |
||||
} |
||||
|
||||
drawGrid(cols, rows, mRgba15); |
||||
|
||||
return mRgba15; |
||||
} |
||||
|
||||
public void toggleTileNumbers() { |
||||
mShowTileNumbers = !mShowTileNumbers; |
||||
} |
||||
|
||||
public void deliverTouchEvent(int x, int y) { |
||||
int rows = mRgba15.rows(); |
||||
int cols = mRgba15.cols(); |
||||
|
||||
int row = (int) Math.floor(y * GRID_SIZE / rows); |
||||
int col = (int) Math.floor(x * GRID_SIZE / cols); |
||||
|
||||
if (row < 0 || row >= GRID_SIZE || col < 0 || col >= GRID_SIZE) { |
||||
Log.e(TAG, "It is not expected to get touch event outside of picture"); |
||||
return ; |
||||
} |
||||
|
||||
int idx = row * GRID_SIZE + col; |
||||
int idxtoswap = -1; |
||||
|
||||
// left
|
||||
if (idxtoswap < 0 && col > 0) |
||||
if (mIndexes[idx - 1] == GRID_EMPTY_INDEX) |
||||
idxtoswap = idx - 1; |
||||
// right
|
||||
if (idxtoswap < 0 && col < GRID_SIZE - 1) |
||||
if (mIndexes[idx + 1] == GRID_EMPTY_INDEX) |
||||
idxtoswap = idx + 1; |
||||
// top
|
||||
if (idxtoswap < 0 && row > 0) |
||||
if (mIndexes[idx - GRID_SIZE] == GRID_EMPTY_INDEX) |
||||
idxtoswap = idx - GRID_SIZE; |
||||
// bottom
|
||||
if (idxtoswap < 0 && row < GRID_SIZE - 1) |
||||
if (mIndexes[idx + GRID_SIZE] == GRID_EMPTY_INDEX) |
||||
idxtoswap = idx + GRID_SIZE; |
||||
|
||||
// swap
|
||||
if (idxtoswap >= 0) { |
||||
synchronized (this) { |
||||
int touched = mIndexes[idx]; |
||||
mIndexes[idx] = mIndexes[idxtoswap]; |
||||
mIndexes[idxtoswap] = touched; |
||||
} |
||||
} |
||||
} |
||||
|
||||
private void drawGrid(int cols, int rows, Mat drawMat) { |
||||
for (int i = 1; i < GRID_SIZE; i++) { |
||||
Core.line(drawMat, new Point(0, i * rows / GRID_SIZE), new Point(cols, i * rows / GRID_SIZE), new Scalar(0, 255, 0, 255), 3); |
||||
Core.line(drawMat, new Point(i * cols / GRID_SIZE, 0), new Point(i * cols / GRID_SIZE, rows), new Scalar(0, 255, 0, 255), 3); |
||||
} |
||||
} |
||||
|
||||
private static void shuffle(int[] array) { |
||||
for (int i = array.length; i > 1; i--) { |
||||
int temp = array[i - 1]; |
||||
int randIx = (int) (Math.random() * i); |
||||
array[i - 1] = array[randIx]; |
||||
array[randIx] = temp; |
||||
} |
||||
} |
||||
|
||||
private boolean isPuzzleSolvable() { |
||||
|
||||
int sum = 0; |
||||
for (int i = 0; i < GRID_AREA; i++) { |
||||
if (mIndexes[i] == GRID_EMPTY_INDEX) |
||||
sum += (i / GRID_SIZE) + 1; |
||||
else { |
||||
int smaller = 0; |
||||
for (int j = i + 1; j < GRID_AREA; j++) { |
||||
if (mIndexes[j] < mIndexes[i]) |
||||
smaller++; |
||||
} |
||||
sum += smaller; |
||||
} |
||||
} |
||||
return sum % 2 == 0; |
||||
} |
||||
} |
@ -1,117 +0,0 @@ |
||||
package org.opencv.samples.puzzle15; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.highgui.Highgui; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { |
||||
private static final String TAG = "OCVSample::BaseView"; |
||||
|
||||
private SurfaceHolder mHolder; |
||||
private VideoCapture mCamera; |
||||
|
||||
public SampleCvViewBase(Context context) { |
||||
super(context); |
||||
mHolder = getHolder(); |
||||
mHolder.addCallback(this); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
public synchronized boolean openCamera() { |
||||
Log.i(TAG, "Opening Camera"); |
||||
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); |
||||
if (!mCamera.isOpened()) { |
||||
releaseCamera(); |
||||
Log.e(TAG, "Can't open native camera"); |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
public synchronized void releaseCamera() { |
||||
Log.i(TAG, "Releasing Camera"); |
||||
if (mCamera != null) { |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
|
||||
public synchronized void setupCamera(int width, int height) { |
||||
if (mCamera != null && mCamera.isOpened()) { |
||||
Log.i(TAG, "Setup Camera - " + width + "x" + height); |
||||
List<Size> sizes = mCamera.getSupportedPreviewSizes(); |
||||
int mFrameWidth = width; |
||||
int mFrameHeight = height; |
||||
|
||||
// selecting optimal camera preview size
|
||||
{ |
||||
double minDiff = Double.MAX_VALUE; |
||||
for (Size size : sizes) { |
||||
if (Math.abs(size.height - height) < minDiff) { |
||||
mFrameWidth = (int) size.width; |
||||
mFrameHeight = (int) size.height; |
||||
minDiff = Math.abs(size.height - height); |
||||
} |
||||
} |
||||
} |
||||
|
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth); |
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight); |
||||
} |
||||
} |
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { |
||||
Log.i(TAG, "called surfaceChanged"); |
||||
setupCamera(width, height); |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
(new Thread(this)).start(); |
||||
} |
||||
|
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceDestroyed"); |
||||
} |
||||
|
||||
protected abstract Bitmap processFrame(VideoCapture capture); |
||||
|
||||
public void run() { |
||||
Log.i(TAG, "Started processing thread"); |
||||
while (true) { |
||||
Bitmap bmp = null; |
||||
|
||||
synchronized (this) { |
||||
if (mCamera == null) |
||||
break; |
||||
|
||||
if (!mCamera.grab()) { |
||||
Log.e(TAG, "mCamera.grab() failed"); |
||||
break; |
||||
} |
||||
|
||||
bmp = processFrame(mCamera); |
||||
} |
||||
|
||||
if (bmp != null) { |
||||
Canvas canvas = mHolder.lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null); |
||||
mHolder.unlockCanvasAndPost(canvas); |
||||
} |
||||
bmp.recycle(); |
||||
} |
||||
} |
||||
Log.i(TAG, "Finished processing thread"); |
||||
} |
||||
} |
@ -1,123 +0,0 @@ |
||||
package org.opencv.samples.puzzle15; |
||||
|
||||
import org.opencv.android.BaseLoaderCallback; |
||||
import org.opencv.android.LoaderCallbackInterface; |
||||
import org.opencv.android.OpenCVLoader; |
||||
|
||||
import android.app.Activity; |
||||
import android.app.AlertDialog; |
||||
import android.content.DialogInterface; |
||||
import android.os.Bundle; |
||||
import android.util.Log; |
||||
import android.view.Menu; |
||||
import android.view.MenuItem; |
||||
import android.view.Window; |
||||
import android.view.WindowManager; |
||||
|
||||
/** Activity class implements LoaderCallbackInterface to handle OpenCV initialization status **/ |
||||
public class puzzle15Activity extends Activity { |
||||
private static final String TAG = "OCVSample::Activity"; |
||||
|
||||
private MenuItem mItemNewGame; |
||||
private MenuItem mItemToggleNumbers; |
||||
private puzzle15View mView = null; |
||||
|
||||
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) { |
||||
@Override |
||||
public void onManagerConnected(int status) { |
||||
switch (status) { |
||||
case LoaderCallbackInterface.SUCCESS: |
||||
{ |
||||
Log.i(TAG, "OpenCV loaded successfully"); |
||||
// Create and set View
|
||||
mView = new puzzle15View(mAppContext); |
||||
setContentView(mView); |
||||
|
||||
// Check native OpenCV camera
|
||||
if( !mView.openCamera() ) { |
||||
AlertDialog ad = new AlertDialog.Builder(mAppContext).create(); |
||||
ad.setCancelable(false); // This blocks the 'BACK' button
|
||||
ad.setMessage("Fatal error: can't open camera!"); |
||||
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { |
||||
public void onClick(DialogInterface dialog, int which) { |
||||
dialog.dismiss(); |
||||
finish(); |
||||
} |
||||
}); |
||||
ad.show(); |
||||
} |
||||
} break; |
||||
|
||||
/** OpenCV loader cannot start Google Play **/ |
||||
case LoaderCallbackInterface.MARKET_ERROR: |
||||
{ |
||||
Log.d(TAG, "Google Play service is not accessible!"); |
||||
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create(); |
||||
MarketErrorMessage.setTitle("OpenCV Manager"); |
||||
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command."); |
||||
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
|
||||
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { |
||||
public void onClick(DialogInterface dialog, int which) { |
||||
finish(); |
||||
} |
||||
}); |
||||
MarketErrorMessage.show(); |
||||
} break; |
||||
default: |
||||
{ |
||||
super.onManagerConnected(status); |
||||
} break; |
||||
} |
||||
} |
||||
}; |
||||
|
||||
public puzzle15Activity() { |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
@Override |
||||
protected void onPause() { |
||||
Log.i(TAG, "called onPause"); |
||||
if (null != mView) |
||||
mView.releaseCamera(); |
||||
super.onPause(); |
||||
} |
||||
|
||||
@Override |
||||
protected void onResume() { |
||||
Log.i(TAG, "called onResume"); |
||||
super.onResume(); |
||||
|
||||
Log.i(TAG, "Trying to load OpenCV library"); |
||||
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) { |
||||
Log.e(TAG, "Cannot connect to OpenCV Manager"); |
||||
} |
||||
} |
||||
|
||||
/** Called when the activity is first created. */ |
||||
@Override |
||||
public void onCreate(Bundle savedInstanceState) { |
||||
Log.i(TAG, "called onCreate"); |
||||
super.onCreate(savedInstanceState); |
||||
requestWindowFeature(Window.FEATURE_NO_TITLE); |
||||
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); |
||||
} |
||||
|
||||
@Override |
||||
public boolean onCreateOptionsMenu(Menu menu) { |
||||
Log.i(TAG, "called onCreateOptionsMenu"); |
||||
mItemNewGame = menu.add("Start new game"); |
||||
mItemToggleNumbers = menu.add("Show/hide tile numbers"); |
||||
return true; |
||||
} |
||||
|
||||
@Override |
||||
public boolean onOptionsItemSelected(MenuItem item) { |
||||
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item); |
||||
if (item == mItemNewGame) |
||||
mView.startNewGame(); |
||||
else if (item == mItemToggleNumbers) |
||||
mView.tolggleTileNumbers(); |
||||
return true; |
||||
} |
||||
} |
@ -1,250 +0,0 @@ |
||||
package org.opencv.samples.puzzle15; |
||||
|
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.Core; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.core.Point; |
||||
import org.opencv.core.Scalar; |
||||
import org.opencv.highgui.Highgui; |
||||
import org.opencv.highgui.VideoCapture; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
import android.view.MotionEvent; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.View; |
||||
import android.view.View.OnTouchListener; |
||||
|
||||
public class puzzle15View extends SampleCvViewBase implements OnTouchListener { |
||||
private static final String TAG = "OCVSample::View"; |
||||
|
||||
private Mat mRgba; |
||||
private Mat mRgba15; |
||||
private Mat[] mCells; |
||||
private Mat[] mCells15; |
||||
private int[] mIndexses; |
||||
private int[] mTextWidths; |
||||
private int[] mTextHeights; |
||||
private boolean mShowTileNumbers = true; |
||||
|
||||
int gridSize = 4; |
||||
int gridArea = gridSize * gridSize; |
||||
int gridEmptyIdx = gridArea - 1; |
||||
|
||||
public puzzle15View(Context context) { |
||||
super(context); |
||||
setOnTouchListener(this); |
||||
|
||||
mTextWidths = new int[gridArea]; |
||||
mTextHeights = new int[gridArea]; |
||||
for (int i = 0; i < gridArea; i++) { |
||||
Size s = Core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null); |
||||
mTextHeights[i] = (int) s.height; |
||||
mTextWidths[i] = (int) s.width; |
||||
} |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
@Override |
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
synchronized (this) { |
||||
// initialize Mat before usage
|
||||
mRgba = new Mat(); |
||||
} |
||||
|
||||
super.surfaceCreated(holder); |
||||
} |
||||
|
||||
public static void shuffle(int[] array) { |
||||
for (int i = array.length; i > 1; i--) { |
||||
int temp = array[i - 1]; |
||||
int randIx = (int) (Math.random() * i); |
||||
array[i - 1] = array[randIx]; |
||||
array[randIx] = temp; |
||||
} |
||||
} |
||||
|
||||
public boolean isPuzzleSolvable() { |
||||
if (gridSize != 4) |
||||
return true; |
||||
|
||||
int sum = 0; |
||||
for (int i = 0; i < gridArea; i++) { |
||||
if (mIndexses[i] == gridEmptyIdx) |
||||
sum += (i / gridSize) + 1; |
||||
else { |
||||
int smaller = 0; |
||||
for (int j = i + 1; j < gridArea; j++) { |
||||
if (mIndexses[j] < mIndexses[i]) |
||||
smaller++; |
||||
} |
||||
sum += smaller; |
||||
} |
||||
} |
||||
|
||||
return sum % 2 == 0; |
||||
} |
||||
|
||||
private void createPuzzle(int cols, int rows, int type) { |
||||
mCells = new Mat[gridArea]; |
||||
mCells15 = new Mat[gridArea]; |
||||
|
||||
mRgba15 = new Mat(rows, cols, type); |
||||
mIndexses = new int[gridArea]; |
||||
|
||||
for (int i = 0; i < gridSize; i++) { |
||||
for (int j = 0; j < gridSize; j++) { |
||||
int k = i * gridSize + j; |
||||
mIndexses[k] = k; |
||||
mCells[k] = mRgba.submat(i * rows / gridSize, (i + 1) * rows / gridSize, j * cols / gridSize, (j + 1) * cols / gridSize); |
||||
mCells15[k] = mRgba15.submat(i * rows / gridSize, (i + 1) * rows / gridSize, j * cols / gridSize, (j + 1) * cols / gridSize); |
||||
} |
||||
} |
||||
|
||||
startNewGame(); |
||||
} |
||||
|
||||
private void drawGrid(int cols, int rows) { |
||||
for (int i = 1; i < gridSize; i++) { |
||||
Core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3); |
||||
Core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3); |
||||
} |
||||
} |
||||
|
||||
public synchronized void startNewGame() { |
||||
do { |
||||
shuffle(mIndexses); |
||||
} while (!isPuzzleSolvable()); |
||||
} |
||||
|
||||
public void tolggleTileNumbers() { |
||||
mShowTileNumbers = !mShowTileNumbers; |
||||
} |
||||
|
||||
@Override |
||||
protected Bitmap processFrame(VideoCapture capture) { |
||||
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); |
||||
int cols = mRgba.cols(); |
||||
int rows = mRgba.rows(); |
||||
|
||||
rows = rows - rows%4; |
||||
cols = cols - cols%4; |
||||
|
||||
if (mCells == null) |
||||
createPuzzle(cols, rows, mRgba.type()); |
||||
else if(mRgba15.cols() != cols || mRgba15.rows() != rows) { |
||||
releaseMats(); |
||||
createPuzzle(cols, rows, mRgba.type()); |
||||
} |
||||
|
||||
// copy shuffled tiles
|
||||
for (int i = 0; i < gridArea; i++) { |
||||
int idx = mIndexses[i]; |
||||
if (idx == gridEmptyIdx) |
||||
mCells15[i].setTo(new Scalar(0x33, 0x33, 0x33, 0xFF)); |
||||
else { |
||||
mCells[idx].copyTo(mCells15[i]); |
||||
if (mShowTileNumbers) { |
||||
Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / gridSize - mTextWidths[idx]) / 2, |
||||
(rows / gridSize + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2); |
||||
} |
||||
} |
||||
} |
||||
|
||||
drawGrid(cols, rows); |
||||
Bitmap bmp = Bitmap.createBitmap(cols, rows, Bitmap.Config.ARGB_8888); |
||||
try { |
||||
Utils.matToBitmap(mRgba15, bmp); |
||||
return bmp; |
||||
} catch(Exception e) { |
||||
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage()); |
||||
bmp.recycle(); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void run() { |
||||
super.run(); |
||||
|
||||
synchronized (this) { |
||||
releaseMats(); |
||||
|
||||
if (mRgba != null) |
||||
mRgba.release(); |
||||
mRgba = null; |
||||
} |
||||
} |
||||
|
||||
private void releaseMats() { |
||||
// Explicitly deallocate Mats
|
||||
if (mCells != null) { |
||||
for (Mat m : mCells) |
||||
m.release(); |
||||
} |
||||
if (mCells15 != null) { |
||||
for (Mat m : mCells15) |
||||
m.release(); |
||||
} |
||||
|
||||
if (mRgba15 != null) |
||||
mRgba15.release(); |
||||
|
||||
mRgba15 = null; |
||||
mCells = null; |
||||
mCells15 = null; |
||||
mIndexses = null; |
||||
} |
||||
|
||||
public boolean onTouch(View v, MotionEvent event) { |
||||
if(mRgba==null) return false; |
||||
|
||||
int cols = mRgba.cols(); |
||||
int rows = mRgba.rows(); |
||||
float xoffset = (getWidth() - cols) / 2; |
||||
float yoffset = (getHeight() - rows) / 2; |
||||
|
||||
float x = event.getX() - xoffset; |
||||
float y = event.getY() - yoffset; |
||||
|
||||
int row = (int) Math.floor(y * gridSize / rows); |
||||
int col = (int) Math.floor(x * gridSize / cols); |
||||
|
||||
if (row < 0 || row >= gridSize || col < 0 || col >= gridSize) |
||||
return false; |
||||
|
||||
int idx = row * gridSize + col; |
||||
int idxtoswap = -1; |
||||
|
||||
// left
|
||||
if (idxtoswap < 0 && col > 0) |
||||
if (mIndexses[idx - 1] == gridEmptyIdx) |
||||
idxtoswap = idx - 1; |
||||
// right
|
||||
if (idxtoswap < 0 && col < gridSize - 1) |
||||
if (mIndexses[idx + 1] == gridEmptyIdx) |
||||
idxtoswap = idx + 1; |
||||
// top
|
||||
if (idxtoswap < 0 && row > 0) |
||||
if (mIndexses[idx - gridSize] == gridEmptyIdx) |
||||
idxtoswap = idx - gridSize; |
||||
// bottom
|
||||
if (idxtoswap < 0 && row < gridSize - 1) |
||||
if (mIndexses[idx + gridSize] == gridEmptyIdx) |
||||
idxtoswap = idx + gridSize; |
||||
|
||||
// swap
|
||||
if (idxtoswap >= 0) { |
||||
synchronized (this) { |
||||
int touched = mIndexses[idx]; |
||||
mIndexses[idx] = mIndexses[idxtoswap]; |
||||
mIndexses[idxtoswap] = touched; |
||||
} |
||||
} |
||||
|
||||
return false;// don't need subsequent touch events
|
||||
} |
||||
} |
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.JavaCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/color_blob_detection_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -1,151 +0,0 @@ |
||||
package org.opencv.samples.colorblobdetect; |
||||
|
||||
import java.util.List; |
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.Core; |
||||
import org.opencv.core.CvType; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.MatOfPoint; |
||||
import org.opencv.core.Rect; |
||||
import org.opencv.core.Scalar; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.Highgui; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.imgproc.Imgproc; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
import android.view.MotionEvent; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.View; |
||||
import android.view.View.OnTouchListener; |
||||
|
||||
public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchListener { |
||||
private static final String TAG = "OCVSample::View"; |
||||
|
||||
private Mat mRgba; |
||||
private boolean mIsColorSelected = false; |
||||
private Scalar mBlobColorRgba = new Scalar(255); |
||||
private Scalar mBlobColorHsv = new Scalar(255); |
||||
private ColorBlobDetector mDetector = new ColorBlobDetector(); |
||||
private Mat mSpectrum = new Mat(); |
||||
private static Size SPECTRUM_SIZE = new Size(200, 32); |
||||
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255); |
||||
|
||||
|
||||
public ColorBlobDetectionView(Context context) { |
||||
super(context); |
||||
setOnTouchListener(this); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
@Override |
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
synchronized (this) { |
||||
// initialize Mat before usage
|
||||
mRgba = new Mat(); |
||||
} |
||||
|
||||
super.surfaceCreated(holder); |
||||
} |
||||
|
||||
private Scalar converScalarHsv2Rgba(Scalar hsvColor) { |
||||
Mat pointMatRgba = new Mat(); |
||||
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor); |
||||
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4); |
||||
|
||||
return new Scalar(pointMatRgba.get(0, 0)); |
||||
} |
||||
|
||||
public boolean onTouch(View v, MotionEvent event) { |
||||
int cols = mRgba.cols(); |
||||
int rows = mRgba.rows(); |
||||
|
||||
int xOffset = (getWidth() - cols) / 2; |
||||
int yOffset = (getHeight() - rows) / 2; |
||||
|
||||
int x = (int)event.getX() - xOffset; |
||||
int y = (int)event.getY() - yOffset; |
||||
|
||||
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")"); |
||||
|
||||
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false; |
||||
|
||||
Rect touchedRect = new Rect(); |
||||
|
||||
touchedRect.x = (x>4) ? x-4 : 0; |
||||
touchedRect.y = (y>4) ? y-4 : 0; |
||||
|
||||
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x; |
||||
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y; |
||||
|
||||
Mat touchedRegionRgba = mRgba.submat(touchedRect); |
||||
|
||||
Mat touchedRegionHsv = new Mat(); |
||||
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL); |
||||
|
||||
// Calculate average color of touched region
|
||||
mBlobColorHsv = Core.sumElems(touchedRegionHsv); |
||||
int pointCount = touchedRect.width*touchedRect.height; |
||||
for (int i = 0; i < mBlobColorHsv.val.length; i++) |
||||
mBlobColorHsv.val[i] /= pointCount; |
||||
|
||||
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv); |
||||
|
||||
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] + |
||||
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")"); |
||||
|
||||
mDetector.setHsvColor(mBlobColorHsv); |
||||
|
||||
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE); |
||||
|
||||
mIsColorSelected = true; |
||||
|
||||
return false; // don't need subsequent touch events
|
||||
} |
||||
|
||||
@Override |
||||
protected Bitmap processFrame(VideoCapture capture) { |
||||
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); |
||||
|
||||
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); |
||||
|
||||
if (mIsColorSelected) { |
||||
mDetector.process(mRgba); |
||||
List<MatOfPoint> contours = mDetector.getContours(); |
||||
Log.e(TAG, "Contours count: " + contours.size()); |
||||
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR); |
||||
|
||||
Mat colorLabel = mRgba.submat(2, 34, 2, 34); |
||||
colorLabel.setTo(mBlobColorRgba); |
||||
|
||||
Mat spectrumLabel = mRgba.submat(2, 2 + mSpectrum.rows(), 38, 38 + mSpectrum.cols()); |
||||
mSpectrum.copyTo(spectrumLabel); |
||||
} |
||||
|
||||
try { |
||||
Utils.matToBitmap(mRgba, bmp); |
||||
} catch(Exception e) { |
||||
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage()); |
||||
bmp.recycle(); |
||||
bmp = null; |
||||
} |
||||
|
||||
return bmp; |
||||
} |
||||
|
||||
@Override |
||||
public void run() { |
||||
super.run(); |
||||
|
||||
synchronized (this) { |
||||
// Explicitly deallocate Mats
|
||||
if (mRgba != null) |
||||
mRgba.release(); |
||||
|
||||
mRgba = null; |
||||
} |
||||
} |
||||
} |
@ -1,117 +0,0 @@ |
||||
package org.opencv.samples.colorblobdetect; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.highgui.Highgui; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { |
||||
private static final String TAG = "OCVSample::BaseView"; |
||||
|
||||
private SurfaceHolder mHolder; |
||||
private VideoCapture mCamera; |
||||
|
||||
public SampleCvViewBase(Context context) { |
||||
super(context); |
||||
mHolder = getHolder(); |
||||
mHolder.addCallback(this); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
public synchronized boolean openCamera() { |
||||
Log.i(TAG, "Opening Camera"); |
||||
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); |
||||
if (!mCamera.isOpened()) { |
||||
releaseCamera(); |
||||
Log.e(TAG, "Can't open native camera"); |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
public synchronized void releaseCamera() { |
||||
Log.i(TAG, "Releasing Camera"); |
||||
if (mCamera != null) { |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
|
||||
public synchronized void setupCamera(int width, int height) { |
||||
if (mCamera != null && mCamera.isOpened()) { |
||||
Log.i(TAG, "Setup Camera - " + width + "x" + height); |
||||
List<Size> sizes = mCamera.getSupportedPreviewSizes(); |
||||
int mFrameWidth = width; |
||||
int mFrameHeight = height; |
||||
|
||||
// selecting optimal camera preview size
|
||||
{ |
||||
double minDiff = Double.MAX_VALUE; |
||||
for (Size size : sizes) { |
||||
if (Math.abs(size.height - height) < minDiff) { |
||||
mFrameWidth = (int) size.width; |
||||
mFrameHeight = (int) size.height; |
||||
minDiff = Math.abs(size.height - height); |
||||
} |
||||
} |
||||
} |
||||
|
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth); |
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight); |
||||
} |
||||
} |
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { |
||||
Log.i(TAG, "called surfaceChanged"); |
||||
setupCamera(width, height); |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
(new Thread(this)).start(); |
||||
} |
||||
|
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceDestroyed"); |
||||
} |
||||
|
||||
protected abstract Bitmap processFrame(VideoCapture capture); |
||||
|
||||
public void run() { |
||||
Log.i(TAG, "Started processing thread"); |
||||
while (true) { |
||||
Bitmap bmp = null; |
||||
|
||||
synchronized (this) { |
||||
if (mCamera == null) |
||||
break; |
||||
|
||||
if (!mCamera.grab()) { |
||||
Log.e(TAG, "mCamera.grab() failed"); |
||||
break; |
||||
} |
||||
|
||||
bmp = processFrame(mCamera); |
||||
} |
||||
|
||||
if (bmp != null) { |
||||
Canvas canvas = mHolder.lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null); |
||||
mHolder.unlockCanvasAndPost(canvas); |
||||
} |
||||
bmp.recycle(); |
||||
} |
||||
} |
||||
Log.i(TAG, "Finished processing thread"); |
||||
} |
||||
} |
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.JavaCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/fd_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -1,175 +0,0 @@ |
||||
package org.opencv.samples.fd; |
||||
|
||||
import java.io.File; |
||||
import java.io.FileOutputStream; |
||||
import java.io.IOException; |
||||
import java.io.InputStream; |
||||
|
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.Core; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.MatOfRect; |
||||
import org.opencv.core.Rect; |
||||
import org.opencv.core.Scalar; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.Highgui; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.objdetect.CascadeClassifier; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
|
||||
class FdView extends SampleCvViewBase { |
||||
private static final String TAG = "OCVSample::View"; |
||||
private Mat mRgba; |
||||
private Mat mGray; |
||||
private File mCascadeFile; |
||||
private CascadeClassifier mJavaDetector; |
||||
private DetectionBasedTracker mNativeDetector; |
||||
|
||||
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255); |
||||
|
||||
public static final int JAVA_DETECTOR = 0; |
||||
public static final int NATIVE_DETECTOR = 1; |
||||
|
||||
private int mDetectorType = JAVA_DETECTOR; |
||||
|
||||
private float mRelativeFaceSize = 0; |
||||
private int mAbsoluteFaceSize = 0; |
||||
|
||||
public void setMinFaceSize(float faceSize) { |
||||
mRelativeFaceSize = faceSize; |
||||
mAbsoluteFaceSize = 0; |
||||
} |
||||
|
||||
public void setDetectorType(int type) { |
||||
if (mDetectorType != type) { |
||||
mDetectorType = type; |
||||
|
||||
if (type == NATIVE_DETECTOR) { |
||||
Log.i(TAG, "Detection Based Tracker enabled"); |
||||
mNativeDetector.start(); |
||||
} else { |
||||
Log.i(TAG, "Cascade detector enabled"); |
||||
mNativeDetector.stop(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
public FdView(Context context) { |
||||
super(context); |
||||
|
||||
try { |
||||
// load cascade file from application resources
|
||||
InputStream is = context.getResources().openRawResource(R.raw.lbpcascade_frontalface); |
||||
File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE); |
||||
mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml"); |
||||
FileOutputStream os = new FileOutputStream(mCascadeFile); |
||||
|
||||
byte[] buffer = new byte[4096]; |
||||
int bytesRead; |
||||
while ((bytesRead = is.read(buffer)) != -1) { |
||||
os.write(buffer, 0, bytesRead); |
||||
} |
||||
is.close(); |
||||
os.close(); |
||||
|
||||
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath()); |
||||
if (mJavaDetector.empty()) { |
||||
Log.e(TAG, "Failed to load cascade classifier"); |
||||
mJavaDetector = null; |
||||
} else |
||||
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath()); |
||||
|
||||
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0); |
||||
|
||||
cascadeDir.delete(); |
||||
|
||||
} catch (IOException e) { |
||||
e.printStackTrace(); |
||||
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e); |
||||
} |
||||
|
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
@Override |
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
synchronized (this) { |
||||
// initialize Mats before usage
|
||||
mGray = new Mat(); |
||||
mRgba = new Mat(); |
||||
} |
||||
|
||||
super.surfaceCreated(holder); |
||||
} |
||||
|
||||
@Override |
||||
protected Bitmap processFrame(VideoCapture capture) { |
||||
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); |
||||
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME); |
||||
|
||||
if (mAbsoluteFaceSize == 0) { |
||||
int height = mGray.rows(); |
||||
if (Math.round(height * mRelativeFaceSize) > 0) { |
||||
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize); |
||||
} |
||||
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize); |
||||
} |
||||
|
||||
MatOfRect faces = new MatOfRect(); |
||||
|
||||
if (mDetectorType == JAVA_DETECTOR) { |
||||
if (mJavaDetector != null) |
||||
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
|
||||
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size()); |
||||
} |
||||
else if (mDetectorType == NATIVE_DETECTOR) { |
||||
if (mNativeDetector != null) |
||||
mNativeDetector.detect(mGray, faces); |
||||
} |
||||
else { |
||||
Log.e(TAG, "Detection method is not selected!"); |
||||
} |
||||
|
||||
Rect[] facesArray = faces.toArray(); |
||||
for (int i = 0; i < facesArray.length; i++) |
||||
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3); |
||||
|
||||
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); |
||||
|
||||
try { |
||||
Utils.matToBitmap(mRgba, bmp); |
||||
} catch(Exception e) { |
||||
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage()); |
||||
bmp.recycle(); |
||||
bmp = null; |
||||
} |
||||
|
||||
return bmp; |
||||
} |
||||
|
||||
@Override |
||||
public void run() { |
||||
super.run(); |
||||
|
||||
synchronized (this) { |
||||
// Explicitly deallocate Mats
|
||||
if (mRgba != null) |
||||
mRgba.release(); |
||||
if (mGray != null) |
||||
mGray.release(); |
||||
if (mCascadeFile != null) |
||||
mCascadeFile.delete(); |
||||
if (mNativeDetector != null) |
||||
mNativeDetector.release(); |
||||
|
||||
mRgba = null; |
||||
mGray = null; |
||||
mCascadeFile = null; |
||||
} |
||||
} |
||||
} |
@ -1,123 +0,0 @@ |
||||
package org.opencv.samples.fd; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.highgui.Highgui; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
/**
 * Base view for the native-camera OpenCV samples. Owns the OpenCV
 * {@link VideoCapture}, runs a grab/process/draw loop on a background thread,
 * and delegates per-frame processing to subclasses via
 * {@link #processFrame(VideoCapture)}.
 *
 * Thread-safety: camera open/release/setup and the grab/process step are
 * synchronized on this instance; drawing happens outside the lock.
 */
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final String TAG = "OCVSample::BaseView";

    private SurfaceHolder mHolder; // surface the processed frames are drawn onto
    private VideoCapture mCamera; // OpenCV native camera; null once released
    private FpsMeter mFps; // FPS counter drawn as an overlay

    public SampleCvViewBase(Context context) {
        super(context);
        mHolder = getHolder();
        mHolder.addCallback(this);
        mFps = new FpsMeter();
        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    /**
     * Opens the native OpenCV camera.
     *
     * @return false if the camera could not be opened (any partially opened
     *         capture is released first).
     */
    public synchronized boolean openCamera() {
        Log.i(TAG, "Opening Camera");
        mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
        if (!mCamera.isOpened()) {
            releaseCamera();
            Log.e(TAG, "Can't open native camera");
            return false;
        }
        return true;
    }

    /** Releases the camera; setting it to null also ends the run() loop. */
    public synchronized void releaseCamera() {
        Log.i(TAG, "Releasing Camera");
        if (mCamera != null) {
            mCamera.release();
            mCamera = null;
        }
    }

    /**
     * Picks the supported preview size whose height is closest to the
     * requested one and applies it to the camera. No-op when the camera is
     * not open.
     */
    public synchronized void setupCamera(int width, int height) {
        if (mCamera != null && mCamera.isOpened()) {
            Log.i(TAG, "Setup Camera - " + width + "x" + height);
            List<Size> sizes = mCamera.getSupportedPreviewSizes();
            // NOTE(review): locals despite the m-prefix; no fields of these
            // names exist in this class, so nothing is shadowed.
            int mFrameWidth = width;
            int mFrameHeight = height;

            // selecting optimal camera preview size (closest height match)
            {
                double minDiff = Double.MAX_VALUE;
                for (Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = (int) size.width;
                        mFrameHeight = (int) size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
        }
    }

    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        Log.i(TAG, "called surfaceChanged");
        setupCamera(width, height);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "called surfaceCreated");
        // Start the frame-processing loop as soon as the surface exists.
        (new Thread(this)).start();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.i(TAG, "called surfaceDestroyed");
    }

    /**
     * Processes one grabbed frame.
     *
     * @param capture the camera to retrieve frame data from
     * @return a bitmap to draw, or null to skip drawing this frame
     */
    protected abstract Bitmap processFrame(VideoCapture capture);

    /**
     * Frame loop: grab and process under the lock, then draw the result
     * centered on the surface. Exits when the camera is released
     * (mCamera == null) or a grab fails.
     */
    public void run() {
        Log.i(TAG, "Started processing thread");
        mFps.init();

        while (true) {
            Bitmap bmp = null;

            synchronized (this) {
                if (mCamera == null)
                    break;

                if (!mCamera.grab()) {
                    Log.e(TAG, "mCamera.grab() failed");
                    break;
                }

                bmp = processFrame(mCamera);

                mFps.measure();
            }

            if (bmp != null) {
                Canvas canvas = mHolder.lockCanvas();
                if (canvas != null) {
                    canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
                    // Draw the frame centered on the surface.
                    canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
                    mFps.draw(canvas, (canvas.getWidth() - bmp.getWidth()) / 2, 0);
                    mHolder.unlockCanvasAndPost(canvas);
                }
                bmp.recycle(); // a fresh bitmap is produced every iteration
            }
        }
        Log.i(TAG, "Finished processing thread");
    }
}
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.JavaCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/image_manipulations_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -1,283 +0,0 @@ |
||||
package org.opencv.samples.imagemanipulations; |
||||
|
||||
import java.util.Arrays; |
||||
|
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.Core; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.MatOfFloat; |
||||
import org.opencv.core.MatOfInt; |
||||
import org.opencv.core.Size; |
||||
import org.opencv.core.Point; |
||||
import org.opencv.core.Scalar; |
||||
import org.opencv.core.CvType; |
||||
import org.opencv.imgproc.Imgproc; |
||||
import org.opencv.highgui.Highgui; |
||||
import org.opencv.highgui.VideoCapture; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
|
||||
/**
 * Image-manipulations sample view: applies the effect selected by
 * ImageManipulationsActivity.viewMode (histogram, Canny, Sobel, sepia, zoom,
 * pixelize, posterize) to every camera frame.
 *
 * NOTE: the *InnerWindow / *Window / *Corner Mats are submat views that alias
 * the mRgba/mGray frame buffers — writing through them modifies the frame
 * in place.
 */
class ImageManipulationsView extends SampleCvViewBase {
    private static final String TAG = "OCVSample::View";

    private Size mSize0; // empty Size: lets resize() derive size from scale factors
    private Size mSizeRgba; // size of the current RGBA frame
    private Size mSizeRgbaInner; // size of the centered inner sub-window

    private Mat mRgba;
    private Mat mGray;
    private Mat mIntermediateMat; // scratch Mat shared by several modes
    private Mat mHist;
    private Mat mMat0; // empty Mat used as the "no mask" argument to calcHist
    private MatOfInt mChannels[]; // channel selectors 0..2 for calcHist
    private MatOfInt mHistSize;
    private int mHistSizeNum; // number of histogram bins per channel
    private MatOfFloat mRanges;
    private Scalar mColorsRGB[]; // bar colors for the R/G/B histograms
    private Scalar mColorsHue[]; // one bar color per hue bin
    private Scalar mWhilte; // white, used for the value histogram (name typo kept)
    private Point mP1;
    private Point mP2;
    private float mBuff[]; // scratch buffer for reading histogram bin values
    private Mat mRgbaInnerWindow; // centered 3/4-size view into mRgba
    private Mat mGrayInnerWindow; // centered 3/4-size view into mGray
    private Mat mBlurWindow; // middle-third vertical strip of mRgba
    private Mat mZoomWindow; // small central region that gets magnified
    private Mat mZoomCorner; // top-left corner region the zoom is rendered into
    private Mat mSepiaKernel; // 4x4 color-transform matrix for the sepia mode

    public ImageManipulationsView(Context context) {
        super(context);

        // Sepia color transform; rows map output R/G/B/A from input channels.
        mSepiaKernel = new Mat(4, 4, CvType.CV_32F);
        mSepiaKernel.put(0, 0, /* R */0.189f, 0.769f, 0.393f, 0f);
        mSepiaKernel.put(1, 0, /* G */0.168f, 0.686f, 0.349f, 0f);
        mSepiaKernel.put(2, 0, /* B */0.131f, 0.534f, 0.272f, 0f);
        mSepiaKernel.put(3, 0, /* A */0.000f, 0.000f, 0.000f, 1f);

        Log.i(TAG, "Instantiated new " + this.getClass());
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "called surfaceCreated");
        synchronized (this) {
            // Initialize Mats and histogram parameters before the processing
            // thread (started by super) begins using them.
            mGray = new Mat();
            mRgba = new Mat();
            mIntermediateMat = new Mat();
            mSize0 = new Size();
            mHist = new Mat();
            mChannels = new MatOfInt[] { new MatOfInt(0), new MatOfInt(1), new MatOfInt(2) };
            mHistSizeNum = 25;
            mBuff = new float[mHistSizeNum];
            mHistSize = new MatOfInt(mHistSizeNum);
            mRanges = new MatOfFloat(0f, 256f);
            mMat0 = new Mat();
            mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
            mColorsHue = new Scalar[] {
                    new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
                    new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
                    new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
                    new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
                    new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
            };
            mWhilte = Scalar.all(255);
            mP1 = new Point();
            mP2 = new Point();
        }

        super.surfaceCreated(holder);
    }

    /**
     * (Re)creates the submat views used by the effect modes. Each view is
     * only created when still null, so stale views must be nulled out first
     * when the frame size changes.
     */
    private void CreateAuxiliaryMats() {
        if (mRgba.empty())
            return;

        mSizeRgba = mRgba.size();

        int rows = (int) mSizeRgba.height;
        int cols = (int) mSizeRgba.width;

        int left = cols / 8;
        int top = rows / 8;

        int width = cols * 3 / 4;
        int height = rows * 3 / 4;

        if (mRgbaInnerWindow == null)
            mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
        mSizeRgbaInner = mRgbaInnerWindow.size();

        if (mGrayInnerWindow == null && !mGray.empty())
            mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);

        if (mBlurWindow == null)
            mBlurWindow = mRgba.submat(0, rows, cols / 3, cols * 2 / 3);

        if (mZoomCorner == null)
            mZoomCorner = mRgba.submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);

        if (mZoomWindow == null)
            mZoomWindow = mRgba.submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
    }

    /**
     * Applies the currently selected effect to the frame and converts the
     * result to a bitmap; returns null if the conversion fails.
     */
    @Override
    protected Bitmap processFrame(VideoCapture capture) {
        switch (ImageManipulationsActivity.viewMode) {

        case ImageManipulationsActivity.VIEW_MODE_RGBA:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            break;

        case ImageManipulationsActivity.VIEW_MODE_HIST:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            if ((mSizeRgba == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
                CreateAuxiliaryMats();
            // Bar width ("thikness" sic) and left offset so 5 histogram
            // groups (R, G, B, value, hue) fit across the frame.
            int thikness = (int) (mSizeRgba.width / (mHistSizeNum + 10) / 5);
            if(thikness > 5) thikness = 5;
            int offset = (int) ((mSizeRgba.width - (5*mHistSizeNum + 4*10)*thikness)/2);
            // RGB histograms: one group of bars per color channel.
            for(int c=0; c<3; c++) {
                Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
                Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
                mHist.get(0, 0, mBuff);
                for(int h=0; h<mHistSizeNum; h++) {
                    mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
                    mP1.y = mSizeRgba.height-1; // bars grow up from the bottom edge
                    mP2.y = mP1.y - 2 - (int)mBuff[h];
                    Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness);
                }
            }
            // Value and Hue histograms, computed in HSV space.
            Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
            // Value (channel 2), drawn in white.
            Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
            Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
            mHist.get(0, 0, mBuff);
            for(int h=0; h<mHistSizeNum; h++) {
                mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
                mP1.y = mSizeRgba.height-1;
                mP2.y = mP1.y - 2 - (int)mBuff[h];
                Core.line(mRgba, mP1, mP2, mWhilte, thikness);
            }
            // Hue (channel 0), each bar drawn in its own hue color.
            Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
            Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
            mHist.get(0, 0, mBuff);
            for(int h=0; h<mHistSizeNum; h++) {
                mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
                mP1.y = mSizeRgba.height-1;
                mP2.y = mP1.y - 2 - (int)mBuff[h];
                Core.line(mRgba, mP1, mP2, mColorsHue[h], thikness);
            }
            break;

        case ImageManipulationsActivity.VIEW_MODE_CANNY:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);

            if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
                CreateAuxiliaryMats();
            // Writing into mRgbaInnerWindow draws directly into mRgba.
            Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
            Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
            break;

        case ImageManipulationsActivity.VIEW_MODE_SOBEL:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

            if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
                CreateAuxiliaryMats();

            Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
            Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0); // amplify gradients
            Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
            break;

        case ImageManipulationsActivity.VIEW_MODE_SEPIA:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            Core.transform(mRgba, mRgba, mSepiaKernel);
            break;

        case ImageManipulationsActivity.VIEW_MODE_ZOOM:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            if ((mZoomCorner == null) || (mZoomWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
                CreateAuxiliaryMats();
            // Magnify the central window into the top-left corner region.
            Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());

            Size wsize = mZoomWindow.size();
            Core.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
            break;

        case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
                CreateAuxiliaryMats();
            // Downscale to 10% then upscale with nearest-neighbor to pixelize.
            Imgproc.resize(mRgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
            Imgproc.resize(mIntermediateMat, mRgbaInnerWindow, mSizeRgbaInner, 0., 0., Imgproc.INTER_NEAREST);
            break;

        case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
            capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
                CreateAuxiliaryMats();
            /*
            Imgproc.cvtColor(mRgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
            Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
            Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
            */
            // Blacken edges, then quantize colors by scaling down and back up.
            Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
            mRgbaInnerWindow.setTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
            Core.convertScaleAbs(mRgbaInnerWindow, mIntermediateMat, 1./16, 0);
            Core.convertScaleAbs(mIntermediateMat, mRgbaInnerWindow, 16, 0);
            break;
        }

        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

        try {
            Utils.matToBitmap(mRgba, bmp);
            return bmp;
        } catch(Exception e) {
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmp.recycle();
            return null;
        }
    }

    @Override
    public void run() {
        super.run();

        synchronized (this) {
            // Explicitly deallocate Mats once the processing loop has
            // finished; submat views are released before their parents.
            if (mZoomWindow != null)
                mZoomWindow.release();
            if (mZoomCorner != null)
                mZoomCorner.release();
            if (mBlurWindow != null)
                mBlurWindow.release();
            if (mGrayInnerWindow != null)
                mGrayInnerWindow.release();
            if (mRgbaInnerWindow != null)
                mRgbaInnerWindow.release();
            if (mRgba != null)
                mRgba.release();
            if (mGray != null)
                mGray.release();
            if (mIntermediateMat != null)
                mIntermediateMat.release();

            mRgba = null;
            mGray = null;
            mIntermediateMat = null;
            mRgbaInnerWindow = null;
            mGrayInnerWindow = null;
            mBlurWindow = null;
            mZoomCorner = null;
            mZoomWindow = null;
        }
    }
}
@ -1,124 +0,0 @@ |
||||
package org.opencv.samples.imagemanipulations; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.highgui.Highgui; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { |
||||
private static final String TAG = "OCVSample::BaseView"; |
||||
|
||||
private SurfaceHolder mHolder; |
||||
private VideoCapture mCamera; |
||||
private FpsMeter mFps; |
||||
|
||||
public SampleCvViewBase(Context context) { |
||||
super(context); |
||||
mHolder = getHolder(); |
||||
mHolder.addCallback(this); |
||||
mFps = new FpsMeter(); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
public synchronized boolean openCamera() { |
||||
Log.i(TAG, "Opening Camera"); |
||||
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); |
||||
if (!mCamera.isOpened()) { |
||||
releaseCamera(); |
||||
Log.e(TAG, "Can't open native camera"); |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
public synchronized void releaseCamera() { |
||||
Log.i(TAG, "Releasing Camera"); |
||||
if (mCamera != null) { |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
|
||||
public synchronized void setupCamera(int width, int height) { |
||||
if (mCamera != null && mCamera.isOpened()) { |
||||
Log.i(TAG, "Setup Camera - " + width + "x" + height); |
||||
List<Size> sizes = mCamera.getSupportedPreviewSizes(); |
||||
int mFrameWidth = width; |
||||
int mFrameHeight = height; |
||||
|
||||
// selecting optimal camera preview size
|
||||
{ |
||||
double minDiff = Double.MAX_VALUE; |
||||
for (Size size : sizes) { |
||||
if (Math.abs(size.height - height) < minDiff) { |
||||
mFrameWidth = (int) size.width; |
||||
mFrameHeight = (int) size.height; |
||||
minDiff = Math.abs(size.height - height); |
||||
} |
||||
} |
||||
} |
||||
|
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth); |
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight); |
||||
} |
||||
} |
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { |
||||
Log.i(TAG, "called surfaceChanged"); |
||||
setupCamera(width, height); |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
(new Thread(this)).start(); |
||||
} |
||||
|
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceDestroyed"); |
||||
} |
||||
|
||||
protected abstract Bitmap processFrame(VideoCapture capture); |
||||
|
||||
public void run() { |
||||
Log.i(TAG, "Started processing thread"); |
||||
mFps.init(); |
||||
|
||||
while (true) { |
||||
Bitmap bmp = null; |
||||
|
||||
synchronized (this) { |
||||
if (mCamera == null) |
||||
break; |
||||
|
||||
if (!mCamera.grab()) { |
||||
Log.e(TAG, "mCamera.grab() failed"); |
||||
break; |
||||
} |
||||
|
||||
bmp = processFrame(mCamera); |
||||
|
||||
mFps.measure(); |
||||
} |
||||
|
||||
if (bmp != null) { |
||||
Canvas canvas = mHolder.lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()), null); |
||||
mFps.draw(canvas, (canvas.getWidth() - bmp.getWidth()) / 2, 0); |
||||
mHolder.unlockCanvasAndPost(canvas); |
||||
} |
||||
bmp.recycle(); |
||||
} |
||||
} |
||||
Log.i(TAG, "Finished processing thread"); |
||||
} |
||||
} |
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.JavaCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/tutorial1_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -1,113 +0,0 @@ |
||||
package org.opencv.samples.tutorial1; |
||||
|
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.Core; |
||||
import org.opencv.core.CvType; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.Point; |
||||
import org.opencv.core.Scalar; |
||||
import org.opencv.imgproc.Imgproc; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
|
||||
class Sample1View extends SampleViewBase { |
||||
private static final String TAG = "OCVSample::View"; |
||||
|
||||
public static final int VIEW_MODE_RGBA = 0; |
||||
public static final int VIEW_MODE_GRAY = 1; |
||||
public static final int VIEW_MODE_CANNY = 2; |
||||
|
||||
private Mat mYuv; |
||||
private Mat mRgba; |
||||
private Mat mGraySubmat; |
||||
private Mat mIntermediateMat; |
||||
private Bitmap mBitmap; |
||||
private int mViewMode; |
||||
|
||||
public Sample1View(Context context) { |
||||
super(context); |
||||
mViewMode = VIEW_MODE_RGBA; |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
@Override |
||||
protected void onPreviewStarted(int previewWidth, int previewHeight) { |
||||
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")"); |
||||
|
||||
// initialize Mats before usage
|
||||
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1); |
||||
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth()); |
||||
|
||||
mRgba = new Mat(); |
||||
mIntermediateMat = new Mat(); |
||||
|
||||
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888); |
||||
} |
||||
|
||||
@Override |
||||
protected void onPreviewStopped() { |
||||
Log.i(TAG, "called onPreviewStopped"); |
||||
|
||||
if(mBitmap != null) { |
||||
mBitmap.recycle(); |
||||
mBitmap = null; |
||||
} |
||||
|
||||
synchronized (this) { |
||||
// Explicitly deallocate Mats
|
||||
if (mYuv != null) |
||||
mYuv.release(); |
||||
if (mRgba != null) |
||||
mRgba.release(); |
||||
if (mGraySubmat != null) |
||||
mGraySubmat.release(); |
||||
if (mIntermediateMat != null) |
||||
mIntermediateMat.release(); |
||||
|
||||
mYuv = null; |
||||
mRgba = null; |
||||
mGraySubmat = null; |
||||
mIntermediateMat = null; |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
protected Bitmap processFrame(byte[] data) { |
||||
mYuv.put(0, 0, data); |
||||
|
||||
final int viewMode = mViewMode; |
||||
|
||||
switch (viewMode) { |
||||
case VIEW_MODE_GRAY: |
||||
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4); |
||||
break; |
||||
case VIEW_MODE_RGBA: |
||||
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4); |
||||
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3); |
||||
break; |
||||
case VIEW_MODE_CANNY: |
||||
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100); |
||||
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4); |
||||
break; |
||||
} |
||||
|
||||
Bitmap bmp = mBitmap; |
||||
|
||||
try { |
||||
Utils.matToBitmap(mRgba, bmp); |
||||
} catch(Exception e) { |
||||
Log.e("org.opencv.samples.tutorial1", "Utils.matToBitmap() throws an exception: " + e.getMessage()); |
||||
bmp.recycle(); |
||||
bmp = null; |
||||
} |
||||
return bmp; |
||||
} |
||||
|
||||
public void setViewMode(int viewMode) { |
||||
Log.i(TAG, "called setViewMode("+viewMode+")"); |
||||
mViewMode = viewMode; |
||||
} |
||||
|
||||
} |
@ -1,229 +0,0 @@ |
||||
package org.opencv.samples.tutorial1; |
||||
|
||||
import java.io.IOException; |
||||
import java.util.List; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.graphics.ImageFormat; |
||||
import android.graphics.SurfaceTexture; |
||||
import android.hardware.Camera; |
||||
import android.hardware.Camera.PreviewCallback; |
||||
import android.os.Build; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { |
||||
private static final String TAG = "OCVSample::BaseView"; |
||||
|
||||
private Camera mCamera; |
||||
private SurfaceHolder mHolder; |
||||
private int mFrameWidth; |
||||
private int mFrameHeight; |
||||
private byte[] mFrame; |
||||
private volatile boolean mThreadRun; |
||||
private byte[] mBuffer; |
||||
private SurfaceTexture mSf; |
||||
|
||||
|
||||
public SampleViewBase(Context context) { |
||||
super(context); |
||||
mHolder = getHolder(); |
||||
mHolder.addCallback(this); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
public int getFrameWidth() { |
||||
return mFrameWidth; |
||||
} |
||||
|
||||
public int getFrameHeight() { |
||||
return mFrameHeight; |
||||
} |
||||
|
||||
public void setPreview() throws IOException { |
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { |
||||
mSf = new SurfaceTexture(10); |
||||
mCamera.setPreviewTexture( mSf ); |
||||
} |
||||
else |
||||
mCamera.setPreviewDisplay(null); |
||||
} |
||||
|
||||
public boolean openCamera() { |
||||
Log.i(TAG, "Opening Camera"); |
||||
mCamera = null; |
||||
|
||||
try { |
||||
mCamera = Camera.open(); |
||||
} |
||||
catch (Exception e){ |
||||
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage()); |
||||
} |
||||
|
||||
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { |
||||
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { |
||||
try { |
||||
mCamera = Camera.open(camIdx); |
||||
} |
||||
catch (RuntimeException e) { |
||||
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage()); |
||||
} |
||||
} |
||||
} |
||||
|
||||
if(mCamera == null) { |
||||
Log.e(TAG, "Can't open any camera"); |
||||
return false; |
||||
} |
||||
|
||||
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() { |
||||
public void onPreviewFrame(byte[] data, Camera camera) { |
||||
synchronized (SampleViewBase.this) { |
||||
System.arraycopy(data, 0, mFrame, 0, data.length); |
||||
SampleViewBase.this.notify(); |
||||
} |
||||
camera.addCallbackBuffer(mBuffer); |
||||
} |
||||
}); |
||||
|
||||
return true; |
||||
} |
||||
|
||||
public void releaseCamera() { |
||||
Log.i(TAG, "Releasing Camera"); |
||||
mThreadRun = false; |
||||
synchronized (this) { |
||||
if (mCamera != null) { |
||||
mCamera.stopPreview(); |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
onPreviewStopped(); |
||||
} |
||||
|
||||
public synchronized void setupCamera(int width, int height) { |
||||
if (mCamera != null) { |
||||
Log.i(TAG, "Setup Camera - " + width + "x" + height); |
||||
Camera.Parameters params = mCamera.getParameters(); |
||||
List<Camera.Size> sizes = params.getSupportedPreviewSizes(); |
||||
mFrameWidth = width; |
||||
mFrameHeight = height; |
||||
|
||||
// selecting optimal camera preview size
|
||||
{ |
||||
int minDiff = Integer.MAX_VALUE; |
||||
for (Camera.Size size : sizes) { |
||||
if (Math.abs(size.height - height) < minDiff) { |
||||
mFrameWidth = size.width; |
||||
mFrameHeight = size.height; |
||||
minDiff = Math.abs(size.height - height); |
||||
} |
||||
} |
||||
} |
||||
|
||||
params.setPreviewSize(getFrameWidth(), getFrameHeight()); |
||||
|
||||
List<String> FocusModes = params.getSupportedFocusModes(); |
||||
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) |
||||
{ |
||||
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); |
||||
} |
||||
|
||||
mCamera.setParameters(params); |
||||
|
||||
/* Now allocate the buffer */ |
||||
params = mCamera.getParameters(); |
||||
int size = params.getPreviewSize().width * params.getPreviewSize().height; |
||||
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8; |
||||
mBuffer = new byte[size]; |
||||
/* The buffer where the current frame will be copied */ |
||||
mFrame = new byte [size]; |
||||
mCamera.addCallbackBuffer(mBuffer); |
||||
|
||||
/* Notify that the preview is about to be started and deliver preview size */ |
||||
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height); |
||||
|
||||
try { |
||||
setPreview(); |
||||
} catch (IOException e) { |
||||
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e); |
||||
} |
||||
|
||||
/* Now we can start a preview */ |
||||
mCamera.startPreview(); |
||||
} |
||||
} |
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { |
||||
Log.i(TAG, "called surfaceChanged"); |
||||
// stop preview before making changes
|
||||
try { |
||||
mCamera.stopPreview(); |
||||
} catch (Exception e){ |
||||
// ignore: tried to stop a non-existent preview
|
||||
} |
||||
|
||||
// start preview with new settings
|
||||
setupCamera(width, height); |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
(new Thread(this)).start(); |
||||
} |
||||
|
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceDestroyed"); |
||||
} |
||||
|
||||
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */ |
||||
protected abstract Bitmap processFrame(byte[] data); |
||||
|
||||
/** |
||||
* This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called |
||||
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing. |
||||
* @param previewWidth - the width of the preview frames that will be delivered via processFrame |
||||
* @param previewHeight - the height of the preview frames that will be delivered via processFrame |
||||
*/ |
||||
protected abstract void onPreviewStarted(int previewWidtd, int previewHeight); |
||||
|
||||
/** |
||||
* This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed. |
||||
* If the Bitmap object returned via processFrame is cached - it is a good time to recycle it. |
||||
* Any other resources used during the preview can be released. |
||||
*/ |
||||
protected abstract void onPreviewStopped(); |
||||
|
||||
public void run() { |
||||
mThreadRun = true; |
||||
Log.i(TAG, "Started processing thread"); |
||||
while (mThreadRun) { |
||||
Bitmap bmp = null; |
||||
|
||||
synchronized (this) { |
||||
try { |
||||
this.wait(); |
||||
if (!mThreadRun) |
||||
break; |
||||
bmp = processFrame(mFrame); |
||||
} catch (InterruptedException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
if (bmp != null) { |
||||
Canvas canvas = mHolder.lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null); |
||||
mHolder.unlockCanvasAndPost(canvas); |
||||
} |
||||
} |
||||
} |
||||
Log.i(TAG, "Finished processing thread"); |
||||
} |
||||
} |
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.NativeCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/tutorial2_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -1,90 +0,0 @@ |
||||
package org.opencv.samples.tutorial2; |
||||
|
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.Core; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.core.Point; |
||||
import org.opencv.core.Scalar; |
||||
import org.opencv.highgui.Highgui; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.imgproc.Imgproc; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
|
||||
class Sample2View extends SampleCvViewBase { |
||||
private static final String TAG = "OCVSample::View"; |
||||
|
||||
private Mat mRgba; |
||||
private Mat mGray; |
||||
private Mat mIntermediateMat; |
||||
|
||||
public Sample2View(Context context) { |
||||
super(context); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
@Override |
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
synchronized (this) { |
||||
// initialize Mats before usage
|
||||
mGray = new Mat(); |
||||
mRgba = new Mat(); |
||||
mIntermediateMat = new Mat(); |
||||
} |
||||
|
||||
super.surfaceCreated(holder); |
||||
} |
||||
|
||||
@Override |
||||
protected Bitmap processFrame(VideoCapture capture) { |
||||
switch (Sample2NativeCamera.viewMode) { |
||||
case Sample2NativeCamera.VIEW_MODE_GRAY: |
||||
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME); |
||||
Imgproc.cvtColor(mGray, mRgba, Imgproc.COLOR_GRAY2RGBA, 4); |
||||
break; |
||||
case Sample2NativeCamera.VIEW_MODE_RGBA: |
||||
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); |
||||
Core.putText(mRgba, "OpenCV+Android", new Point(10, 50), 3, 1, new Scalar(255, 0, 0, 255), 2); |
||||
break; |
||||
case Sample2NativeCamera.VIEW_MODE_CANNY: |
||||
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME); |
||||
Imgproc.Canny(mGray, mIntermediateMat, 80, 100); |
||||
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4); |
||||
break; |
||||
} |
||||
|
||||
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); |
||||
|
||||
try { |
||||
Utils.matToBitmap(mRgba, bmp); |
||||
return bmp; |
||||
} catch(Exception e) { |
||||
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage()); |
||||
bmp.recycle(); |
||||
return null; |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void run() { |
||||
super.run(); |
||||
|
||||
synchronized (this) { |
||||
// Explicitly deallocate Mats
|
||||
if (mRgba != null) |
||||
mRgba.release(); |
||||
if (mGray != null) |
||||
mGray.release(); |
||||
if (mIntermediateMat != null) |
||||
mIntermediateMat.release(); |
||||
|
||||
mRgba = null; |
||||
mGray = null; |
||||
mIntermediateMat = null; |
||||
} |
||||
} |
||||
} |
@ -1,117 +0,0 @@ |
||||
package org.opencv.samples.tutorial2; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.opencv.core.Size; |
||||
import org.opencv.highgui.VideoCapture; |
||||
import org.opencv.highgui.Highgui; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { |
||||
private static final String TAG = "OCVSample::BaseView"; |
||||
|
||||
private SurfaceHolder mHolder; |
||||
private VideoCapture mCamera; |
||||
|
||||
public SampleCvViewBase(Context context) { |
||||
super(context); |
||||
mHolder = getHolder(); |
||||
mHolder.addCallback(this); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
public synchronized boolean openCamera() { |
||||
Log.i(TAG, "Opening Camera"); |
||||
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); |
||||
if (!mCamera.isOpened()) { |
||||
releaseCamera(); |
||||
Log.e(TAG, "Can't open native camera"); |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
public synchronized void releaseCamera() { |
||||
Log.i(TAG, "Releasing Camera"); |
||||
if (mCamera != null) { |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
|
||||
public synchronized void setupCamera(int width, int height) { |
||||
if (mCamera != null && mCamera.isOpened()) { |
||||
Log.i(TAG, "Setup Camera - " + width + "x" + height); |
||||
List<Size> sizes = mCamera.getSupportedPreviewSizes(); |
||||
int mFrameWidth = width; |
||||
int mFrameHeight = height; |
||||
|
||||
// selecting optimal camera preview size
|
||||
{ |
||||
double minDiff = Double.MAX_VALUE; |
||||
for (Size size : sizes) { |
||||
if (Math.abs(size.height - height) < minDiff) { |
||||
mFrameWidth = (int) size.width; |
||||
mFrameHeight = (int) size.height; |
||||
minDiff = Math.abs(size.height - height); |
||||
} |
||||
} |
||||
} |
||||
|
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth); |
||||
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight); |
||||
} |
||||
} |
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { |
||||
Log.i(TAG, "called surfaceChanged"); |
||||
setupCamera(width, height); |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
(new Thread(this)).start(); |
||||
} |
||||
|
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceDestroyed"); |
||||
} |
||||
|
||||
protected abstract Bitmap processFrame(VideoCapture capture); |
||||
|
||||
public void run() { |
||||
Log.i(TAG, "Started processing thread"); |
||||
while (true) { |
||||
Bitmap bmp = null; |
||||
|
||||
synchronized (this) { |
||||
if (mCamera == null) |
||||
break; |
||||
|
||||
if (!mCamera.grab()) { |
||||
Log.e(TAG, "mCamera.grab() failed"); |
||||
break; |
||||
} |
||||
|
||||
bmp = processFrame(mCamera); |
||||
} |
||||
|
||||
if (bmp != null) { |
||||
Canvas canvas = mHolder.lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null); |
||||
mHolder.unlockCanvasAndPost(canvas); |
||||
} |
||||
bmp.recycle(); |
||||
} |
||||
} |
||||
Log.i(TAG, "Finished processing thread"); |
||||
} |
||||
} |
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.JavaCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/tutorial4_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -1,49 +0,0 @@ |
||||
package org.opencv.samples.tutorial3; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
|
||||
class Sample3View extends SampleViewBase { |
||||
private static final String TAG = "OCVSample::View"; |
||||
|
||||
private int mFrameSize; |
||||
private Bitmap mBitmap; |
||||
private int[] mRGBA; |
||||
|
||||
public Sample3View(Context context) { |
||||
super(context); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
@Override |
||||
protected void onPreviewStarted(int previewWidth, int previewHeight) { |
||||
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")"); |
||||
|
||||
mFrameSize = previewWidth * previewHeight; |
||||
mRGBA = new int[mFrameSize]; |
||||
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888); |
||||
} |
||||
|
||||
@Override |
||||
protected void onPreviewStopped() { |
||||
if(mBitmap != null) { |
||||
mBitmap.recycle(); |
||||
mBitmap = null; |
||||
} |
||||
mRGBA = null; |
||||
} |
||||
|
||||
@Override |
||||
protected Bitmap processFrame(byte[] data) { |
||||
int[] rgba = mRGBA; |
||||
|
||||
FindFeatures(getFrameWidth(), getFrameHeight(), data, rgba); |
||||
|
||||
Bitmap bmp = mBitmap; |
||||
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight()); |
||||
return bmp; |
||||
} |
||||
|
||||
public native void FindFeatures(int width, int height, byte yuv[], int[] rgba); |
||||
} |
@ -1,229 +0,0 @@ |
||||
package org.opencv.samples.tutorial3; |
||||
|
||||
import java.io.IOException; |
||||
import java.util.List; |
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.graphics.ImageFormat; |
||||
import android.graphics.SurfaceTexture; |
||||
import android.hardware.Camera; |
||||
import android.hardware.Camera.PreviewCallback; |
||||
import android.os.Build; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { |
||||
private static final String TAG = "OCVSample::BaseView"; |
||||
|
||||
private Camera mCamera; |
||||
private SurfaceHolder mHolder; |
||||
private int mFrameWidth; |
||||
private int mFrameHeight; |
||||
private byte[] mFrame; |
||||
private volatile boolean mThreadRun; |
||||
private byte[] mBuffer; |
||||
private SurfaceTexture mSf; |
||||
|
||||
|
||||
public SampleViewBase(Context context) { |
||||
super(context); |
||||
mHolder = getHolder(); |
||||
mHolder.addCallback(this); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
public int getFrameWidth() { |
||||
return mFrameWidth; |
||||
} |
||||
|
||||
public int getFrameHeight() { |
||||
return mFrameHeight; |
||||
} |
||||
|
||||
public void setPreview() throws IOException { |
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { |
||||
mSf = new SurfaceTexture(10); |
||||
mCamera.setPreviewTexture( mSf ); |
||||
} |
||||
else |
||||
mCamera.setPreviewDisplay(null); |
||||
} |
||||
|
||||
public boolean openCamera() { |
||||
Log.i(TAG, "Opening Camera"); |
||||
mCamera = null; |
||||
|
||||
try { |
||||
mCamera = Camera.open(); |
||||
} |
||||
catch (Exception e){ |
||||
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage()); |
||||
} |
||||
|
||||
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { |
||||
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { |
||||
try { |
||||
mCamera = Camera.open(camIdx); |
||||
} |
||||
catch (RuntimeException e) { |
||||
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage()); |
||||
} |
||||
} |
||||
} |
||||
|
||||
if(mCamera == null) { |
||||
Log.e(TAG, "Can't open any camera"); |
||||
return false; |
||||
} |
||||
|
||||
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() { |
||||
public void onPreviewFrame(byte[] data, Camera camera) { |
||||
synchronized (SampleViewBase.this) { |
||||
System.arraycopy(data, 0, mFrame, 0, data.length); |
||||
SampleViewBase.this.notify(); |
||||
} |
||||
camera.addCallbackBuffer(mBuffer); |
||||
} |
||||
}); |
||||
|
||||
return true; |
||||
} |
||||
|
||||
public void releaseCamera() { |
||||
Log.i(TAG, "Releasing Camera"); |
||||
mThreadRun = false; |
||||
synchronized (this) { |
||||
if (mCamera != null) { |
||||
mCamera.stopPreview(); |
||||
mCamera.setPreviewCallback(null); |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
onPreviewStopped(); |
||||
} |
||||
|
||||
public synchronized void setupCamera(int width, int height) { |
||||
if (mCamera != null) { |
||||
Log.i(TAG, "Setup Camera - " + width + "x" + height); |
||||
Camera.Parameters params = mCamera.getParameters(); |
||||
List<Camera.Size> sizes = params.getSupportedPreviewSizes(); |
||||
mFrameWidth = width; |
||||
mFrameHeight = height; |
||||
|
||||
// selecting optimal camera preview size
|
||||
{ |
||||
int minDiff = Integer.MAX_VALUE; |
||||
for (Camera.Size size : sizes) { |
||||
if (Math.abs(size.height - height) < minDiff) { |
||||
mFrameWidth = size.width; |
||||
mFrameHeight = size.height; |
||||
minDiff = Math.abs(size.height - height); |
||||
} |
||||
} |
||||
} |
||||
|
||||
params.setPreviewSize(getFrameWidth(), getFrameHeight()); |
||||
|
||||
List<String> FocusModes = params.getSupportedFocusModes(); |
||||
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) |
||||
{ |
||||
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); |
||||
} |
||||
|
||||
mCamera.setParameters(params); |
||||
|
||||
/* Now allocate the buffer */ |
||||
params = mCamera.getParameters(); |
||||
int size = params.getPreviewSize().width * params.getPreviewSize().height; |
||||
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8; |
||||
mBuffer = new byte[size]; |
||||
/* The buffer where the current frame will be copied */ |
||||
mFrame = new byte [size]; |
||||
mCamera.addCallbackBuffer(mBuffer); |
||||
|
||||
/* Notify that the preview is about to be started and deliver preview size */ |
||||
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height); |
||||
|
||||
try { |
||||
setPreview(); |
||||
} catch (IOException e) { |
||||
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e); |
||||
} |
||||
|
||||
/* Now we can start a preview */ |
||||
mCamera.startPreview(); |
||||
} |
||||
} |
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { |
||||
Log.i(TAG, "called surfaceChanged"); |
||||
// stop preview before making changes
|
||||
try { |
||||
mCamera.stopPreview(); |
||||
} catch (Exception e){ |
||||
// ignore: tried to stop a non-existent preview
|
||||
} |
||||
|
||||
// start preview with new settings
|
||||
setupCamera(width, height); |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
(new Thread(this)).start(); |
||||
} |
||||
|
||||
public void surfaceDestroyed(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceDestroyed"); |
||||
} |
||||
|
||||
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */ |
||||
protected abstract Bitmap processFrame(byte[] data); |
||||
|
||||
/** |
||||
* This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called |
||||
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing. |
||||
* @param previewWidth - the width of the preview frames that will be delivered via processFrame |
||||
* @param previewHeight - the height of the preview frames that will be delivered via processFrame |
||||
*/ |
||||
protected abstract void onPreviewStarted(int previewWidtd, int previewHeight); |
||||
|
||||
/** |
||||
* This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed. |
||||
* If the Bitmap object returned via processFrame is cached - it is a good time to recycle it. |
||||
* Any other resources used during the preview can be released. |
||||
*/ |
||||
protected abstract void onPreviewStopped(); |
||||
|
||||
public void run() { |
||||
mThreadRun = true; |
||||
Log.i(TAG, "Started processing thread"); |
||||
while (mThreadRun) { |
||||
Bitmap bmp = null; |
||||
|
||||
synchronized (this) { |
||||
try { |
||||
this.wait(); |
||||
if (!mThreadRun) |
||||
break; |
||||
bmp = processFrame(mFrame); |
||||
} catch (InterruptedException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
if (bmp != null) { |
||||
Canvas canvas = mHolder.lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null); |
||||
mHolder.unlockCanvasAndPost(canvas); |
||||
} |
||||
} |
||||
} |
||||
Log.i(TAG, "Finished processing thread"); |
||||
} |
||||
} |
@ -1,8 +1,8 @@ |
||||
<?xml version="1.0" encoding="UTF-8"?> |
||||
<classpath> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/> |
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/> |
||||
<classpathentry kind="src" path="src"/> |
||||
<classpathentry kind="src" path="gen"/> |
||||
<classpathentry kind="output" path="bin/classes"/> |
||||
</classpath> |
||||
|
@ -0,0 +1,11 @@ |
||||
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" |
||||
xmlns:tools="http://schemas.android.com/tools" |
||||
android:layout_width="match_parent" |
||||
android:layout_height="match_parent" > |
||||
|
||||
<org.opencv.android.JavaCameraView |
||||
android:layout_width="fill_parent" |
||||
android:layout_height="fill_parent" |
||||
android:id="@+id/tutorial4_activity_surface_view" /> |
||||
|
||||
</LinearLayout> |
@ -1,116 +0,0 @@ |
||||
package org.opencv.samples.tutorial4; |
||||
|
||||
import org.opencv.android.Utils; |
||||
import org.opencv.core.CvType; |
||||
import org.opencv.core.Mat; |
||||
import org.opencv.imgproc.Imgproc; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.util.Log; |
||||
|
||||
class Sample4View extends SampleViewBase { |
||||
private static final String TAG = "OCVSample::View"; |
||||
|
||||
public static final int VIEW_MODE_RGBA = 0; |
||||
public static final int VIEW_MODE_GRAY = 1; |
||||
public static final int VIEW_MODE_CANNY = 2; |
||||
public static final int VIEW_MODE_FEATURES = 5; |
||||
|
||||
private Mat mYuv; |
||||
private Mat mRgba; |
||||
private Mat mGraySubmat; |
||||
private Mat mIntermediateMat; |
||||
private Bitmap mBitmap; |
||||
private int mViewMode; |
||||
|
||||
public Sample4View(Context context) { |
||||
super(context); |
||||
} |
||||
|
||||
@Override |
||||
protected void onPreviewStarted(int previewWidth, int previewHeight) { |
||||
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")"); |
||||
|
||||
// initialize Mats before usage
|
||||
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1); |
||||
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth()); |
||||
|
||||
mRgba = new Mat(); |
||||
mIntermediateMat = new Mat(); |
||||
|
||||
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888); |
||||
} |
||||
|
||||
@Override |
||||
protected void onPreviewStopped() { |
||||
Log.i(TAG, "called onPreviewStopped"); |
||||
|
||||
if (mBitmap != null) { |
||||
mBitmap.recycle(); |
||||
mBitmap = null; |
||||
} |
||||
|
||||
synchronized (this) { |
||||
// Explicitly deallocate Mats
|
||||
if (mYuv != null) |
||||
mYuv.release(); |
||||
if (mRgba != null) |
||||
mRgba.release(); |
||||
if (mGraySubmat != null) |
||||
mGraySubmat.release(); |
||||
if (mIntermediateMat != null) |
||||
mIntermediateMat.release(); |
||||
|
||||
mYuv = null; |
||||
mRgba = null; |
||||
mGraySubmat = null; |
||||
mIntermediateMat = null; |
||||
} |
||||
|
||||
} |
||||
|
||||
|
||||
@Override |
||||
protected Bitmap processFrame(byte[] data) { |
||||
mYuv.put(0, 0, data); |
||||
|
||||
final int viewMode = mViewMode; |
||||
|
||||
switch (viewMode) { |
||||
case VIEW_MODE_GRAY: |
||||
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4); |
||||
break; |
||||
case VIEW_MODE_RGBA: |
||||
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4); |
||||
break; |
||||
case VIEW_MODE_CANNY: |
||||
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100); |
||||
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4); |
||||
break; |
||||
case VIEW_MODE_FEATURES: |
||||
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4); |
||||
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr()); |
||||
break; |
||||
} |
||||
|
||||
Bitmap bmp = mBitmap; |
||||
|
||||
try { |
||||
Utils.matToBitmap(mRgba, bmp); |
||||
} catch(Exception e) { |
||||
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage()); |
||||
bmp.recycle(); |
||||
bmp = null; |
||||
} |
||||
|
||||
return bmp; |
||||
} |
||||
|
||||
public native void FindFeatures(long matAddrGr, long matAddrRgba); |
||||
|
||||
public void setViewMode(int viewMode) { |
||||
Log.i(TAG, "called setViewMode("+viewMode+")"); |
||||
mViewMode = viewMode; |
||||
} |
||||
} |
@ -1,229 +0,0 @@ |
||||
package org.opencv.samples.tutorial4; |
||||
|
||||
import java.io.IOException; |
||||
import java.util.List; |
||||
|
||||
import android.content.Context; |
||||
import android.graphics.Bitmap; |
||||
import android.graphics.Canvas; |
||||
import android.graphics.ImageFormat; |
||||
import android.graphics.SurfaceTexture; |
||||
import android.hardware.Camera; |
||||
import android.hardware.Camera.PreviewCallback; |
||||
import android.os.Build; |
||||
import android.util.Log; |
||||
import android.view.SurfaceHolder; |
||||
import android.view.SurfaceView; |
||||
|
||||
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { |
||||
private static final String TAG = "OCVSample::BaseView"; |
||||
|
||||
private Camera mCamera; |
||||
private SurfaceHolder mHolder; |
||||
private int mFrameWidth; |
||||
private int mFrameHeight; |
||||
private byte[] mFrame; |
||||
private volatile boolean mThreadRun; |
||||
private byte[] mBuffer; |
||||
private SurfaceTexture mSf; |
||||
|
||||
|
||||
public SampleViewBase(Context context) { |
||||
super(context); |
||||
mHolder = getHolder(); |
||||
mHolder.addCallback(this); |
||||
Log.i(TAG, "Instantiated new " + this.getClass()); |
||||
} |
||||
|
||||
public int getFrameWidth() { |
||||
return mFrameWidth; |
||||
} |
||||
|
||||
public int getFrameHeight() { |
||||
return mFrameHeight; |
||||
} |
||||
|
||||
public void setPreview() throws IOException { |
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { |
||||
mSf = new SurfaceTexture(10); |
||||
mCamera.setPreviewTexture( mSf ); |
||||
} |
||||
else |
||||
mCamera.setPreviewDisplay(null); |
||||
} |
||||
|
||||
public boolean openCamera() { |
||||
Log.i(TAG, "Opening Camera"); |
||||
mCamera = null; |
||||
|
||||
try { |
||||
mCamera = Camera.open(); |
||||
} |
||||
catch (Exception e){ |
||||
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage()); |
||||
} |
||||
|
||||
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { |
||||
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { |
||||
try { |
||||
mCamera = Camera.open(camIdx); |
||||
} |
||||
catch (RuntimeException e) { |
||||
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage()); |
||||
} |
||||
} |
||||
} |
||||
|
||||
if(mCamera == null) { |
||||
Log.e(TAG, "Can't open any camera"); |
||||
return false; |
||||
} |
||||
|
||||
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() { |
||||
public void onPreviewFrame(byte[] data, Camera camera) { |
||||
synchronized (SampleViewBase.this) { |
||||
System.arraycopy(data, 0, mFrame, 0, data.length); |
||||
SampleViewBase.this.notify(); |
||||
} |
||||
camera.addCallbackBuffer(mBuffer); |
||||
} |
||||
}); |
||||
|
||||
return true; |
||||
} |
||||
|
||||
public void releaseCamera() { |
||||
Log.i(TAG, "Releasing Camera"); |
||||
mThreadRun = false; |
||||
synchronized (this) { |
||||
if (mCamera != null) { |
||||
mCamera.stopPreview(); |
||||
mCamera.release(); |
||||
mCamera = null; |
||||
} |
||||
} |
||||
onPreviewStopped(); |
||||
} |
||||
|
||||
/**
 * (Re)configures the open camera for the given surface size and starts the
 * preview. Chooses the closest supported preview size, enables continuous
 * video focus when available, allocates the callback buffers, notifies the
 * subclass via onPreviewStarted(), and finally starts the preview.
 * No-op when no camera is open.
 *
 * @param width  requested preview width (typically the surface width)
 * @param height requested preview height (typically the surface height)
 */
public synchronized void setupCamera(int width, int height) {
    if (mCamera != null) {
        Log.i(TAG, "Setup Camera - " + width + "x" + height);
        Camera.Parameters params = mCamera.getParameters();
        List<Camera.Size> sizes = params.getSupportedPreviewSizes();
        mFrameWidth = width;
        mFrameHeight = height;

        // selecting optimal camera preview size:
        // pick the supported size whose height is closest to the request.
        {
            int minDiff = Integer.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - height) < minDiff) {
                    mFrameWidth = size.width;
                    mFrameHeight = size.height;
                    minDiff = Math.abs(size.height - height);
                }
            }
        }

        params.setPreviewSize(getFrameWidth(), getFrameHeight());

        List<String> FocusModes = params.getSupportedFocusModes();
        if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
        {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }

        mCamera.setParameters(params);

        /* Now allocate the buffer */
        // Re-read the parameters: the driver may have adjusted the values
        // we asked for, and the buffer must match the actual size/format.
        params = mCamera.getParameters();
        int size = params.getPreviewSize().width * params.getPreviewSize().height;
        size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
        mBuffer = new byte[size];
        /* The buffer where the current frame will be copied */
        mFrame = new byte [size];
        mCamera.addCallbackBuffer(mBuffer);

        /* Notify that the preview is about to be started and deliver preview size */
        onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

        try {
            // Attach an off-screen preview target (SurfaceTexture or null
            // display depending on API level).
            setPreview();
        } catch (IOException e) {
            Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
        }

        /* Now we can start a preview */
        mCamera.startPreview();
    }
}
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { |
||||
Log.i(TAG, "called surfaceChanged"); |
||||
// stop preview before making changes
|
||||
try { |
||||
mCamera.stopPreview(); |
||||
} catch (Exception e){ |
||||
// ignore: tried to stop a non-existent preview
|
||||
} |
||||
|
||||
// start preview with new settings
|
||||
setupCamera(width, height); |
||||
} |
||||
|
||||
public void surfaceCreated(SurfaceHolder holder) { |
||||
Log.i(TAG, "called surfaceCreated"); |
||||
(new Thread(this)).start(); |
||||
} |
||||
|
||||
/**
 * SurfaceHolder callback: the surface is going away. Only logs the event;
 * NOTE(review): camera shutdown appears to be driven by the owner calling
 * releaseCamera() rather than by this callback — confirm against callers.
 */
public void surfaceDestroyed(SurfaceHolder holder) {
    Log.i(TAG, "called surfaceDestroyed");
}
||||
|
||||
/**
 * Converts one raw preview frame into a Bitmap ready for drawing.
 * The returned Bitmap is owned by the child class and shall be released
 * in onPreviewStopped(); returning null causes the frame to be skipped
 * (the drawing loop only draws non-null results).
 *
 * @param data raw frame bytes as delivered by the camera preview callback
 * @return the rendered frame, or null to skip drawing this frame
 */
protected abstract Bitmap processFrame(byte[] data);
||||
|
||||
/** |
||||
* This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called |
||||
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing. |
||||
* @param previewWidth - the width of the preview frames that will be delivered via processFrame |
||||
* @param previewHeight - the height of the preview frames that will be delivered via processFrame |
||||
*/ |
||||
protected abstract void onPreviewStarted(int previewWidtd, int previewHeight); |
||||
|
||||
/**
 * This method is called when preview is stopped. When this method is called
 * the preview has stopped and all processing of frames has already completed.
 * If the Bitmap object returned via processFrame is cached — it is a good
 * time to recycle it. Any other resources used during the preview can be
 * released here as well.
 */
protected abstract void onPreviewStopped();
||||
|
||||
public void run() { |
||||
mThreadRun = true; |
||||
Log.i(TAG, "Started processing thread"); |
||||
while (mThreadRun) { |
||||
Bitmap bmp = null; |
||||
|
||||
synchronized (this) { |
||||
try { |
||||
this.wait(); |
||||
if (!mThreadRun) |
||||
break; |
||||
bmp = processFrame(mFrame); |
||||
} catch (InterruptedException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
if (bmp != null) { |
||||
Canvas canvas = mHolder.lockCanvas(); |
||||
if (canvas != null) { |
||||
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); |
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null); |
||||
mHolder.unlockCanvasAndPost(canvas); |
||||
} |
||||
} |
||||
} |
||||
Log.i(TAG, "Finished processing thread"); |
||||
} |
||||
} |
Loading…
Reference in new issue