Merge pull request #188 from asmorkalov:app_framework

Authored by Andrey Kamaev, committed by OpenCV Buildbot (12 years ago)
commit 03f402892d
7 changed files:
  1. modules/java/android_lib/res/values/attrs.xml (11 changed lines)
  2. modules/java/generator/src/java/android+CameraBridgeViewBase.java (68 changed lines)
  3. modules/java/generator/src/java/android+FpsMeter.java (66 changed lines)
  4. modules/java/generator/src/java/android+JavaCameraView.java (52 changed lines)
  5. modules/java/generator/src/java/android+NativeCameraView.java (20 changed lines)
  6. samples/android/tutorial-1-addopencv/res/layout/tutorial1_surface_view.xml (9 changed lines)
  7. samples/android/tutorial-2-opencvcamera/src/org/opencv/samples/tutorial2/Sample2NativeCamera.java (2 changed lines)

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name = "CameraBridgeViewBase" >
<attr name="show_fps" format="boolean"/>
<attr name="camera_id" format="integer" >
<enum name="any" value="-1" />
<enum name="back" value="0" />
<enum name="front" value="1" />
</attr>
</declare-styleable>
</resources>
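
These attributes are read in the CameraBridgeViewBase constructor further down. For reference, a minimal sketch (not part of this commit) of the same lookup with the TypedArray recycled, using the R.styleable names generated from this file:

    import android.content.Context;
    import android.content.res.TypedArray;
    import android.util.AttributeSet;
    import org.opencv.R;

    // Hypothetical helper: opencv:camera_id="front" resolves to 1, "back" to 0, "any" to -1.
    final class CameraAttrs {
        static int readCameraId(Context context, AttributeSet attrs) {
            TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
            try {
                return a.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1); // default: "any"
            } finally {
                a.recycle(); // unlike the constructor in this diff, release the TypedArray when done
            }
        }
    }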

@@ -2,6 +2,7 @@ package org.opencv.android;
import java.util.List;
import org.opencv.R;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
@@ -11,6 +12,7 @@ import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.AttributeSet;
@@ -26,22 +28,44 @@ import android.view.SurfaceView;
* The clients shall implement CvCameraViewListener.
*/
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
//TODO: add method to control the format in which the frames will be delivered to CvCameraViewListener
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener mListener;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
protected int mFrameWidth;
protected int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected int mPreviewFormat = Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA;
protected int mCameraIndex = -1;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
private Bitmap mCacheBitmap;
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
@@ -71,19 +95,6 @@ public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
}
private static final int STOPPED = 0;
private static final int STARTED = 1;
private static final String TAG = "CameraBridge";
private CvCameraViewListener mListener;
private int mState = STOPPED;
private boolean mEnabled;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
@@ -135,6 +146,25 @@ public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
*
* @param listener
*/
public void setCvCameraViewListener(CvCameraViewListener listener) {
mListener = listener;
}
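
For context, a minimal sketch (not part of this commit) of how a client Activity would drive the listener and FPS-meter API added above. The layout and view id come from the tutorial-1 layout changed later in this diff; the CvCameraViewListener callback signatures and enableView() are assumed from the OpenCV Android samples of this period:

    import org.opencv.android.CameraBridgeViewBase;
    import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
    import org.opencv.core.Mat;
    import android.app.Activity;
    import android.os.Bundle;

    // Hypothetical client activity, shown for illustration only.
    public class CameraActivity extends Activity implements CvCameraViewListener {
        private CameraBridgeViewBase mOpenCvCameraView;

        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.tutorial1_surface_view);
            mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.tutorial1_activity_java_surface_view);
            mOpenCvCameraView.setCvCameraViewListener(this);
            mOpenCvCameraView.enableFpsMeter(); // programmatic equivalent of opencv:show_fps="true"
            // mOpenCvCameraView.enableView() is typically called once the OpenCV library has loaded.
        }

        public void onCameraViewStarted(int width, int height) { /* allocate Mats here */ }
        public void onCameraViewStopped() { /* release Mats here */ }
        public Mat onCameraFrame(Mat inputFrame) { return inputFrame; }
    }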
@@ -272,6 +302,10 @@ public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(mCacheBitmap, (canvas.getWidth() - mCacheBitmap.getWidth()) / 2, (canvas.getHeight() - mCacheBitmap.getHeight()) / 2, null);
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}

@@ -0,0 +1,66 @@
package org.opencv.android;
import java.text.DecimalFormat;
import org.opencv.core.Core;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.Log;
public class FpsMeter {
private static final String TAG = "FpsMeter";
private static final int STEP = 20;
private static final DecimalFormat FPS_FORMAT = new DecimalFormat("0.00");
private int mFramesCouner;
private double mFrequency;
private long mprevFrameTime;
private String mStrfps;
Paint mPaint;
boolean mIsInitialized = false;
int mWidth = 0;
int mHeight = 0;
public void init() {
mFramesCouner = 0;
mFrequency = Core.getTickFrequency();
mprevFrameTime = Core.getTickCount();
mStrfps = "";
mPaint = new Paint();
mPaint.setColor(Color.BLUE);
mPaint.setTextSize(20);
}
public void measure() {
if (!mIsInitialized) {
init();
mIsInitialized = true;
} else {
mFramesCouner++;
if (mFramesCouner % STEP == 0) {
long time = Core.getTickCount();
double fps = STEP * mFrequency / (time - mprevFrameTime);
mprevFrameTime = time;
if (mWidth != 0 && mHeight != 0)
mStrfps = FPS_FORMAT.format(fps) + " FPS@" + Integer.valueOf(mWidth) + "x" + Integer.valueOf(mHeight);
else
mStrfps = FPS_FORMAT.format(fps) + " FPS";
Log.i(TAG, mStrfps);
}
}
}
public void setResolution(int width, int height) {
mWidth = width;
mHeight = height;
}
public void draw(Canvas canvas, float offsetx, float offsety) {
Log.d(TAG, mStrfps);
canvas.drawText(mStrfps, offsetx, offsety, mPaint);
}
}
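
The FpsMeter added here is self-contained. A usage sketch of its call protocol (not part of the commit; the helper class and method names are hypothetical):

    import android.graphics.Canvas;
    import org.opencv.android.FpsMeter;

    // Hypothetical overlay helper: setResolution() when the preview size is known,
    // then measure() once per frame and draw() while the Canvas is locked, as the
    // drawing path in CameraBridgeViewBase above does.
    class FpsOverlay {
        private final FpsMeter mMeter = new FpsMeter();

        void onPreviewSizeKnown(int width, int height) {
            mMeter.setResolution(width, height); // appends "WxH" to the FPS label
        }

        void onFrameDrawn(Canvas canvas) {
            mMeter.measure();            // first call initializes; FPS is recomputed every 20 frames
            mMeter.draw(canvas, 20, 30); // same offsets used in CameraBridgeViewBase above
        }
    }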

@@ -40,6 +40,8 @@ public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
private Thread mThread;
private boolean mStopThread;
protected Camera mCamera;
private SurfaceTexture mSurfaceTexture;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@@ -55,7 +57,9 @@ public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
}
}
private Camera mCamera;
public JavaCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
@@ -69,25 +73,36 @@ public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
synchronized (this) {
mCamera = null;
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if (mCameraIndex == -1) {
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(mCameraIndex) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
mCamera = Camera.open(mCameraIndex);
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
Log.e(TAG, "Camera #" + mCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
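
Net effect of this hunk: with camera_id left at -1 the old "open the first available camera" fallback is kept, while an explicit index goes straight to Camera.open(index) on API 9+. For illustration (not part of the commit), a sketch of selecting the front camera through the new int constructor instead of XML:

    import android.app.Activity;
    import android.os.Bundle;
    import android.view.ViewGroup;
    import org.opencv.android.JavaCameraView;

    // Hypothetical activity: camera index 1 corresponds to "front" in attrs.xml.
    public class FrontCameraActivity extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            JavaCameraView view = new JavaCameraView(this, 1); // 1 == front, 0 == back, -1 == any
            setContentView(view, new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.MATCH_PARENT));
            // view.setCvCameraViewListener(...) and view.enableView() would follow as in the samples.
        }
    }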
@@ -120,6 +135,10 @@ public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
if (mFpsMeter != null) {
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
@@ -217,6 +236,7 @@ public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
releaseCamera();
}
@TargetApi(Build.VERSION_CODES.FROYO)
public void onPreviewFrame(byte[] frame, Camera arg1) {
Log.i(TAG, "Preview Frame received. Need to create MAT and deliver it to clients");
Log.i(TAG, "Frame size is " + frame.length);

@@ -19,7 +19,12 @@ public class NativeCameraView extends CameraBridgeViewBase {
public static final String TAG = "NativeCameraView";
private boolean mStopThread;
private Thread mThread;
private VideoCapture mCamera;
protected VideoCapture mCamera;
public NativeCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public NativeCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
@@ -77,12 +82,17 @@ public class NativeCameraView extends CameraBridgeViewBase {
private boolean initializeCamera(int width, int height) {
synchronized (this) {
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (mCameraIndex == -1)
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
else
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);
if (mCamera == null)
return false;
//TODO: improve error handling
if (mCamera.isOpened() == false)
return false;
java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();
@@ -92,6 +102,10 @@ public class NativeCameraView extends CameraBridgeViewBase {
mFrameWidth = (int)frameSize.width;
mFrameHeight = (int)frameSize.height;
if (mFpsMeter != null) {
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
AllocateCache();
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);

@@ -1,5 +1,6 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
xmlns:opencv="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent" >
@@ -7,12 +8,16 @@
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:visibility="gone"
android:id="@+id/tutorial1_activity_java_surface_view" />
android:id="@+id/tutorial1_activity_java_surface_view"
opencv:show_fps="true"
opencv:camera_id="any" />
<org.opencv.android.NativeCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:visibility="gone"
android:id="@+id/tutorial1_activity_native_surface_view" />
android:id="@+id/tutorial1_activity_native_surface_view"
opencv:show_fps="true"
opencv:camera_id="any" />
</LinearLayout>

@@ -110,7 +110,7 @@ public class Sample2NativeCamera extends Activity implements CvCameraViewListener {
case Sample2NativeCamera.VIEW_MODE_RGBA:
{
inputFrame.copyTo(mRgba);
Core.putText(mRgba, "OpenCV+Android", new Point(10, 50), 3, 1, new Scalar(255, 0, 0, 255), 2);
Core.putText(mRgba, "OpenCV+Android", new Point(10, inputFrame.rows() - 10), 3, 1, new Scalar(255, 0, 0, 255), 2);
} break;
case Sample2NativeCamera.VIEW_MODE_CANNY:
{
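
The only change to the sample is the putText origin: the point is the bottom-left corner of the rendered string, so inputFrame.rows() - 10 pins the label 10 px above the bottom edge at any preview resolution instead of a fixed y = 50 from the top. A drop-in equivalent of the changed line with the magic numbers commented (fontFace 3 is FONT_HERSHEY_COMPLEX):

    // Equivalent to the changed sample line above, for reference only.
    Core.putText(mRgba, "OpenCV+Android",
            new Point(10, inputFrame.rows() - 10), // bottom-left corner of the text; was (10, 50)
            3 /* FONT_HERSHEY_COMPLEX */, 1,       // fontFace, fontScale
            new Scalar(255, 0, 0, 255), 2);        // red in RGBA, thickness 2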
