Merged Android samples from trunk r8423 r8533

pull/13383/head
Andrey Kamaev 13 years ago
parent 0bd287a8f2
commit 50c2443691
  1. samples/android/color-blob-detection/src/org/opencv/samples/colorblobdetect/ColorBlobDetectionActivity.java (2 changed lines)
  2. samples/android/color-blob-detection/src/org/opencv/samples/colorblobdetect/ColorBlobDetectionView.java (159 changed lines)
  3. samples/android/color-blob-detection/src/org/opencv/samples/colorblobdetect/ColorBlobDetector.java (119 changed lines)
  4. samples/android/face-detection/jni/Android.mk (6 changed lines)
  5. samples/android/face-detection/jni/DetectionBaseTracker.h (61 changed lines)
  6. samples/android/face-detection/jni/DetectionBasedTracker_jni.cpp (61 changed lines)
  7. samples/android/face-detection/jni/DetectionBasedTracker_jni.h (61 changed lines)
  8. samples/android/face-detection/src/org/opencv/samples/fd/DetectionBaseTracker.java (52 changed lines)
  9. samples/android/face-detection/src/org/opencv/samples/fd/DetectionBasedTracker.java (52 changed lines)
  10. samples/android/face-detection/src/org/opencv/samples/fd/FdActivity.java (9 changed lines)
  11. samples/android/face-detection/src/org/opencv/samples/fd/FdView.java (95 changed lines)

@@ -9,7 +9,7 @@ import android.view.Window;
 public class ColorBlobDetectionActivity extends Activity {
-    private static final String TAG = "Example/CollorBlobDetection";
+    private static final String TAG = "Example/ColorBlobDetection";
     private ColorBlobDetectionView mView;
     public ColorBlobDetectionActivity()

@@ -1,17 +1,14 @@
 package org.opencv.samples.colorblobdetect;
-import java.util.ArrayList;
-import java.util.Iterator;
 import java.util.List;
 import org.opencv.android.Utils;
 import org.opencv.core.Core;
 import org.opencv.core.CvType;
 import org.opencv.core.Mat;
 import org.opencv.core.MatOfPoint;
-import org.opencv.core.Point;
 import org.opencv.core.Rect;
 import org.opencv.core.Scalar;
+import org.opencv.core.Size;
 import org.opencv.highgui.Highgui;
 import org.opencv.highgui.VideoCapture;
 import org.opencv.imgproc.Imgproc;
@@ -29,24 +26,16 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
     private Mat mRgba;
     private boolean mIsColorSelected = false;
-    private Scalar mSelectedColorRgba = new Scalar(255);
-    private Scalar mSelectedColorHsv = new Scalar(255);
-    // Lower and Upper bounds for range checking in HSV color space
-    private Scalar mLowerBound = new Scalar(0);
-    private Scalar mUpperBound = new Scalar(0);
+    private Scalar mBlobColorRgba = new Scalar(255);
+    private Scalar mBlobColorHsv = new Scalar(255);
+    private ColorBlobDetector mDetector = new ColorBlobDetector();
     private Mat mSpectrum = new Mat();
-    private int mSpectrumScale = 4;
-    // Color radius for range checking in HSV color space
-    private static final Scalar COLOR_RADIUS = new Scalar(25,50,50,0);
-    // Minimum contour area in percent for contours filtering
-    private static final double MIN_CONTOUR_AREA = 0.1;
+    private static Size SPECTRUM_SIZE = new Size(200, 32);
     // Logcat tag
-    private static final String TAG = "Example/CollorBlobDetection";
+    private static final String TAG = "Example/ColorBlobDetection";
+    private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
     public ColorBlobDetectionView(Context context)
@@ -85,63 +74,30 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
         touchedRect.x = (x>4) ? x-4 : 0;
         touchedRect.y = (y>4) ? y-4 : 0;
-        touchedRect.width = (x+4<mRgba.cols()) ? x + 4 - touchedRect.x : mRgba.width() - touchedRect.x;
-        touchedRect.height = (y+4 < mRgba.rows()) ? y + 4 - touchedRect.y : mRgba.rows() - touchedRect.y;
+        touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
+        touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
-        Mat touchedRegionMatRgba = mRgba.submat(touchedRect);
-        Mat touchedRegionMatHsv = new Mat();
-        Imgproc.cvtColor(touchedRegionMatRgba, touchedRegionMatHsv, Imgproc.COLOR_RGB2HSV_FULL);
+        Mat touchedRegionRgba = mRgba.submat(touchedRect);
+        Mat touchedRegionHsv = new Mat();
+        Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
-        mSelectedColorHsv = Core.sumElems(touchedRegionMatHsv);
+        // Calculate average color of touched region
+        mBlobColorHsv = Core.sumElems(touchedRegionHsv);
         int pointCount = touchedRect.width*touchedRect.height;
-        for (int i = 0; i < mSelectedColorHsv.val.length; i++)
+        for (int i = 0; i < mBlobColorHsv.val.length; i++)
         {
-            mSelectedColorHsv.val[i] /= pointCount;
+            mBlobColorHsv.val[i] /= pointCount;
         }
-        Mat pointMapRgba = new Mat();
-        Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3);
-        byte[] buf = {(byte)mSelectedColorHsv.val[0], (byte)mSelectedColorHsv.val[1], (byte)mSelectedColorHsv.val[2]};
-        pointMatHsv.put(0, 0, buf);
-        Imgproc.cvtColor(pointMatHsv, pointMapRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
-        mSelectedColorRgba.val = pointMapRgba.get(0, 0);
+        mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
-        Log.i(TAG, "Touched rgba color: (" + mSelectedColorRgba.val[0] + ", " + mSelectedColorRgba.val[1] +
-                ", " + mSelectedColorRgba.val[2] + ", " + mSelectedColorRgba.val[3] + ")");
+        Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
+                ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
-        double minH = (mSelectedColorHsv.val[0] >= COLOR_RADIUS.val[0]) ? mSelectedColorHsv.val[0]-COLOR_RADIUS.val[0] : 0;
-        double maxH = (mSelectedColorHsv.val[0]+COLOR_RADIUS.val[0] <= 255) ? mSelectedColorHsv.val[0]+COLOR_RADIUS.val[0] : 255;
-        mLowerBound.val[0] = minH;
-        mUpperBound.val[0] = maxH;
-        mLowerBound.val[1] = mSelectedColorHsv.val[1] - COLOR_RADIUS.val[1];
-        mUpperBound.val[1] = mSelectedColorHsv.val[1] + COLOR_RADIUS.val[1];
-        mLowerBound.val[2] = mSelectedColorHsv.val[2] - COLOR_RADIUS.val[2];
-        mUpperBound.val[2] = mSelectedColorHsv.val[2] + COLOR_RADIUS.val[2];
-        Log.d(TAG, "Bounds: " + mLowerBound + "x" + mUpperBound);
+        mDetector.setHsvColor(mBlobColorHsv);
-        Mat spectrumHsv = new Mat(32, (int)(maxH-minH)*mSpectrumScale, CvType.CV_8UC3);
-        for (int i = 0; i < 32; i++)
-        {
-            for (int k = 0; k < mSpectrumScale; k++)
-            {
-                for (int j = 0; j < maxH-minH; j++)
-                {
-                    byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
-                    spectrumHsv.put(i, j*mSpectrumScale + k, tmp);
-                }
-            }
-        }
-        Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
+        Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
         mIsColorSelected = true;
@@ -155,61 +111,17 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
         Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
         if (mIsColorSelected)
         {
-            Mat PyrDownMat = new Mat();
-            Imgproc.pyrDown(mRgba, PyrDownMat);
-            Imgproc.pyrDown(PyrDownMat, PyrDownMat);
-            Mat hsvMat = new Mat();
-            Imgproc.cvtColor(PyrDownMat, hsvMat, Imgproc.COLOR_RGB2HSV_FULL);
-            Mat rangedHsvMat = new Mat();
-            Core.inRange(hsvMat, mLowerBound, mUpperBound, rangedHsvMat);
-            Mat dilatedMat = new Mat();
-            Imgproc.dilate(rangedHsvMat, dilatedMat, new Mat());
-            List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
-            Mat hierarchy = new Mat();
-            Imgproc.findContours(dilatedMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
-            // Find max contour area
-            double maxArea = 0;
-            Iterator<MatOfPoint> it = contours.iterator();
-            while (it.hasNext())
-            {
-                MatOfPoint wrapper = it.next();
-                double area = Imgproc.contourArea(wrapper);
-                if (area > maxArea)
-                    maxArea = area;
-            }
-            // Filter contours by area and resize to fit the original image size
-            List<MatOfPoint> filteredContours = new ArrayList<MatOfPoint>();
-            it = contours.iterator();
-            while (it.hasNext())
-            {
-                MatOfPoint wrapper = it.next();
-                if (Imgproc.contourArea(wrapper) > MIN_CONTOUR_AREA*maxArea);
-                Point[] contour = wrapper.toArray();
-                for (int i = 0; i < contour.length; i++)
-                {
-                    // Original image was pyrDown twice
-                    contour[i].x *= 4;
-                    contour[i].y *= 4;
-                }
-                filteredContours.add(new MatOfPoint(contour));
-            }
-            Imgproc.drawContours(mRgba, filteredContours, -1, new Scalar(255,0,0,255));
-            Mat testColorMat = mRgba.submat(2, 34, 2, 34);
-            testColorMat.setTo(mSelectedColorRgba);
-            Mat testSpectrumMat = mRgba.submat(2, 34, 38, 38 + mSpectrum.cols());
-            mSpectrum.copyTo(testSpectrumMat);
+            mDetector.process(mRgba);
+            List<MatOfPoint> contours = mDetector.getContours();
+            Log.e(TAG, "Contours count: " + contours.size());
+            Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
+            Mat colorLabel = mRgba.submat(2, 34, 2, 34);
+            colorLabel.setTo(mBlobColorRgba);
+            Mat spectrumLabel = mRgba.submat(2, 2 + mSpectrum.rows(), 38, 38 + mSpectrum.cols());
+            mSpectrum.copyTo(spectrumLabel);
         }
         try {
@@ -223,6 +135,15 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
         return bmp;
     }
+    private Scalar converScalarHsv2Rgba(Scalar hsvColor)
+    {
+        Mat pointMatRgba = new Mat();
+        Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
+        Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
+        return new Scalar(pointMatRgba.get(0, 0));
+    }
     @Override
     public void run() {
         super.run();

@@ -0,0 +1,119 @@
+package org.opencv.samples.colorblobdetect;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import org.opencv.core.Core;
+import org.opencv.core.CvType;
+import org.opencv.core.Mat;
+import org.opencv.core.MatOfPoint;
+import org.opencv.core.Scalar;
+import org.opencv.imgproc.Imgproc;
+public class ColorBlobDetector
+{
+    public void setColorRadius(Scalar radius)
+    {
+        mColorRadius = radius;
+    }
+    public void setHsvColor(Scalar hsvColor)
+    {
+        double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
+        double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
+        mLowerBound.val[0] = minH;
+        mUpperBound.val[0] = maxH;
+        mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
+        mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
+        mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
+        mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
+        mLowerBound.val[3] = 0;
+        mUpperBound.val[3] = 255;
+        Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
+        for (int j = 0; j < maxH-minH; j++)
+        {
+            byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
+            spectrumHsv.put(0, j, tmp);
+        }
+        Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
+    }
+    public Mat getSpectrum()
+    {
+        return mSpectrum;
+    }
+    public void setMinContourArea(double area)
+    {
+        mMinContourArea = area;
+    }
+    public void process(Mat rgbaImage)
+    {
+        Mat pyrDownMat = new Mat();
+        Imgproc.pyrDown(rgbaImage, pyrDownMat);
+        Imgproc.pyrDown(pyrDownMat, pyrDownMat);
+        Mat hsvMat = new Mat();
+        Imgproc.cvtColor(pyrDownMat, hsvMat, Imgproc.COLOR_RGB2HSV_FULL);
+        Mat Mask = new Mat();
+        Core.inRange(hsvMat, mLowerBound, mUpperBound, Mask);
+        Mat dilatedMask = new Mat();
+        Imgproc.dilate(Mask, dilatedMask, new Mat());
+        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
+        Mat hierarchy = new Mat();
+        Imgproc.findContours(dilatedMask, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
+        // Find max contour area
+        double maxArea = 0;
+        Iterator<MatOfPoint> each = contours.iterator();
+        while (each.hasNext())
+        {
+            MatOfPoint wrapper = each.next();
+            double area = Imgproc.contourArea(wrapper);
+            if (area > maxArea)
+                maxArea = area;
+        }
+        // Filter contours by area and resize to fit the original image size
+        mContours.clear();
+        each = contours.iterator();
+        while (each.hasNext())
+        {
+            MatOfPoint contour = each.next();
+            if (Imgproc.contourArea(contour) > mMinContourArea*maxArea)
+            {
+                Core.multiply(contour, new Scalar(4,4), contour);
+                mContours.add(contour);
+            }
+        }
+    }
+    public List<MatOfPoint> getContours()
+    {
+        return mContours;
+    }
+    // Lower and Upper bounds for range checking in HSV color space
+    private Scalar mLowerBound = new Scalar(0);
+    private Scalar mUpperBound = new Scalar(0);
+    // Minimum contour area in percent for contours filtering
+    private static double mMinContourArea = 0.1;
+    // Color radius for range checking in HSV color space
+    private Scalar mColorRadius = new Scalar(25,50,50,0);
+    private Mat mSpectrum = new Mat();
+    private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();;
+}
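
For reference, the new ColorBlobDetector is driven entirely from ColorBlobDetectionView: pick an HSV color, call process() on each frame, then draw the returned contours. Below is a minimal sketch of that flow, not the sample itself; the wrapper class name is hypothetical, it is assumed to live in the same package as ColorBlobDetector, and the frame plus the averaged HSV color are assumed to come from the camera and the onTouch handler as in the view code above.

import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class BlobDetectorSketch {
    // rgbaFrame: current camera frame; hsvColor: averaged HSV color of the touched region
    public static void drawBlobs(Mat rgbaFrame, Scalar hsvColor) {
        ColorBlobDetector detector = new ColorBlobDetector();
        detector.setHsvColor(hsvColor);                 // builds the HSV range and the spectrum strip
        detector.process(rgbaFrame);                    // pyrDown x2, inRange, dilate, findContours
        List<MatOfPoint> contours = detector.getContours();
        Imgproc.drawContours(rgbaFrame, contours, -1, new Scalar(255, 0, 0, 255));
    }
}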

@@ -12,10 +12,10 @@ else
     include $(OPENCV_MK_PATH)
 endif
-LOCAL_SRC_FILES := DetectionBaseTracker.cpp
+LOCAL_SRC_FILES := DetectionBasedTracker_jni.cpp
 LOCAL_C_INCLUDES := $(LOCAL_PATH)
 LOCAL_LDLIBS += -llog -ldl
-LOCAL_MODULE := detection_base_tacker
+LOCAL_MODULE := detection_based_tacker
 include $(BUILD_SHARED_LIBRARY)

@@ -1,61 +0,0 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class org_opencv_samples_fd_DetectionBaseTracker */
-#ifndef _Included_org_opencv_samples_fd_DetectionBaseTracker
-#define _Included_org_opencv_samples_fd_DetectionBaseTracker
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     org_opencv_samples_fd_DetectionBaseTracker
- * Method:    nativeCreateObject
- * Signature: (Ljava/lang/String;F)J
- */
-JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeCreateObject
-  (JNIEnv *, jclass, jstring, jint);
-/*
- * Class:     org_opencv_samples_fd_DetectionBaseTracker
- * Method:    nativeDestroyObject
- * Signature: (J)V
- */
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDestroyObject
-  (JNIEnv *, jclass, jlong);
-/*
- * Class:     org_opencv_samples_fd_DetectionBaseTracker
- * Method:    nativeStart
- * Signature: (J)V
- */
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStart
-  (JNIEnv *, jclass, jlong);
-/*
- * Class:     org_opencv_samples_fd_DetectionBaseTracker
- * Method:    nativeStop
- * Signature: (J)V
- */
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStop
-  (JNIEnv *, jclass, jlong);
-/*
- * Class:     org_opencv_samples_fd_DetectionBaseTracker
- * Method:    nativeSetFaceSize
- * Signature: (JI)V
- */
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeSetFaceSize
-  (JNIEnv *, jclass, jlong, jint);
-/*
- * Class:     org_opencv_samples_fd_DetectionBaseTracker
- * Method:    nativeDetect
- * Signature: (JJJ)V
- */
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDetect
-  (JNIEnv *, jclass, jlong, jlong, jlong);
-#ifdef __cplusplus
-}
-#endif
-#endif

@@ -1,5 +1,5 @@
-#include <DetectionBaseTracker.h>
+#include <DetectionBasedTracker_jni.h>
 #include <opencv2/core/core.hpp>
 #include <opencv2/contrib/detection_based_tracker.hpp>
 #include <string>
@@ -13,14 +13,12 @@
 using namespace std;
 using namespace cv;
-vector<Rect> RectFaces;
 inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
 {
     mat = Mat(v_rect, true);
 }
-JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeCreateObject
+JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject
 (JNIEnv * jenv, jclass jobj, jstring jFileName, jint faceSize)
 {
     const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
@@ -42,11 +40,18 @@ JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeCr
             je = jenv->FindClass("java/lang/Exception");
         jenv->ThrowNew(je, e.what());
     }
+    catch (...)
+    {
+        LOGD("nativeCreateObject catched unknown exception");
+        jclass je = jenv->FindClass("java/lang/Exception");
+        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+        return 0;
+    }
     return result;
 }
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDestroyObject
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject
 (JNIEnv * jenv, jclass jobj, jlong thiz)
 {
     try
@@ -62,9 +67,15 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDes
             je = jenv->FindClass("java/lang/Exception");
         jenv->ThrowNew(je, e.what());
     }
+    catch (...)
+    {
+        LOGD("nativeDestroyObject catched unknown exception");
+        jclass je = jenv->FindClass("java/lang/Exception");
+        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+    }
 }
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStart
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart
 (JNIEnv * jenv, jclass jobj, jlong thiz)
 {
     try
@@ -79,10 +90,15 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeSta
             je = jenv->FindClass("java/lang/Exception");
         jenv->ThrowNew(je, e.what());
     }
+    catch (...)
+    {
+        LOGD("nativeStart catched unknown exception");
+        jclass je = jenv->FindClass("java/lang/Exception");
+        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+    }
 }
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeStop
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop
 (JNIEnv * jenv, jclass jobj, jlong thiz)
 {
     try
@@ -97,9 +113,15 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeSto
             je = jenv->FindClass("java/lang/Exception");
         jenv->ThrowNew(je, e.what());
     }
+    catch (...)
+    {
+        LOGD("nativeStop catched unknown exception");
+        jclass je = jenv->FindClass("java/lang/Exception");
+        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+    }
 }
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeSetFaceSize
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize
 (JNIEnv * jenv, jclass jobj, jlong thiz, jint faceSize)
 {
     try
@@ -120,15 +142,22 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeSet
         if(!je)
             je = jenv->FindClass("java/lang/Exception");
         jenv->ThrowNew(je, e.what());
     }
+    catch (...)
+    {
+        LOGD("nativeSetFaceSize catched unknown exception");
+        jclass je = jenv->FindClass("java/lang/Exception");
+        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+    }
 }
-JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDetect
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect
 (JNIEnv * jenv, jclass jobj, jlong thiz, jlong imageGray, jlong faces)
 {
     try
     {
+        vector<Rect> RectFaces;
         ((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
         ((DetectionBasedTracker*)thiz)->getObjects(RectFaces);
         vector_Rect_to_Mat(RectFaces, *((Mat*)faces));
@@ -137,8 +166,14 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBaseTracker_nativeDet
     {
         LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
         jclass je = jenv->FindClass("org/opencv/core/CvException");
         if(!je)
             je = jenv->FindClass("java/lang/Exception");
         jenv->ThrowNew(je, e.what());
     }
+    catch (...)
+    {
+        LOGD("nativeDetect catched unknown exception");
+        jclass je = jenv->FindClass("java/lang/Exception");
+        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+    }
 }

@@ -0,0 +1,61 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_opencv_samples_fd_DetectionBasedTracker */
+#ifndef _Included_org_opencv_samples_fd_DetectionBasedTracker
+#define _Included_org_opencv_samples_fd_DetectionBasedTracker
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_opencv_samples_fd_DetectionBasedTracker
+ * Method:    nativeCreateObject
+ * Signature: (Ljava/lang/String;F)J
+ */
+JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject
+  (JNIEnv *, jclass, jstring, jint);
+/*
+ * Class:     org_opencv_samples_fd_DetectionBasedTracker
+ * Method:    nativeDestroyObject
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject
+  (JNIEnv *, jclass, jlong);
+/*
+ * Class:     org_opencv_samples_fd_DetectionBasedTracker
+ * Method:    nativeStart
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart
+  (JNIEnv *, jclass, jlong);
+/*
+ * Class:     org_opencv_samples_fd_DetectionBasedTracker
+ * Method:    nativeStop
+ * Signature: (J)V
+ */
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop
+  (JNIEnv *, jclass, jlong);
+/*
+ * Class:     org_opencv_samples_fd_DetectionBasedTracker
+ * Method:    nativeSetFaceSize
+ * Signature: (JI)V
+ */
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize
+  (JNIEnv *, jclass, jlong, jint);
+/*
+ * Class:     org_opencv_samples_fd_DetectionBasedTracker
+ * Method:    nativeDetect
+ * Signature: (JJJ)V
+ */
+JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect
+  (JNIEnv *, jclass, jlong, jlong, jlong);
+#ifdef __cplusplus
+}
+#endif
+#endif

@@ -1,52 +0,0 @@
-package org.opencv.samples.fd;
-import org.opencv.core.Mat;
-import org.opencv.core.MatOfRect;
-public class DetectionBaseTracker
-{
-    public DetectionBaseTracker(String filename, int faceSize)
-    {
-        mNativeObj = nativeCreateObject(filename, faceSize);
-    }
-    public void start()
-    {
-        nativeStart(mNativeObj);
-    }
-    public void stop()
-    {
-        nativeStop(mNativeObj);
-    }
-    public void setMinFaceSize(int faceSize)
-    {
-        nativeSetFaceSize(mNativeObj, faceSize);
-    }
-    public void detect(Mat imageGray, MatOfRect faces)
-    {
-        nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
-    }
-    public void release()
-    {
-        nativeDestroyObject(mNativeObj);
-        mNativeObj = 0;
-    }
-    protected long mNativeObj = 0;
-    protected static native long nativeCreateObject(String filename, int faceSize);
-    protected static native void nativeDestroyObject(long thiz);
-    protected static native void nativeStart(long thiz);
-    protected static native void nativeStop(long thiz);
-    protected static native void nativeSetFaceSize(long thiz, int faceSize);
-    protected static native void nativeDetect(long thiz, long inputImage, long resultMat);
-    static
-    {
-        System.loadLibrary("detection_base_tacker");
-    }
-}

@@ -0,0 +1,52 @@
+package org.opencv.samples.fd;
+import org.opencv.core.Mat;
+import org.opencv.core.MatOfRect;
+public class DetectionBasedTracker
+{
+    public DetectionBasedTracker(String cascadeName, int minFaceSize)
+    {
+        mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
+    }
+    public void start()
+    {
+        nativeStart(mNativeObj);
+    }
+    public void stop()
+    {
+        nativeStop(mNativeObj);
+    }
+    public void setMinFaceSize(int size)
+    {
+        nativeSetFaceSize(mNativeObj, size);
+    }
+    public void detect(Mat imageGray, MatOfRect faces)
+    {
+        nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
+    }
+    public void release()
+    {
+        nativeDestroyObject(mNativeObj);
+        mNativeObj = 0;
+    }
+    private long mNativeObj = 0;
+    private static native long nativeCreateObject(String cascadeName, int minFaceSize);
+    private static native void nativeDestroyObject(long thiz);
+    private static native void nativeStart(long thiz);
+    private static native void nativeStop(long thiz);
+    private static native void nativeSetFaceSize(long thiz, int size);
+    private static native void nativeDetect(long thiz, long inputImage, long faces);
+    static
+    {
+        System.loadLibrary("detection_based_tacker");
+    }
+}
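
For reference, a compressed sketch of how the renamed Java wrapper is meant to be driven; in the sample, FdView keeps one long-lived instance rather than creating and releasing one per call. The helper class name is hypothetical, it is assumed to sit in the same package as DetectionBasedTracker (so the static loadLibrary block above has already run), and the cascade path and grayscale frame are assumed inputs.

import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;

public class TrackerSketch {
    // cascadePath: path to an extracted cascade file; grayFrame: current grayscale camera frame
    public static Rect[] detectOnce(String cascadePath, Mat grayFrame) {
        DetectionBasedTracker tracker = new DetectionBasedTracker(cascadePath, 0);
        tracker.setMinFaceSize(64);        // minimum face size in pixels
        tracker.start();                   // start the native tracking thread
        MatOfRect faces = new MatOfRect();
        tracker.detect(grayFrame, faces);  // fills 'faces' via nativeDetect
        tracker.stop();
        tracker.release();
        return faces.toArray();
    }
}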

@@ -26,8 +26,8 @@ public class FdActivity extends Activity {
     public FdActivity() {
         Log.i(TAG, "Instantiated new " + this.getClass());
         mDetectorName = new String[2];
-        mDetectorName[0] = "Cascade";
-        mDetectorName[1] = "DBT";
+        mDetectorName[FdView.JAVA_DETECTOR] = "Java";
+        mDetectorName[FdView.NATIVE_DETECTOR] = "Native (tracking)";
     }
     @Override
@@ -62,7 +62,8 @@ public class FdActivity extends Activity {
         super.onCreate(savedInstanceState);
         requestWindowFeature(Window.FEATURE_NO_TITLE);
         mView = new FdView(this);
-        mView.setDtetectorType(mDetectorType);
+        mView.setDetectorType(mDetectorType);
+        mView.setMinFaceSize(0.2f);
         setContentView(mView);
     }
@@ -93,7 +94,7 @@ public class FdActivity extends Activity {
         {
             mDetectorType = (mDetectorType + 1) % mDetectorName.length;
             item.setTitle(mDetectorName[mDetectorType]);
-            mView.setDtetectorType(mDetectorType);
+            mView.setDetectorType(mDetectorType);
         }
         return true;
     }

@@ -22,45 +22,44 @@ import android.util.Log;
 import android.view.SurfaceHolder;
 class FdView extends SampleCvViewBase {
     private static final String TAG = "Sample::FdView";
     private Mat mRgba;
     private Mat mGray;
     private File mCascadeFile;
-    private CascadeClassifier mCascade;
-    private DetectionBaseTracker mTracker;
+    private CascadeClassifier mJavaDetector;
+    private DetectionBasedTracker mNativeDetector;
-    public final int CASCADE_DETECTOR = 0;
-    public final int DBT_DETECTOR = 1;
-    private int mDetectorType = CASCADE_DETECTOR;
+    private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
+    public static final int JAVA_DETECTOR = 0;
+    public static final int NATIVE_DETECTOR = 1;
+    private int mDetectorType = JAVA_DETECTOR;
-    public static int mFaceSize = 200;
+    private float mRelativeFaceSize = 0;
+    private int mAbsoluteFaceSize = 0;
     public void setMinFaceSize(float faceSize)
     {
-        int height = mGray.rows();
-        if (Math.round(height * faceSize) > 0);
-        {
-            mFaceSize = Math.round(height * faceSize);
-        }
-        mTracker.setMinFaceSize(mFaceSize);
+        mRelativeFaceSize = faceSize;
+        mAbsoluteFaceSize = 0;
     }
-    public void setDtetectorType(int type)
+    public void setDetectorType(int type)
     {
         if (mDetectorType != type)
         {
             mDetectorType = type;
-            if (type == DBT_DETECTOR)
+            if (type == NATIVE_DETECTOR)
             {
-                Log.i(TAG, "Detection Base Tracker enabled");
-                mTracker.start();
+                Log.i(TAG, "Detection Based Tracker enabled");
+                mNativeDetector.start();
             }
             else
             {
-                Log.i(TAG, "Cascade detectior enabled");
-                mTracker.stop();
+                Log.i(TAG, "Cascade detector enabled");
+                mNativeDetector.stop();
             }
         }
     }
@@ -82,14 +81,14 @@ class FdView extends SampleCvViewBase {
             is.close();
             os.close();
-            mCascade = new CascadeClassifier(mCascadeFile.getAbsolutePath());
-            if (mCascade.empty()) {
+            mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
+            if (mJavaDetector.empty()) {
                 Log.e(TAG, "Failed to load cascade classifier");
-                mCascade = null;
+                mJavaDetector = null;
             } else
                 Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
-            mTracker = new DetectionBaseTracker(mCascadeFile.getAbsolutePath(), 0);
+            mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
             cascadeDir.delete();
@@ -115,37 +114,49 @@ class FdView extends SampleCvViewBase {
         capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
         capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
+        if (mAbsoluteFaceSize == 0)
+        {
+            int height = mGray.rows();
+            if (Math.round(height * mRelativeFaceSize) > 0);
+            {
+                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
+            }
+            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
+        }
         MatOfRect faces = new MatOfRect();
-        if (mDetectorType == CASCADE_DETECTOR)
+        if (mDetectorType == JAVA_DETECTOR)
         {
-            if (mCascade != null)
-                mCascade.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
-                        , new Size(mFaceSize, mFaceSize), new Size());
+            if (mJavaDetector != null)
+                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
+                        , new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
         }
-        else if (mDetectorType == DBT_DETECTOR)
+        else if (mDetectorType == NATIVE_DETECTOR)
         {
-            if (mTracker != null)
-                mTracker.detect(mGray, faces);
+            if (mNativeDetector != null)
+                mNativeDetector.detect(mGray, faces);
         }
         else
         {
            Log.e(TAG, "Detection method is not selected!");
         }
-        for (Rect r : faces.toArray())
-            Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
+        Rect[] facesArray = faces.toArray();
+        for (int i = 0; i < facesArray.length; i++)
+            Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
-        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.RGB_565/*.ARGB_8888*/);
+        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
         try {
             Utils.matToBitmap(mRgba, bmp);
-            return bmp;
         } catch(Exception e) {
-            Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
+            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
             bmp.recycle();
-            return null;
+            bmp = null;
         }
+        return bmp;
     }
     @Override
@@ -160,8 +171,8 @@ class FdView extends SampleCvViewBase {
             mGray.release();
         if (mCascadeFile != null)
             mCascadeFile.delete();
-        if (mTracker != null)
-            mTracker.release();
+        if (mNativeDetector != null)
+            mNativeDetector.release();
         mRgba = null;
         mGray = null;
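
The face-size handling above is the main behavioral change in FdView: setMinFaceSize() now only stores a relative fraction, and the absolute pixel size is computed lazily on the first processed frame, once the frame height is known, then forwarded to the native tracker. A condensed sketch of that intent (helper class name is hypothetical; it assumes the same package as DetectionBasedTracker and a non-null tracker instance):

import org.opencv.core.Mat;

class FaceSizeHelper {
    private float mRelativeFaceSize = 0.2f; // set via setMinFaceSize(0.2f) from FdActivity
    private int mAbsoluteFaceSize = 0;      // computed lazily from the frame height

    // Returns the minimum face size in pixels for the given frame, updating the native tracker once.
    int absoluteFaceSize(Mat grayFrame, DetectionBasedTracker nativeDetector) {
        if (mAbsoluteFaceSize == 0 && Math.round(grayFrame.rows() * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(grayFrame.rows() * mRelativeFaceSize);
            nativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }
        return mAbsoluteFaceSize;
    }
}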
