Android接入OpenCv實現人臉識別
導入OpenCv
獲取資源
下載地址:https://opencv.org/releases/
文件目錄:
opencv-4.1.0-android-sdk/OpenCV-android-sdk/sdk/native
--- jni/include 頭文件
--- libs 動態庫
Cmake
cmake_minimum_required(VERSION 3.4.1)

# JNI bridge loaded from Java via System.loadLibrary("native-lib").
add_library(
        native-lib
        SHARED
        native-lib.cpp)

# OpenCV headers copied from OpenCV-android-sdk/sdk/native/jni/include.
include_directories(include)

# Import the prebuilt OpenCV shared library for the current ABI as a proper
# IMPORTED target instead of smuggling a raw -L flag through CMAKE_CXX_FLAGS:
# the path stays visible to CMake and a missing .so fails at configure time.
add_library(opencv_java4 SHARED IMPORTED)
set_target_properties(opencv_java4 PROPERTIES
        IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI}/libopencv_java4.so)

# Android NDK log library for __android_log_print.
find_library(
        log-lib
        log)

target_link_libraries(
        native-lib
        opencv_java4
        android
        ${log-lib})
build.gradle
android{
...
    // Tell the Android Gradle plugin where the prebuilt OpenCV .so files
    // live so they get packaged into the APK's jniLibs.
    sourceSets {
        main {
            jniLibs.srcDirs = ['src/main/cpp/libs']
        }
    }
}
人臉識別
獲取人臉識別模型
OpenCV-android-sdk/sdk/etc/lbpcascades/lbpcascade_frontalface.xml
將文件放在assets目錄下,然後拷貝到應用私有目錄中
/**
 * Copies an asset file into the app's private files directory so native code
 * can open it through an absolute filesystem path (assets are not plain files).
 *
 * @param context context used to resolve the assets and the files dir
 * @param path    asset-relative path of the file to copy
 */
public static void copyAssets(Context context, String path) {
    File model = new File(path);
    File file = new File(context.getFilesDir(), model.getName());
    if (file.exists()) {
        // Remove any stale copy so the asset version always wins.
        file.delete();
    }
    // try-with-resources closes both streams even when read/write throws;
    // the original leaked both streams on any exception.
    try (InputStream inputStream = context.getAssets().open(path);
         FileOutputStream fos = new FileOutputStream(file)) {
        int len;
        byte[] b = new byte[2048];
        while ((len = inputStream.read(b)) != -1) {
            fos.write(b, 0, len);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
打開攝像頭
<uses-permission android:name="android.permission.CAMERA"/>
...
/**
 * Thin wrapper around the legacy {@link Camera} API: opens a camera, streams
 * NV21 preview frames through a reusable callback buffer, and supports
 * switching between front and back cameras.
 */
public class CameraHelper implements Camera.PreviewCallback {
    // Fixed preview size requested from the camera; the native layer assumes
    // these dimensions when interpreting the NV21 buffer.
    public static final int WIDTH = 640;
    public static final int HEIGHT = 480;
    private int mCameraId;
    private Camera mCamera;
    private byte[] buffer;
    private Camera.PreviewCallback mPreviewCallback;

    /**
     * @param cameraId Camera.CameraInfo.CAMERA_FACING_FRONT or _BACK
     */
    public CameraHelper(int cameraId) {
        mCameraId = cameraId;
    }

    /** Toggles front/back camera and restarts the preview. */
    public void switchCamera() {
        if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        } else {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        }
        stopPreview();
        startPreview();
    }

    public int getCameraId() {
        return mCameraId;
    }

    /** Stops the preview and releases the camera; safe to call repeatedly. */
    public void stopPreview() {
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    /** Opens the camera and starts streaming NV21 frames into {@link #buffer}. */
    public void startPreview() {
        try {
            mCamera = Camera.open(mCameraId);
            Camera.Parameters parameters = mCamera.getParameters();
            parameters.setPreviewFormat(ImageFormat.NV21);
            parameters.setPreviewSize(WIDTH, HEIGHT);
            setFocusMode(parameters);
            mCamera.setParameters(parameters);
            // NV21 is 12 bits/pixel: full-res Y plane + half-res interleaved VU.
            buffer = new byte[WIDTH * HEIGHT * 3 / 2];
            mCamera.addCallbackBuffer(buffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            // Dummy off-screen texture (the GL texture name is arbitrary):
            // the camera requires a preview target, but the frames are drawn
            // by native code onto the SurfaceView instead.
            SurfaceTexture surfaceTexture = new SurfaceTexture(11);
            mCamera.setPreviewTexture(surfaceTexture);
            mCamera.startPreview();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    public void setPreviewCallback(Camera.PreviewCallback previewCallback) {
        mPreviewCallback = previewCallback;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Guard against frames delivered before a consumer is registered;
        // the original dereferenced mPreviewCallback unconditionally (NPE).
        if (mPreviewCallback != null) {
            mPreviewCallback.onPreviewFrame(data, camera);
        }
        // Return the buffer to the camera so the next frame can be captured.
        camera.addCallbackBuffer(buffer);
    }

    /** Enables continuous autofocus when the hardware supports it. */
    private void setFocusMode(Camera.Parameters parameters) {
        List<String> focusModes = parameters.getSupportedFocusModes();
        if (focusModes != null
                && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        }
    }
}
SurfaceView 顯示
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen preview: the SurfaceView fills the window and the native
     layer renders detected-face rectangles onto its Surface; the button bar
     overlays it at the top of the RelativeLayout. -->
<RelativeLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical"
    tools:context=".MainActivity">
    <!-- Render target handed to native code via OpenCvJni.setSurface(). -->
    <SurfaceView
        android:id="@+id/surfaceView"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>
    <LinearLayout
        android:layout_width="match_parent"
        android:orientation="horizontal"
        android:layout_height="wrap_content">
        <!-- Dispatches to MainActivity.switchCamera(View) via android:onClick. -->
        <Button
            android:text="切換攝像頭"
            android:onClick="switchCamera"
            android:layout_width="match_parent"
            android:layout_height="wrap_content" />
    </LinearLayout>
</RelativeLayout>
/**
 * Wires the camera preview to the native OpenCV face detector: camera frames
 * go down through {@link OpenCvJni#postData}, and the annotated image is drawn
 * back onto the SurfaceView's Surface by native code.
 */
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback, Camera.PreviewCallback {
    private OpenCvJni openCvJni;
    private CameraHelper cameraHelper;
    int cameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        openCvJni = new OpenCvJni();
        SurfaceView surfaceView = findViewById(R.id.surfaceView);
        surfaceView.getHolder().addCallback(this);
        cameraHelper = new CameraHelper(cameraId);
        cameraHelper.setPreviewCallback(this);
        // Copies the cascade model out of assets into getFilesDir().
        Utils.copyAssets(this, "lbpcascade_frontalface.xml");
    }

    @Override
    protected void onResume() {
        super.onResume();
        // BUG FIX: copyAssets() writes the model into the app's private files
        // dir, but the original built this path from the external storage
        // root, so init() always received a non-existent file. Load from the
        // same directory the copy was written to.
        String path = new File(getFilesDir(),
                "lbpcascade_frontalface.xml").getAbsolutePath();
        cameraHelper.startPreview();
        openCvJni.init(path);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Release the camera when backgrounded; otherwise it stays locked and
        // other apps (and our own next onResume) cannot open it.
        cameraHelper.stopPreview();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // Forward each NV21 frame to native code for detection + rendering.
        openCvJni.postData(data, CameraHelper.WIDTH, CameraHelper.HEIGHT, cameraId);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // Hand the (re)created Surface to native code as the render target.
        openCvJni.setSurface(holder.getSurface());
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    /** android:onClick handler for the "switch camera" button. */
    public void switchCamera(View view) {
        cameraHelper.switchCamera();
        cameraId = cameraHelper.getCameraId();
    }
}
溝通android和OpenCv之Java層
/** JNI facade over the native OpenCV face-detection code in native-lib. */
public class OpenCvJni {
    static {
        // Loads libnative-lib.so built by the CMake script above.
        System.loadLibrary("native-lib");
    }

    // Creates and starts the DetectionBasedTracker from a cascade file path.
    public native void init(String path) ;

    // Pushes one NV21 preview frame; native code detects faces, draws
    // rectangles, and renders the frame to the Surface set via setSurface().
    public native void postData(byte[] data, int width, int height, int cameraId);

    // Sets/replaces the ANativeWindow the native renderer draws into.
    public native void setSurface(Surface surface);
}
溝通android和OpenCv之C++層
#include <jni.h>
#include <string>
#include "opencv2/opencv.hpp"
#include <android/native_window_jni.h>
// Render target obtained from the Java Surface; owned here, replaced in
// setSurface() and written to in postData().
ANativeWindow *window = 0;
using namespace cv;
// Face tracker created by init(); stays 0 until init() has run.
DetectionBasedTracker *tracker = 0;
// Adapts a cv::CascadeClassifier to the IDetector interface that
// DetectionBasedTracker expects; the scaleFactor/minNeighbours/minObjSize/
// maxObjSize fields come from the IDetector base class.
class CascadeDetectorAdapter : public DetectionBasedTracker::IDetector {
public:
    // Takes shared ownership of the classifier loaded from the cascade file.
    CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector) :
            IDetector(),
            Detector(detector) {
    }

    // Runs multi-scale detection on a grayscale frame, filling `objects`
    // with face rectangles. `override` added so signature drift against the
    // base class becomes a compile error instead of a silent non-override.
    void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects) override {
        Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize,
                                   maxObjSize);
    }

    virtual ~CascadeDetectorAdapter() {
    }

private:
    CascadeDetectorAdapter();  // no default construction: a classifier is required
    cv::Ptr<cv::CascadeClassifier> Detector;
};
extern "C"
JNIEXPORT void JNICALL
// Builds and starts the face tracker from a cascade-classifier XML file path.
// Called from Java on every onResume.
Java_com_barray_opencvlib_OpenCvJni_init(JNIEnv *env, jobject thiz, jstring path_) {
    const char *path = env->GetStringUTFChars(path_, 0);
    // init() runs on every onResume: stop and free any previous tracker so
    // repeated calls do not leak it (the original just overwrote the pointer,
    // leaving the old tracker's background thread running).
    if (tracker) {
        tracker->stop();
        delete tracker;
        tracker = 0;
    }
    // Two detectors share one cascade file: the main detector scans whole
    // frames, the tracking detector re-detects around previously found faces.
    Ptr<CascadeClassifier> mainClassifier = makePtr<CascadeClassifier>(path);
    Ptr<CascadeDetectorAdapter> mainDetector = makePtr<CascadeDetectorAdapter>(mainClassifier);
    Ptr<CascadeClassifier> trackClassifier = makePtr<CascadeClassifier>(path);
    Ptr<CascadeDetectorAdapter> trackingDetector = makePtr<CascadeDetectorAdapter>(trackClassifier);
    DetectionBasedTracker::Parameters DetectorParams;
    tracker = new DetectionBasedTracker(mainDetector, trackingDetector, DetectorParams);
    tracker->run();
    env->ReleaseStringUTFChars(path_, path);
}
extern "C"
JNIEXPORT void JNICALL
// Processes one NV21 camera frame: converts to RGBA, rotates to portrait,
// runs face detection, draws rectangles, and blits the result to `window`.
Java_com_barray_opencvlib_OpenCvJni_postData(JNIEnv *env, jobject thiz, jbyteArray data_, jint width,
                                             jint height, jint cameraId) {
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    // NV21 layout: height rows of Y followed by height/2 rows of interleaved VU.
    Mat src(height * 3 / 2, width, CV_8UC1, data);
    // cvtColor reallocates src as a height x width RGBA image, so the Java
    // buffer is no longer referenced after this call.
    cvtColor(src, src, COLOR_YUV2RGBA_NV21);
    if (cameraId == 1) {
        // Front camera: rotate to portrait and un-mirror.
        rotate(src, src, ROTATE_90_COUNTERCLOCKWISE);
        flip(src, src, 1);
    } else {
        rotate(src, src, ROTATE_90_CLOCKWISE);
    }
    // Detection runs on an equalized grayscale copy for robustness to lighting.
    Mat gray;
    cvtColor(src, gray, COLOR_RGBA2GRAY);
    equalizeHist(gray, gray);
    std::vector<Rect> faces;
    // Guard: preview frames can arrive before init() has created the tracker;
    // the original dereferenced a null pointer in that window.
    if (tracker) {
        tracker->process(gray);
        tracker->getObjects(faces);
    }
    for (Rect face : faces) {
        rectangle(src, face, Scalar(255, 0, 0));
    }
    if (window) {
        ANativeWindow_setBuffersGeometry(window, src.cols, src.rows, WINDOW_FORMAT_RGBA_8888);
        ANativeWindow_Buffer window_buffer;
        do {
            if (ANativeWindow_lock(window, &window_buffer, 0)) {
                // Lock failure usually means the Surface died; drop the window.
                ANativeWindow_release(window);
                window = 0;
                break;
            }
            uint8_t *dst_data = static_cast<uint8_t *>(window_buffer.bits);
            // The window's stride (in pixels) may exceed the image width, and
            // its height may exceed src.rows. The original memcpy'd
            // dst_linesize bytes from each source row and iterated to
            // window_buffer.height, reading past the end of the Mat in both
            // cases. Clamp the per-row byte count and the row count.
            int dst_linesize = window_buffer.stride * 4;
            int src_linesize = src.cols * 4;
            int copy_bytes = dst_linesize < src_linesize ? dst_linesize : src_linesize;
            int rows = window_buffer.height < src.rows ? window_buffer.height : src.rows;
            for (int i = 0; i < rows; ++i) {
                memcpy(dst_data + i * dst_linesize, src.data + i * src_linesize, copy_bytes);
            }
            ANativeWindow_unlockAndPost(window);
        } while (0);
    }
    src.release();
    gray.release();
    // JNI_ABORT not needed: mode 0 copies back and frees, matching original.
    env->ReleaseByteArrayElements(data_, data, 0);
}
extern "C"
JNIEXPORT void JNICALL
// Swaps the native render target for the window backing the given Surface.
// Releases any previously held window first so its reference is not leaked.
Java_com_barray_opencvlib_OpenCvJni_setSurface(JNIEnv *env, jobject thiz, jobject surface) {
    ANativeWindow *previous = window;
    window = 0;
    if (previous) {
        ANativeWindow_release(previous);
    }
    // Acquires a new reference tied to the Surface; released on next swap.
    window = ANativeWindow_fromSurface(env, surface);
}
源碼下載地址