Face recognition app source code

package com.e.opencv;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.os.Build;
import android.support.annotation.RequiresApi;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.FrameLayout;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import static android.Manifest.permission.CAMERA;
import static org.opencv.android.Utils.bitmapToMat;

public class CameraMain extends AppCompatActivity {

private static final String    TAG                 = "OCVSample::Activity";
private Mat                    mRgba;
private Mat                    mGray;
private File                   mCascadeFile;
private CascadeClassifier      mJavaDetector;
private float                  mRelativeFaceSize   = 0.2f;
private int                    mAbsoluteFaceSize   = 0;
private Camera camera;
private boolean isPreview = false;
private View mView;
private Mat locface;

private BaseLoaderCallback  mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        switch (status) {
            case LoaderCallbackInterface.SUCCESS:
            {
                Log.i(TAG, "OpenCV loaded successfully");
                // Load the native library after(!) OpenCV initialization
                // (only needed when the native DetectionBasedTracker is used; this activity only uses the Java CascadeClassifier)
                System.loadLibrary("detection_based_tracker");

                try {
                    // load cascade file from application resources
                    InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
                    File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                    mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
                    FileOutputStream os = new FileOutputStream(mCascadeFile);

                    byte[] buffer = new byte[4096];
                    int bytesRead;
                    while ((bytesRead = is.read(buffer)) != -1) {
                        os.write(buffer, 0, bytesRead);
                    }
                    is.close();
                    os.close();

                    mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                    if (mJavaDetector.empty()) {
                        Log.e(TAG, "Failed to load cascade classifier");
                        mJavaDetector = null;
                    } else {
                        Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
                    }
                    cascadeDir.delete();
                } catch (IOException e) {
                    e.printStackTrace();
                    Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
                }
            } break;
            default:
            {
                super.onManagerConnected(status);
            } break;
        }
    }
};



@RequiresApi(api = Build.VERSION_CODES.M)
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    setContentView(R.layout.cameramain);
    // Request the camera permission at runtime (API 23+). The result arrives asynchronously,
    // so on the very first launch the preview may not start until the permission has been granted.
    requestPermissions(new String[]{CAMERA}, 200);
    SurfaceView mSurfaceView = (SurfaceView) findViewById(R.id.surfaceView);
    mView=findViewById(R.id.myview);


    // Obtain the SurfaceHolder
    SurfaceHolder mSurfaceHolder = mSurfaceView.getHolder();
    // Set the Surface format
    // Argument: one of the int constants defined in PixelFormat (see PixelFormat.java)
    mSurfaceHolder.setFormat(PixelFormat.TRANSPARENT);

    // Keep the screen on while this Surface is visible, if desired
    mSurfaceHolder.setKeepScreenOn(true);

    // Set the Surface resolution if needed
    // mSurfaceHolder.setFixedSize(width, height);
    // Set the Surface type. Possible values:
    //        SURFACE_TYPE_NORMAL       : a plain Surface whose raw data is buffered in RAM
    //        SURFACE_TYPE_HARDWARE     : a Surface suited to DMA (direct memory access) engines and hardware acceleration
    //        SURFACE_TYPE_GPU          : a Surface suited to GPU acceleration
    //        SURFACE_TYPE_PUSH_BUFFERS : the Surface holds no data of its own; its buffers are pushed by another producer
    // Camera preview uses SURFACE_TYPE_PUSH_BUFFERS: the Camera pushes frames into the preview Surface, which keeps the preview smooth.
    // (setType is deprecated and ignored on modern Android, but kept here for older devices.)
    mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    // Register the SurfaceHolder callback
    mSurfaceHolder.addCallback(mSurfaceCallback);
}

private SurfaceHolder.Callback mSurfaceCallback = new SurfaceHolder.Callback() {

    /**
     * Called immediately after the Surface is first created (i.e. when it gains focus).
     * Drawing threads are typically started here.
     * @param surfaceHolder the SurfaceHolder that owns the current Surface
     */
    @Override
    public void surfaceCreated(final SurfaceHolder surfaceHolder) {
        try {
            // Camera.open() returns the back-facing camera by default
            camera = Camera.open(); // open the hardware camera; note the import must be android.hardware.Camera
            // Camera parameters can also be configured here, for example:
            /**
             WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE); // window manager
             Display display  = wm.getDefaultDisplay();                                    // current display
             Camera.Parameters parameters = camera.getParameters();                        // camera parameters
             parameters.setPictureFormat(PixelFormat.RGB_888);                             // picture format
             parameters.setJpegQuality(85);                                                // picture quality
             parameters.setPictureSize(display.getHeight(), display.getWidth());           // picture size; defaults to the screen size
             camera.setParameters(parameters);
             **/
            // Set the display orientation; cameraId is assumed to be 0 (back camera) here.
            // The cameraId can also be determined by following the Camera.open() source.
            Camera.Parameters parameters = camera.getParameters();
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            camera.setParameters(parameters);
            // Continuous autofocus; see method 5 in https://blog.csdn.net/yanzi1225627/article/details/8577682
            setCameraDisplayOrientation(CameraMain.this, 0, camera);

            camera.setPreviewDisplay(surfaceHolder); // render the viewfinder through the SurfaceView
            camera.startPreview();                   // start the preview
            isPreview = true;                        // mark that the preview is running
            mRgba=new Mat();
            mGray=new Mat();

            if (mAbsoluteFaceSize == 0) {
                // mGray is still empty at this point, so derive the minimum face size
                // from the preview height instead of mGray.rows() (which would be 0)
                int height = camera.getParameters().getPreviewSize().height;
                if (Math.round(height * mRelativeFaceSize) > 0) {
                    mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
                }
            }

            // Load the reference face (R.raw.timg) that live preview frames will be compared against
            locface = new Mat();
            Bitmap locbit = BitmapFactory.decodeResource(getApplicationContext().getResources(), R.raw.timg);
            bitmapToMat(locbit, locface, true);

            MatOfRect facestemp = new MatOfRect();

            // Guard against the cascade not having finished loading yet (it is loaded asynchronously in onResume)
            if (mJavaDetector != null) {
                mJavaDetector.detectMultiScale(locface, facestemp, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
            }
            org.opencv.core.Rect[] facesArraytemp = facestemp.toArray();
            if (facesArraytemp.length != 0) {
                locface = onFace(locface, facesArraytemp[0]);
                // bitmapToMat produces an RGBA Mat, so convert with COLOR_RGBA2GRAY
                Imgproc.cvtColor(locface, locface, Imgproc.COLOR_RGBA2GRAY);
                locface.convertTo(locface, CvType.CV_32F);
            }



            camera.setPreviewCallback(new Camera.PreviewCallback(){
                @Override
                public void onPreviewFrame(byte[] data, Camera camera) {
                    Camera.Size size = camera.getParameters().getPreviewSize();
                    try{
                        YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
                        if(image!=null){
                            ByteArrayOutputStream stream = new ByteArrayOutputStream();
                            image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, stream);

                            Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
                            // Face detection
                            // mNativeDetector.detect(mGray, faces); // compare the native detector in the OpenCV sample
                            stream.close();
                            bitmapToMat(bmp,mRgba,true);
                            Log.d("mrgba",mRgba.toString());
                            MatOfRect faces = new MatOfRect();

                            mJavaDetector.detectMultiScale(mRgba, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
                            // array holding the detected face rectangles
                            org.opencv.core.Rect[] facesArray = faces.toArray();

                            if (facesArray.length != 0) {
                                Log.d("facepos", "x:" + facesArray[0].x + " y:" + facesArray[0].y);
                                FrameLayout.LayoutParams focusItemParams = new FrameLayout.LayoutParams(mView.getLayoutParams());
                                focusItemParams.leftMargin = facesArray[0].x * ((int) getApplicationContext().getResources().getDisplayMetrics().density);
                                focusItemParams.topMargin = (facesArray[0].y - 65) * ((int) getApplicationContext().getResources().getDisplayMetrics().density);
                                focusItemParams.width = facesArray[0].width * ((int) getApplicationContext().getResources().getDisplayMetrics().density);
                                focusItemParams.height = facesArray[0].height * ((int) getApplicationContext().getResources().getDisplayMetrics().density);

                                // Why subtract 65: the detected rectangle tends to be anchored on the eyes, so the
                                // box looks a little low; shifting it up slightly frames the whole face.
                                // Why multiply by density: the rectangle is in preview-image coordinates while the
                                // View needs screen pixels, so the display density is used as a rough conversion;
                                // without it the error is noticeable.
                                // If you track several faces, index into facesArray here instead of always using [0].
                                mView.setLayoutParams(focusItemParams); // mView is the overlay View being positioned
                            }

                            if (facesArray.length != 0) {
                                // Crop the detected face to 100x100 and prepare it the same way as the reference face
                                Mat imgface = onFace(mRgba, facesArray[0]);
                                // mRgba is RGBA, so convert with COLOR_RGBA2GRAY
                                Imgproc.cvtColor(imgface, imgface, Imgproc.COLOR_RGBA2GRAY);
                                imgface.convertTo(imgface, CvType.CV_32F);

                                // Correlation between the reference face and the current face crop
                                double target = Imgproc.compareHist(locface, imgface, Imgproc.CV_COMP_CORREL);

                                if (target < 0) {
                                    target = -target;
                                }
                                // If something above threw, execution has already jumped to the catch block
                                System.out.println(target);
                            }

                            // Log.d("comaa", "similarity:" + percent);
                            // TODO: move the reading done here into onCreate

                        }
                        Log.d("image","get image success");
                    }catch(Exception ex){
                        Log.e("Sys","Error:"+ex.getMessage());
                    }
                }
            });


        } catch (IOException e) {
            Log.e(TAG, e.toString());
        }
    }
    /**
     * Called immediately when the Surface format or size changes; the Surface can be updated here.
     * @param surfaceHolder the SurfaceHolder that owns the current Surface
     * @param format        the new format of the Surface
     * @param width         the new width of the Surface
     * @param height        the new height of the Surface
     */
    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {


    }

    /**
     * Called immediately before the Surface is destroyed (i.e. when it loses focus).
     * Drawing threads are typically stopped and resources released here.
     * @param surfaceHolder the SurfaceHolder that owns the current Surface
     */
    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        if (camera != null) {
            if (isPreview) { // the preview is still running
                mGray.release();
                mRgba.release();
                camera.setPreviewCallback(null);
                camera.stopPreview();
                camera.release();
                camera = null;
                isPreview = false;
            }
        }
        }
    }
};

/**
 * Set the camera's display orientation.
 *
 * @param activity the hosting activity (used as context)
 * @param cameraId the camera ID (for a device with N cameras, the ID ranges from 0 to N-1)
 * @param camera   the camera instance
 */
public static void setCameraDisplayOrientation(Activity activity,
                                               int cameraId, android.hardware.Camera camera) {

    Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
    // query the camera's orientation info
    android.hardware.Camera.getCameraInfo(cameraId, info);
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    // current rotation of the display
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0:
            degrees = 0;
            break;
        case Surface.ROTATION_90:
            degrees = 90;
            break;
        case Surface.ROTATION_180:
            degrees = 180;
            break;
        case Surface.ROTATION_270:
            degrees = 270;
            break;
    }

    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        // front-facing camera
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360; // compensate for the mirror
    } else {
        // back-facing camera
        result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}
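
// Worked example of the arithmetic above (assumed values, for illustration only): a typical
// back camera reports info.orientation == 90. Held in portrait (Surface.ROTATION_0, degrees == 0)
// the result is (90 - 0 + 360) % 360 = 90, so the preview is rotated by 90 degrees; rotated to
// landscape (ROTATION_90) it becomes (90 - 90 + 360) % 360 = 0.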

@Override
protected void onResume() {
    super.onResume();
    if (!OpenCVLoader.initDebug()) {
        Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
    } else {
        Log.d(TAG, "OpenCV library found inside package. Using it!");
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    }
}
// The initialization above is required for OpenCV4Android to load


public Mat onFace(Mat mat, org.opencv.core.Rect rect) {
    // Given a frame and a face rectangle, return a fixed-size (100x100) crop of the face.
    Mat imgface = mat.submat(rect);
    Mat resized = new Mat();
    Size commonsize = new Size(100, 100);
    Imgproc.resize(imgface, resized, commonsize);
    return resized;
}

}
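
A note on the comparison step: Imgproc.compareHist is designed to compare histograms, but the preview callback above feeds it the raw 100x100 float face crops directly. That does run (both Mats have the same size and type), so the printed value is a pixel-wise correlation of the two crops rather than a true histogram comparison. If an actual histogram comparison is wanted, a minimal sketch could look like the following; the class and method names (HistCompareSketch, compareFaceHistograms) and the 64-bin histogram size are illustrative choices, not part of the original project.

import java.util.Arrays;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.imgproc.Imgproc;

public class HistCompareSketch {
    // Compare two 8-bit grayscale face crops by their intensity histograms.
    // Returns the correlation in [-1, 1]; values close to 1 mean similar histograms.
    public static double compareFaceHistograms(Mat grayA, Mat grayB) {
        Mat histA = new Mat();
        Mat histB = new Mat();
        MatOfInt channels = new MatOfInt(0);          // single (grayscale) channel
        MatOfInt histSize = new MatOfInt(64);         // 64 intensity bins
        MatOfFloat ranges = new MatOfFloat(0f, 256f); // full 8-bit intensity range

        Imgproc.calcHist(Arrays.asList(grayA), channels, new Mat(), histA, histSize, ranges);
        Imgproc.calcHist(Arrays.asList(grayB), channels, new Mat(), histB, histSize, ranges);

        // Normalize so the comparison does not depend on the crop size
        Core.normalize(histA, histA, 0, 1, Core.NORM_MINMAX);
        Core.normalize(histB, histB, 0, 1, Core.NORM_MINMAX);

        return Imgproc.compareHist(histA, histB, Imgproc.CV_COMP_CORREL);
    }
}

In the preview callback this would replace the convertTo/compareHist pair: skip the CV_32F conversion, pass the two gray crops to compareFaceHistograms, and threshold the returned correlation. Either way, the correlation is only a rough similarity cue that decides whether the detected face matches the reference image.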
