自定义获取人脸view
/**
* 作者:created by meixi
* 邮箱:[email protected]
* 日期:2018/8/29 15
*/
/**
 * Overlay View drawn on top of the camera preview. Draws a green rectangle,
 * ID / confidence / eyes-distance labels for every detected face, and a
 * detection-FPS readout in the top-left corner.
 *
 * Face coordinates arrive in preview-frame space; onDraw() scales them into
 * view space (swapping axes when the display orientation is 90°/270°) and
 * mirrors them horizontally for a front-facing camera.
 */
public class FaceOverlayView extends View {

    /** Stroke paint for the face bounding boxes. */
    private Paint mPaint;
    /** Fill paint for the per-face labels and the FPS readout. */
    private Paint mTextPaint;
    private int mDisplayOrientation;
    private int mOrientation;
    /** Camera preview frame size in pixels (set by the owning Activity). */
    private int previewWidth;
    private int previewHeight;
    /** Latest detection results; null until the first frame is processed. */
    private FaceResult[] mFaces;
    private double fps;
    /** True when frames come from a front camera, whose preview is mirrored. */
    private boolean isFront = false;

    // Fix: cache reusable objects instead of allocating them inside onDraw().
    // onDraw() runs on every frame; per-frame allocations cause GC pauses and
    // visible jank (Android custom-drawing guidelines).
    private final DecimalFormat mFpsFormat = new DecimalFormat(".##");
    private final RectF mFaceRect = new RectF();
    private final PointF mMidPoint = new PointF();

    public FaceOverlayView(Context context) {
        super(context);
        initialize();
    }

    /** One-time paint setup: green 2dp stroke for boxes, green 15dp text. */
    private void initialize() {
        // We want a green box around the face:
        DisplayMetrics metrics = getResources().getDisplayMetrics();
        int stroke = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 2, metrics);
        mPaint = new Paint();
        mPaint.setAntiAlias(true);
        mPaint.setDither(true);
        mPaint.setColor(Color.GREEN);
        mPaint.setStrokeWidth(stroke);
        mPaint.setStyle(Paint.Style.STROKE);

        mTextPaint = new Paint();
        mTextPaint.setAntiAlias(true);
        mTextPaint.setDither(true);
        int size = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 15, metrics);
        mTextPaint.setTextSize(size);
        mTextPaint.setColor(Color.GREEN);
        mTextPaint.setStyle(Paint.Style.FILL);
    }

    /** Updates the detection frame rate shown in the corner readout. */
    public void setFPS(double fps) {
        this.fps = fps;
    }

    /** Replaces the face set and schedules a redraw. */
    public void setFaces(FaceResult[] faces) {
        mFaces = faces;
        invalidate();
    }

    public void setOrientation(int orientation) {
        mOrientation = orientation;
    }

    public void setDisplayOrientation(int displayOrientation) {
        mDisplayOrientation = displayOrientation;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mFaces != null && mFaces.length > 0) {
            // Map preview coordinates to view coordinates. At 90°/270° the
            // preview is rotated relative to the view, so width and height swap.
            float scaleX = (float) getWidth() / (float) previewWidth;
            float scaleY = (float) getHeight() / (float) previewHeight;
            switch (mDisplayOrientation) {
                case 90:
                case 270:
                    scaleX = (float) getWidth() / (float) previewHeight;
                    scaleY = (float) getHeight() / (float) previewWidth;
                    break;
            }
            canvas.save();
            canvas.rotate(-mOrientation);
            for (FaceResult face : mFaces) {
                face.getMidPoint(mMidPoint);
                // A (0,0) midpoint marks an empty/cleared slot — skip it.
                if (mMidPoint.x != 0.0f && mMidPoint.y != 0.0f) {
                    float eyesDis = face.eyesDistance();
                    // Derive the face box from the eye midpoint and eye
                    // distance (same 1.2/0.65/1.75 proportions as before).
                    // Fix: set components directly instead of allocating a
                    // temporary RectF per face.
                    mFaceRect.set(
                            (mMidPoint.x - eyesDis * 1.2f) * scaleX,
                            (mMidPoint.y - eyesDis * 0.65f) * scaleY,
                            (mMidPoint.x + eyesDis * 1.2f) * scaleX,
                            (mMidPoint.y + eyesDis * 1.75f) * scaleY);
                    if (isFront) {
                        // Front-camera preview is mirrored: flip the box
                        // horizontally about the view's vertical center line.
                        float left = mFaceRect.left;
                        float right = mFaceRect.right;
                        mFaceRect.left = getWidth() - right;
                        mFaceRect.right = getWidth() - left;
                    }
                    canvas.drawRect(mFaceRect, mPaint);
                    canvas.drawText("ID " + face.getId(), mFaceRect.left, mFaceRect.bottom + mTextPaint.getTextSize(), mTextPaint);
                    canvas.drawText("Confidence " + face.getConfidence(), mFaceRect.left, mFaceRect.bottom + mTextPaint.getTextSize() * 2, mTextPaint);
                    canvas.drawText("EyesDistance " + face.eyesDistance(), mFaceRect.left, mFaceRect.bottom + mTextPaint.getTextSize() * 3, mTextPaint);
                }
            }
            canvas.restore();
        }
        // Fix: the DecimalFormat is now a field; building one per frame here
        // was a needless per-draw allocation.
        canvas.drawText("Detected_Frame/s: " + mFpsFormat.format(fps) + " @ " + previewWidth + "x" + previewHeight,
                mTextPaint.getTextSize(), mTextPaint.getTextSize(), mTextPaint);
    }

    public void setPreviewWidth(int previewWidth) {
        this.previewWidth = previewWidth;
    }

    public void setPreviewHeight(int previewHeight) {
        this.previewHeight = previewHeight;
    }

    public void setFront(boolean front) {
        isFront = front;
    }
}
完整人脸识别拍照类
/**
* 作者:created by meixi
* 邮箱:[email protected]
* 日期:2018/8/29 14
*/
/**
 * Camera-preview Activity that runs android.media.FaceDetector over downscaled
 * preview frames on a background thread and hands the detected faces to a
 * FaceOverlayView overlay for drawing. Frames arriving while a detection
 * thread is still busy are dropped.
 */
public final class FaceDetectRGBActivity extends Activity implements SurfaceHolder.Callback, Camera.PreviewCallback {
    // Number of Cameras in device.
    private int numberOfCameras;
    public static final String TAG = FaceDetectRGBActivity.class.getSimpleName();
    private Camera mCamera;
    private int cameraId = 1; // 1 = front camera, 0 = back camera

    // Let's keep track of the display rotation and orientation also:
    private int mDisplayRotation;
    private int mDisplayOrientation;

    // Camera preview frame size in pixels, chosen in setOptimalPreviewSize().
    private int previewWidth;
    private int previewHeight;

    // The surface view for the camera data
    private SurfaceView mView;

    // Draw rectangles and other fancy stuff:
    private FaceOverlayView mFaceView;

    // Log all errors:
    private final CameraErrorCallback mErrorCallback = new CameraErrorCallback();

    // Maximum number of faces tracked per frame.
    private static final int MAX_FACE = 10;

    // True while a FaceDetectThread is processing a frame; onPreviewFrame()
    // drops frames while it is set. NOTE(review): written from both the UI
    // thread and the worker thread without synchronization.
    private boolean isThreadWorking = false;
    private Handler handler;
    private FaceDetectThread detectThread = null;

    // Size the frame is scaled down to before detection — smaller is faster
    // but shortens the distance at which faces are found.
    private int prevSettingWidth;
    private int prevSettingHeight;

    // NOTE(review): created in surfaceChanged() but the worker thread builds a
    // fresh detector on every frame (see FaceDetectThread.run()), so this
    // instance is effectively discarded.
    private android.media.FaceDetector fdet;

    // Faces of the current frame and of the previous frame; the previous set
    // is used to hand stable IDs to faces that moved only slightly.
    private FaceResult faces[];
    private FaceResult faces_previous[];

    // Monotonically increasing ID assigned to each newly seen face.
    private int Id = 0;
    private String BUNDLE_CAMERA_ID = "camera";
    private ImageView borfimage;

    //RecylerView face image
    // Per-face-ID frame counter; a crop is captured on a face's 5th frame.
    private HashMap<Integer, Integer> facesCount = new HashMap<>();
    private ArrayList<Bitmap> facesBitmap;

    //==============================================================================================
    // Activity Methods
    //==============================================================================================

    /**
     * Initializes the UI and initiates the creation of a face detector.
     */
    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        setContentView(R.layout.activity_camera_viewer);
        mView = (SurfaceView) findViewById(R.id.surfaceview);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        // Now create the OverlayView:
        mFaceView = new FaceOverlayView(this);
        addContentView(mFaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));

        // Create and Start the OrientationListener:
        handler = new Handler();

        // Pre-allocate all face slots; empty slots are marked via clear().
        faces = new FaceResult[MAX_FACE];
        faces_previous = new FaceResult[MAX_FACE];
        for (int i = 0; i < MAX_FACE; i++) {
            faces[i] = new FaceResult();
            faces_previous[i] = new FaceResult();
        }

        // Tapping this image toggles front/back camera; recreate() re-runs the
        // surface callbacks so the new camera is opened.
        borfimage = (ImageView) findViewById(R.id.borf);
        borfimage.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                cameraId = (cameraId + 1) % numberOfCameras;
                recreate();
            }
        });

        // Restore the previously active camera after a configuration change.
        if (icicle != null)
            cameraId = icicle.getInt(BUNDLE_CAMERA_ID, 0);
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        // Check for the camera permission before accessing the camera. If the
        // permission is not granted yet, request permission.
        SurfaceHolder holder = mView.getHolder();
        holder.addCallback(this);
        holder.setFormat(ImageFormat.NV21);
    }

    /**
     * Restarts the camera.
     */
    @Override
    protected void onResume() {
        super.onResume();
        Log.i(TAG, "onResume");
        startPreview();
    }

    /**
     * Stops the camera.
     * NOTE(review): the preview callback registered in startPreview() is not
     * cleared here; stopPreview() halts frame delivery, but consider also
     * calling mCamera.setPreviewCallback(null).
     */
    @Override
    protected void onPause() {
        super.onPause();
        Log.i(TAG, "onPause");
        if (mCamera != null) {
            mCamera.stopPreview();
        }
    }

    /**
     * Releases the resources associated with the camera source, the associated detector, and the
     * rest of the processing pipeline.
     */
    @Override
    protected void onDestroy() {
        super.onDestroy();
        // resetData();
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // Remember which camera was active across recreation.
        outState.putInt(BUNDLE_CAMERA_ID, cameraId);
    }

    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        // resetData();
        //Find the total number of cameras available
        numberOfCameras = Camera.getNumberOfCameras();
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        // NOTE(review): this loop only replaces cameraId when it is 0, mapping
        // it to the index of a back-facing camera — verify this matches the
        // intended front/back selection on multi-camera devices.
        for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
            Camera.getCameraInfo(i, cameraInfo);
            if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                if (cameraId == 0) cameraId = i;
            }
        }
        mCamera = Camera.open(cameraId);
        Camera.getCameraInfo(cameraId, cameraInfo);
        // The overlay must mirror its rectangles for a front-facing camera.
        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            mFaceView.setFront(true);
        }
        try {
            mCamera.setPreviewDisplay(mView.getHolder());
        } catch (Exception e) {
            Log.e(TAG, "Could not preview the image.", e);
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
        // We have no surface, return immediately:
        if (surfaceHolder.getSurface() == null) {
            return;
        }
        // Try to stop the current preview:
        try {
            mCamera.stopPreview();
        } catch (Exception e) {
            // Ignore... stopPreview() throws if the preview was not running.
        }
        configureCamera(width, height);
        setDisplayOrientation();
        setErrorCallback();

        // Create media.FaceDetector sized to the downscaled detection bitmap,
        // preserving the preview's aspect ratio.
        float aspect = (float) previewHeight / (float) previewWidth;
        fdet = new android.media.FaceDetector(prevSettingWidth, (int) (prevSettingWidth * aspect), MAX_FACE);

        // Everything is configured! Finally start the camera preview again:
        startPreview();
    }

    // Routes camera driver errors to the shared logging callback.
    private void setErrorCallback() {
        mCamera.setErrorCallback(mErrorCallback);
    }

    private void setDisplayOrientation() {
        // Now set the display orientation:
        mDisplayRotation = Util.getDisplayRotation(FaceDetectRGBActivity.this);
        mDisplayOrientation = Util.getDisplayOrientation(mDisplayRotation, cameraId);
        mCamera.setDisplayOrientation(mDisplayOrientation);
        // Keep the overlay's coordinate mapping in sync with the camera.
        if (mFaceView != null) {
            mFaceView.setDisplayOrientation(mDisplayOrientation);
        }
    }

    private void configureCamera(int width, int height) {
        Camera.Parameters parameters = mCamera.getParameters();
        // Set the PreviewSize and AutoFocus:
        setOptimalPreviewSize(parameters, width, height);
        setAutoFocus(parameters);
        // And set the parameters:
        mCamera.setParameters(parameters);
    }

    /**
     * Picks the best supported preview size for the surface dimensions, pushes
     * it to the camera parameters and the overlay, and derives the downscaled
     * detection size (prevSettingWidth/Height) from the preview width.
     */
    private void setOptimalPreviewSize(Camera.Parameters cameraParameters, int width, int height) {
        List<Camera.Size> previewSizes = cameraParameters.getSupportedPreviewSizes();
        float targetRatio = (float) width / height;
        Camera.Size previewSize = Util.getOptimalPreviewSize(this, previewSizes, targetRatio);
        previewWidth = previewSize.width;
        previewHeight = previewSize.height;
        Log.e(TAG, "previewWidth" + previewWidth);
        Log.e(TAG, "previewHeight" + previewHeight);

        /**
         * Calculate size to scale full frame bitmap to smaller bitmap
         * Detect face in scaled bitmap have high performance than full bitmap.
         * The smaller image size -> detect faster, but distance to detect face shorter,
         * so calculate the size follow your purpose
         */
        if (previewWidth / 4 > 360) {
            prevSettingWidth = 360;
            prevSettingHeight = 270;
        } else if (previewWidth / 4 > 320) {
            prevSettingWidth = 320;
            prevSettingHeight = 240;
        } else if (previewWidth / 4 > 240) {
            prevSettingWidth = 240;
            prevSettingHeight = 160;
        } else {
            prevSettingWidth = 160;
            prevSettingHeight = 120;
        }
        cameraParameters.setPreviewSize(previewSize.width, previewSize.height);
        mFaceView.setPreviewWidth(previewWidth);
        mFaceView.setPreviewHeight(previewHeight);
    }

    // Prefer continuous focus when the device supports it.
    private void setAutoFocus(Camera.Parameters cameraParameters) {
        List<String> focusModes = cameraParameters.getSupportedFocusModes();
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE))
            cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
    }

    private void startPreview() {
        if (mCamera != null) {
            isThreadWorking = false;
            mCamera.startPreview();
            mCamera.setPreviewCallback(this);
            counter = 0; // restart the detection-FPS measurement
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        // NOTE(review): no null check here — if Camera.open() failed in
        // surfaceCreated(), or the camera was released elsewhere, this NPEs.
        mCamera.setPreviewCallbackWithBuffer(null);
        mCamera.setErrorCallback(null);
        mCamera.release();
        mCamera = null;
    }

    /**
     * Called for every preview frame (NV21 bytes). Starts a fresh
     * FaceDetectThread unless one is still running, in which case the frame
     * is simply dropped.
     */
    @Override
    public void onPreviewFrame(byte[] _data, Camera _camera) {
        if (!isThreadWorking) {
            if (counter == 0)
                start = System.currentTimeMillis();
            isThreadWorking = true;
            waitForFdetThreadComplete();
            detectThread = new FaceDetectThread(handler, this);
            detectThread.setData(_data);
            detectThread.start();
        }
    }

    // Blocks the UI thread until the previous detect thread (if any) exits.
    private void waitForFdetThreadComplete() {
        if (detectThread == null) {
            return;
        }
        if (detectThread.isAlive()) {
            try {
                detectThread.join();
                detectThread = null;
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }

    // fps detect face (not FPS of camera)
    long start, end;
    int counter = 0;
    double fps;

    /**
     * Do face detect in thread
     *
     * Converts one NV21 frame to a downscaled RGB_565 bitmap, rotates it
     * upright, runs android.media.FaceDetector, matches results against the
     * previous frame to keep IDs stable, and posts the results back to the UI
     * thread.
     */
    private class FaceDetectThread extends Thread {
        private Handler handler;
        private byte[] data = null; // raw NV21 preview frame
        private Context ctx;
        private Bitmap faceCroped;
        static final int REFRESH_COMPLETE = 0X1112;

        // NOTE(review): an anonymous Handler inside a non-static inner class
        // holds an implicit reference to the Activity and can leak it if a
        // message outlives the Activity.
        private Handler mHandler = new Handler() {
            public void handleMessage(android.os.Message msg) {
                switch (msg.what) {
                    case REFRESH_COMPLETE:
                        // A face crop was captured: navigate to MainActivity.
                        Intent intent = new Intent(FaceDetectRGBActivity.this, MainActivity.class);
                        startActivity(intent);
                        break;
                }
            }
        };

        public FaceDetectThread(Handler handler, Context ctx) {
            this.ctx = ctx;
            this.handler = handler;
        }

        public void setData(byte[] data) {
            this.data = data;
        }

        public void run() {
            // Log.i("FaceDetectThread", "running");

            // Target size of the downscaled detection bitmap, keeping the
            // preview's aspect ratio.
            float aspect = (float) previewHeight / (float) previewWidth;
            int w = prevSettingWidth;
            int h = (int) (prevSettingWidth * aspect);

            Bitmap bitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.RGB_565);
            // face detection: first convert the image from NV21 to RGB_565
            YuvImage yuv = new YuvImage(data, ImageFormat.NV21,
                    bitmap.getWidth(), bitmap.getHeight(), null);
            // TODO: make rect a member and use it for width and height values above
            Rect rectImage = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
            // TODO: use a threaded option or a circular buffer for converting streams?
            //see http://ostermiller.org/convert_java_outputstream_inputstream.html
            ByteArrayOutputStream baout = new ByteArrayOutputStream();
            // NV21 -> JPEG -> RGB_565 bitmap round-trip; quality 100 keeps the
            // detector's input as sharp as possible.
            if (!yuv.compressToJpeg(rectImage, 100, baout)) {
                Log.e("CreateBitmap", "compressToJpeg failed");
            }
            BitmapFactory.Options bfo = new BitmapFactory.Options();
            bfo.inPreferredConfig = Bitmap.Config.RGB_565;
            bitmap = BitmapFactory.decodeStream(
                    new ByteArrayInputStream(baout.toByteArray()), null, bfo);
            Bitmap bmp = Bitmap.createScaledBitmap(bitmap, w, h, false);

            // Scale factors mapping detection-bitmap coordinates back to
            // preview coordinates.
            float xScale = (float) previewWidth / (float) prevSettingWidth;
            float yScale = (float) previewHeight / (float) h;

            Camera.CameraInfo info = new Camera.CameraInfo();
            Camera.getCameraInfo(cameraId, info);
            int rotate = mDisplayOrientation;
            // Front camera with the display in its natural/reversed rotation
            // gets an extra 180° so the detector sees the frame upright —
            // NOTE(review): confirm this on several devices/orientations.
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT && mDisplayRotation % 180 == 0) {
                if (rotate + 180 > 360) {
                    rotate = rotate - 180;
                } else
                    rotate = rotate + 180;
            }

            // Rotate the detection bitmap upright and, where the rotation
            // swaps width and height, swap the scale factors to match.
            switch (rotate) {
                case 90:
                    bmp = ImageUtils.rotate(bmp, 90);
                    xScale = (float) previewHeight / bmp.getWidth();
                    yScale = (float) previewWidth / bmp.getHeight();
                    break;
                case 180:
                    bmp = ImageUtils.rotate(bmp, 180);
                    break;
                case 270:
                    bmp = ImageUtils.rotate(bmp, 270);
                    xScale = (float) previewHeight / (float) h;
                    yScale = (float) previewWidth / (float) prevSettingWidth;
                    break;
            }

            // NOTE(review): re-creating the detector on every frame discards
            // the instance built in surfaceChanged(); consider reusing it when
            // the bitmap size has not changed.
            fdet = new android.media.FaceDetector(bmp.getWidth(), bmp.getHeight(), MAX_FACE);
            android.media.FaceDetector.Face[] fullResults = new android.media.FaceDetector.Face[MAX_FACE];
            fdet.findFaces(bmp, fullResults);

            for (int i = 0; i < MAX_FACE; i++) {
                if (fullResults[i] == null) {
                    // No face in this slot: mark it empty for the overlay.
                    faces[i].clear();
                } else {
                    // Convert the detector result back into preview coordinates.
                    PointF mid = new PointF();
                    fullResults[i].getMidPoint(mid);
                    mid.x *= xScale;
                    mid.y *= yScale;
                    float eyesDis = fullResults[i].eyesDistance() * xScale;
                    float confidence = fullResults[i].confidence();
                    float pose = fullResults[i].pose(android.media.FaceDetector.Face.EULER_Y);
                    int idFace = Id;

                    Rect rect = new Rect(
                            (int) (mid.x - eyesDis * 1.20f),
                            (int) (mid.y - eyesDis * 0.55f),
                            (int) (mid.x + eyesDis * 1.20f),
                            (int) (mid.y + eyesDis * 1.85f));
                    /**
                     * Only detect face size > 100x100
                     */
                    if (rect.height() * rect.width() > 100 * 100) {
                        // ID tracking: if this face's midpoint falls inside an
                        // expanded box around a face seen within the last
                        // second, reuse that face's ID instead of a new one.
                        for (int j = 0; j < MAX_FACE; j++) {
                            float eyesDisPre = faces_previous[j].eyesDistance();
                            PointF midPre = new PointF();
                            faces_previous[j].getMidPoint(midPre);
                            RectF rectCheck = new RectF(
                                    (midPre.x - eyesDisPre * 1.5f),
                                    (midPre.y - eyesDisPre * 1.15f),
                                    (midPre.x + eyesDisPre * 1.5f),
                                    (midPre.y + eyesDisPre * 1.85f));
                            if (rectCheck.contains(mid.x, mid.y) && (System.currentTimeMillis() - faces_previous[j].getTime()) < 1000) {
                                idFace = faces_previous[j].getId();
                                break;
                            }
                        }
                        // No match above: consume a brand-new ID.
                        if (idFace == Id) Id++;
                        faces[i].setFace(idFace, mid, eyesDis, confidence, pose, System.currentTimeMillis());
                        faces_previous[i].set(faces[i].getId(), faces[i].getMidEye(), faces[i].eyesDistance(), faces[i].getConfidence(), faces[i].getPose(), faces[i].getTime());

                        //
                        // if focus in a face 5 frame -> take picture face display in RecyclerView
                        // because of some first frame have low quality
                        //
                        if (facesCount.get(idFace) == null) {
                            facesCount.put(idFace, 0);
                        } else {
                            int count = facesCount.get(idFace) + 1;
                            if (count <= 5)
                                facesCount.put(idFace, count);
                            //
                            // Crop Face to display in RecylerView
                            //
                            if (count == 5) {
                                faceCroped = ImageUtils.cropFace(faces[i], bitmap, rotate);
                                if (faceCroped != null) {
                                    // Publish the crop and trigger navigation
                                    // on the UI thread.
                                    handler.post(new Runnable() {
                                        public void run() {
                                            TestObServernotice.getInstance().notifyObserver(0, faceCroped);
                                            mHandler.sendEmptyMessageDelayed(REFRESH_COMPLETE, 0);
                                            // imagePreviewAdapter.add(faceCroped);
                                        }
                                    });
                                }
                            }
                        }
                    }
                }
            }

            // Publish results on the UI thread: update the overlay, refresh the
            // detection-FPS estimate, and allow the next frame to be processed.
            handler.post(new Runnable() {
                public void run() {
                    //send face to FaceView to draw rect
                    mFaceView.setFaces(faces);

                    //calculate FPS
                    end = System.currentTimeMillis();
                    counter++;
                    double time = (double) (end - start) / 1000;
                    if (time != 0)
                        fps = counter / time;
                    mFaceView.setFPS(fps);
                    // Avoid overflow on very long sessions.
                    if (counter == (Integer.MAX_VALUE - 1000))
                        counter = 0;
                    isThreadWorking = false;
                }
            });
        }
    }
}
附上demo下载地址:
链接:https://pan.baidu.com/s/1KoivkiJWie3EQMk13HEFAg 密码:QQ回答1085220040