1 Introduction
1.1 Camera API:
This is the legacy camera API, also known as the Camera1 API. It is simpler to use and targets older Android devices, but it has drawbacks such as weaker performance and clumsy, limited control over camera parameters.
1.2 Camera2 API:
This is the newer camera API, introduced in Android 5.0 (Lollipop, API 21). It offers far more powerful and flexible control and better performance.
1.3 Advantages of Camera2 over Camera1:
- Flexibility: Camera2 exposes many more manual controls, such as exposure time, ISO sensitivity, and focus distance (see the sketch after this list).
- Performance: Camera2 supports capture and preview in parallel, so it behaves better when several operations run at the same time.
- New features: Camera2 supports RAW image capture, high-speed burst modes, and more.
- Capability detection: the CameraCharacteristics class lets you query the features and capabilities of each camera on the device.
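As a minimal sketch of the manual controls, assuming `builder` is a CaptureRequest.Builder obtained from createCaptureRequest() and that the device actually supports these controls (availability depends on the hardware level):
java
// Hedged sketch: manual exposure / ISO / focus on an existing CaptureRequest.Builder.
builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF); // turn off auto-exposure
builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L);                   // exposure time in ns (10 ms)
builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);                             // ISO 400
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF); // turn off auto-focus
builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 0.0f);                           // 0.0f = focus at infinity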
1.4 The key classes involved in the Camera2 API:
- CameraManager: the camera system service, used to enumerate, open, and close the system cameras.
- CameraCharacteristics: describes the properties of a camera; obtained via CameraManager's getCameraCharacteristics(@NonNull String cameraId) (see the sketch after this list).
- CameraDevice: represents an opened camera, roughly the counterpart of the old Camera object.
- CameraCaptureSession: the session class. To preview or take pictures you first create an instance of it and then drive everything through its methods (for example capture() for still photos).
- CaptureRequest: describes a single request. Preview and capture operations all take a CaptureRequest; the individual parameters are configured through CaptureRequest.Builder's set() method before the request is built.
- CaptureResult: describes the result of a completed capture.
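A minimal sketch of capability detection with CameraCharacteristics (the keys below are standard framework constants; the surrounding Activity context and imports are assumed):
java
// Hedged sketch: enumerate cameras and query a few of their characteristics.
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
    for (String cameraId : manager.getCameraIdList()) {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);                  // front / back / external
        Integer level = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); // LEGACY / LIMITED / FULL / 3
        int[] capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); // e.g. RAW support
        Log.d("Camera2Demo", "id=" + cameraId + " facing=" + facing + " level=" + level
                + " capabilities=" + Arrays.toString(capabilities));
    }
} catch (CameraAccessException e) {
    e.printStackTrace();
}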
2 Camera2 implementation steps
2.1 Use a TextureView as the preview surface:
((TextureView) findViewById(R.id.textureView)).setSurfaceTextureListener(textureListener);
When the SurfaceTexture is ready, the onSurfaceTextureAvailable() callback fires:
java
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
//When the SurfaceTexture becomes available, configure the camera parameters and open the camera
setupCamera(width, height);
openCamera();
}
//The remaining SurfaceTextureListener callbacks must also be implemented; empty stubs are enough here
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { }
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { return false; }
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) { }
};
2.2 Configure the camera parameters
java
private void setupCamera(int width, int height) {
//Get the CameraManager system service
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
//Iterate over all cameras
for (String cameraId: manager.getCameraIdList()) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
//Open the back camera by default, so skip front-facing cameras
if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT)
continue;
//Get the StreamConfigurationMap, which lists all output formats and sizes the camera supports
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
//Pick a preview size that matches the TextureView's dimensions
mPreviewSize = getOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
mCameraId = cameraId;
break;
}
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
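getOptimalSize() is not a framework method; it simply picks a supported preview size that best matches the TextureView. A minimal sketch is shown below (a slightly different version, which also distinguishes portrait from landscape, appears in the utility class in section 4.3):
java
//Sketch: choose the smallest supported size that covers the requested width and height,
//falling back to the first entry if none is large enough.
private Size getOptimalSize(Size[] sizeMap, int width, int height) {
    List<Size> bigEnough = new ArrayList<>();
    for (Size option : sizeMap) {
        if (option.getWidth() >= width && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }
    if (!bigEnough.isEmpty()) {
        return Collections.min(bigEnough, new Comparator<Size>() {
            @Override
            public int compare(Size lhs, Size rhs) {
                return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                        - (long) rhs.getWidth() * rhs.getHeight());
            }
        });
    }
    return sizeMap[0];
}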
2.3 Open the camera
java
private void openCamera() {
//Get the CameraManager system service
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
//Check the CAMERA permission first
try {
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
//Open the camera. The first argument selects which camera to open, the second (stateCallback) is the camera state callback, and the third chooses the thread the callback runs on (null = the current thread)
manager.openCamera(mCameraId, stateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
Implement the CameraDevice.StateCallback interface. Once the camera has been opened, onOpened() is called back, and that is where we start the preview:
java
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
mCameraDevice = camera;
//Start the preview
startPreview();
}
//onDisconnected and onError must also be implemented; at minimum, close the camera there
@Override
public void onDisconnected(CameraDevice camera) { camera.close(); }
@Override
public void onError(CameraDevice camera, int error) { camera.close(); }
};
2.4 Start the preview
java
private void startPreview() {
SurfaceTexture mSurfaceTexture = mTextureView.getSurfaceTexture();
//Set the default buffer size of the TextureView to the preview size
mSurfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
//Wrap the SurfaceTexture in a Surface that will display the preview data
Surface mSurface = new Surface(mSurfaceTexture);
try {
//Create a CaptureRequest.Builder; TEMPLATE_PREVIEW indicates a preview request
mCaptureRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
//Add the Surface as the output target for the preview data
mCaptureRequestBuilder.addTarget(mSurface);
//Create the capture session. The first argument is the list of output Surfaces, the second is the CameraCaptureSession state callback (onConfigured is called once the session is ready), and the third chooses the thread the callback runs on (null = the current thread)
mCameraDevice.createCaptureSession(Arrays.asList(mSurface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
//Build the capture request
mCaptureRequest = mCaptureRequestBuilder.build();
mPreviewSession = session;
//Submit a repeating request so the preview keeps receiving frames
mPreviewSession.setRepeatingRequest(mCaptureRequest, mSessionCaptureCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
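mSessionCaptureCallback above is not defined in this snippet; it can simply be null if you do not need per-frame metadata, or a minimal CameraCaptureSession.CaptureCallback such as the sketch below (the same pattern is used for face detection in section 4.3):
java
//Minimal sketch: a capture callback whose onCaptureCompleted delivers per-frame metadata.
private final CameraCaptureSession.CaptureCallback mSessionCaptureCallback =
        new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session,
                                           CaptureRequest request,
                                           TotalCaptureResult result) {
                // Per-frame metadata (e.g. CaptureResult.STATISTICS_FACES) can be read from `result` here.
            }
        };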
2.5 Preview frame callback
java
private void setupImageReader() {
//The first three arguments are the required width, height, and format; the last one is the maximum number of frames the ImageReader can hold at once (2 here)
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.JPEG, 2);
//Listen for ImageReader events: onImageAvailable is called whenever a new frame is available, and the frame can be processed there
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
//Convert the frame to a byte array, similar to the preview data delivered by Camera1's PreviewCallback
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
image.close();
}
}, null);
}
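Note that onImageAvailable() only fires if the ImageReader's Surface actually receives frames. A sketch of the extra wiring needed in startPreview() from section 2.4 (variable names as above; sessionStateCallback stands in for the anonymous StateCallback shown there):
java
//Sketch: route preview frames to mImageReader as well as to the TextureView's Surface.
setupImageReader();
Surface imageReaderSurface = mImageReader.getSurface();
mCaptureRequestBuilder.addTarget(imageReaderSurface);            // deliver each preview frame to the ImageReader
mCameraDevice.createCaptureSession(
        Arrays.asList(mSurface, imageReaderSurface),             // both output surfaces must be in the session
        sessionStateCallback, null);                             // sessionStateCallback: the StateCallback from 2.4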
3 The above covers the basic preview flow; next comes the photo-capture flow.
3.1 Camera2 also takes still photos through an ImageReader.
Step one: set the capture parameters, such as orientation and size.
java
private static final SparseIntArray ORIENTATION = new SparseIntArray();
static {
//Map each Surface rotation constant to the corresponding JPEG orientation in degrees
ORIENTATION.append(Surface.ROTATION_0, 90); //no rotation: the device is held in portrait
ORIENTATION.append(Surface.ROTATION_90, 0); //rotated 90 degrees clockwise: the device is in landscape (width greater than height)
ORIENTATION.append(Surface.ROTATION_180, 270); //rotated 180 degrees: the device is in upside-down portrait
ORIENTATION.append(Surface.ROTATION_270, 180); //rotated 270 degrees clockwise: the device is in the opposite landscape orientation
}
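This table assumes a sensor orientation of 90 degrees, which is typical for back cameras. A more general helper, following the same formula as the commented-out getOrientation() at the end of the utility class in section 4.3, combines the display rotation with CameraCharacteristics.SENSOR_ORIENTATION (mSensorOrientation below is a hypothetical field holding that value):
java
//Sketch: compute JPEG_ORIENTATION from the display rotation and the camera's sensor orientation.
private int getJpegOrientation(int rotation) {
    return (ORIENTATION.get(rotation) + mSensorOrientation + 270) % 360;
}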
3.2 Step two: choose the capture size. It can be determined together with the preview size, and the ImageReader is then initialized with it.
java
mCaptureSize = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new Comparator<Size>() {
@Override
public int compare(Size lhs, Size rhs) {
//Compare by pixel count; cast to long to avoid integer overflow on very large sizes
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
});
3.3 Step three: create a Runnable that saves the image.
java
public static class ImageSaver implements Runnable {
private final Image mImage;
public ImageSaver(Image image) {
mImage = image;
}
@Override
public void run() {
ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
File imageFile = new File(Environment.getExternalStorageDirectory() + "/DCIM/myPicture.jpg");
FileOutputStream fos = null;
try {
fos = new FileOutputStream(imageFile);
fos.write(data, 0, data.length);
} catch (IOException e) {
e.printStackTrace();
} finally {
//The Image must be closed so the ImageReader can reuse its buffers
mImage.close();
if (fos != null) {
try {
fos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
3.4 Step four: when the ImageReader has data available, save the image on the background thread.
java
//Use the capture size obtained earlier
mImageReader = ImageReader.newInstance(mCaptureSize.getWidth(), mCaptureSize.getHeight(),
ImageFormat.JPEG, 2);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
//Run the image-saving task on the background handler
mCameraHandler.post(new ImageSaver(reader.acquireNextImage()));
}
}, mCameraHandler);
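mCameraHandler is a Handler backed by a background thread; it has to be created before the camera is opened (the utility class in section 4.3 does the same thing in getCHandler()). A minimal sketch:
java
//Sketch: a dedicated background thread and Handler for camera callbacks and image saving.
private HandlerThread mCameraThread;
private Handler mCameraHandler;

private void startCameraThread() {
    mCameraThread = new HandlerThread("CameraBackground");
    mCameraThread.start();
    mCameraHandler = new Handler(mCameraThread.getLooper());
}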
3.5 Step five: when creating the CaptureSession for the preview, add the ImageReader's Surface to the output list.
java
mCameraDevice.createCaptureSession(Arrays.asList(previewSurface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
//onConfigured / onConfigureFailed exactly as in section 2.4
}, null);
3.6 Step six: respond to the shutter button. The click handler calls capture(), which performs the actual capture.
java
private void capture() {
try {
//Create a CaptureRequest for a still capture
final CaptureRequest.Builder mCaptureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
//Get the current display rotation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
//Send the capture output to mImageReader
mCaptureBuilder.addTarget(mImageReader.getSurface());
//Set the JPEG orientation; ORIENTATION.get(rotation) maps the display rotation to the corresponding angle
mCaptureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATION.get(rotation));
//This callback restarts the preview when the capture completes, because capturing stops the repeating preview request
CameraCaptureSession.CaptureCallback mImageSavedCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
Toast.makeText(getApplicationContext(), "Image Saved!", Toast.LENGTH_SHORT).show();
//Restart the preview
restartPreview();
}
};
//Stop the repeating preview request (mPreviewSession is the session created in section 2.4)
mPreviewSession.stopRepeating();
//Take the photo; the callback above then restarts the preview. Because mCaptureBuilder targets the ImageReader, onImageAvailable() is called automatically and the image is saved
mPreviewSession.capture(mCaptureBuilder.build(), mImageSavedCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
3.7 Step seven: restart the preview after the capture.
java
private void restartPreview() {
try {
//Simply submit the repeating request again; mCaptureRequest is the preview request built in section 2.4
mPreviewSession.setRepeatingRequest(mCaptureRequest, null, mCameraHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
4 Complete example: preview frames + face detection + face rectangle overlay
4.1 Create activity_face_camera3.xml with two TextureViews: one for the preview and one for drawing the face rectangles.
XML
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:app="http://schemas.android.com/apk/res-auto">
<TextureView
android:id="@+id/textureView"
android:layout_width="match_parent"
android:layout_height="0dp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintDimensionRatio="9:16"/>
<TextureView
android:id="@+id/facetextureView"
android:layout_width="match_parent"
android:layout_height="0dp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintDimensionRatio="9:16"/>
</androidx.constraintlayout.widget.ConstraintLayout>
4.2 Create FaceCamera3Activity.java
java
package com.xixia.aiimageupload.opcv;
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.TextureView;
import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import java.util.ArrayList;
import java.util.List;
import com.xixia.aiimageupload.Camera2Utils;
public class FaceCamera3Activity extends Activity {
private TextureView textureView;
private TextureView faceTextureView;//used to draw the face rectangles
private String[] permissions = {Manifest.permission.CAMERA};
private List<String> permissionList = new ArrayList<>();
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_face_camera3);
textureView = (TextureView) findViewById(R.id.textureView);
faceTextureView = (TextureView) findViewById(R.id.facetextureView);
//Initialize the Camera2 helper
Camera2Utils.getInstance().init(getWindowManager(), this, textureView, faceTextureView);
Camera2Utils.getInstance().setOnPreviewFrameListener(new Camera2Utils.OnPreviewFrameListener() {
@Override
public void previewFrame(byte[] data, int width, int height) {
Log.e("FFF", "previewFrame: " + width);
decodeSynQrCode(data, width, height);
}
});
//Request runtime permissions
getPermission();
}
private void getPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
for (String permission : permissions) {
if (ContextCompat.checkSelfPermission(this, permission) != PackageManager.PERMISSION_GRANTED) {
permissionList.add(permission);
}
}
if (!permissionList.isEmpty()) {
//Request the missing permissions
ActivityCompat.requestPermissions(this, permissionList.toArray(new String[permissionList.size()]), 1);
} else {
textureView.setSurfaceTextureListener(textureListener);
}
}
}
//onRequestPermissionsResult can only be overridden in an Activity, so the permission handling is kept here
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == 1) {
boolean allGranted = grantResults.length > 0;
for (int result : grantResults) {
if (result != PackageManager.PERMISSION_GRANTED) {
allGranted = false;
break;
}
}
if (!allGranted) {
//Some permission was denied: ask again
getPermission();
} else {
//All permissions granted
textureView.setSurfaceTextureListener(textureListener);
}
}
}
/*TextureView SurfaceTexture state callbacks*/
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
Camera2Utils.getInstance().startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
@Override
protected void onDestroy() {
super.onDestroy();
Camera2Utils.getInstance().closeCamera();
}
/**
* Preview frame data callback
*/
private void decodeSynQrCode(byte[] data, int width, int height) {
//Process the preview frame data here
//...............
//e.g. save it as a bitmap or run QR code recognition on it
}
}
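decodeSynQrCode() is left as a stub here. Because the utility class below delivers plane[0] of a YUV_420_888 frame (the Y / luminance plane), one simple way to turn it into something viewable is to build a grayscale Bitmap, as sketched below (this assumes the row stride equals the width, which is not guaranteed on every device, and requires android.graphics.Bitmap):
java
//Sketch: build a grayscale Bitmap from the Y plane delivered by Camera2Utils.
private Bitmap yPlaneToGrayscaleBitmap(byte[] data, int width, int height) {
    if (data.length < width * height) {
        return null; // unexpected stride/padding; a real implementation would unpack row by row
    }
    int[] pixels = new int[width * height];
    for (int i = 0; i < width * height; i++) {
        int y = data[i] & 0xFF;                             // luminance value 0..255
        pixels[i] = 0xFF000000 | (y << 16) | (y << 8) | y;  // opaque gray pixel
    }
    return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.ARGB_8888);
}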
4.3 The Camera2 utility class
java
package com.xixia.aiimageupload;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.WindowManager;
import androidx.annotation.RequiresApi;
import com.luck.picture.lib.tools.ToastUtils;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class Camera2Utils {
private static final String TAG = "Camera2Utils";
private static Camera2Utils mCameraUtils;
private CameraManager cManager;
private Size cPixelSize;//sensor pixel array size (camera sensor resolution)
private int cOrientation;
private Size captureSize;
private int[] faceDetectModes;
private TextureView cView;//used for the camera preview
private TextureView faceTextView;//used to draw the face rectangles
private Surface previewSurface;//preview Surface
private ImageReader cImageReader;
private Surface captureSurface;//capture Surface
private HandlerThread cHandlerThread;//camera background thread
private Handler cHandler;//camera background handler
private CameraDevice cDevice;
private CameraCaptureSession cSession;
private CameraDevice.StateCallback cDeviceOpenCallback = null;//camera open callback
private CaptureRequest.Builder previewRequestBuilder;//preview request builder
private CaptureRequest previewRequest;//preview request
private CameraCaptureSession.CaptureCallback previewCallback;//preview callback
private CaptureRequest captureRequest;
private CameraCaptureSession.CaptureCallback captureCallback;
private Context mContext;
private WindowManager mWindowManager;
private boolean isFront = false;
//Maps display rotation to JPEG orientation so photos come out upright
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
public static Camera2Utils getInstance() {
if (mCameraUtils == null) {
synchronized (Camera2Utils.class) {
if (mCameraUtils == null) {
mCameraUtils = new Camera2Utils();
}
}
}
return mCameraUtils;
}
public void init(WindowManager windowManager, Context context, TextureView textureView, TextureView faceTextView) {
this.mWindowManager = windowManager;
this.mContext = context;
this.cView = textureView;
this.faceTextView = faceTextView;
}
@SuppressLint("MissingPermission")
public void startPreview() {
//前置摄像头
String cId;
if (isFront) {
cId = CameraCharacteristics.LENS_FACING_BACK + "";
} else {
cId = CameraCharacteristics.LENS_FACING_FRONT + "";
}
cManager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
//根据摄像头ID,开启摄像头
try {
//获取开启相机的相关参数
CameraCharacteristics characteristics = cManager.getCameraCharacteristics(cId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] previewSizes = map.getOutputSizes(SurfaceTexture.class);//获取预览尺寸
Size[] captureSizes = map.getOutputSizes(ImageFormat.JPEG);//获取拍照尺寸
cOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);//获取相机角度
cPixelSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);//获取成像区域尺寸,同上
//可用于判断是否支持人脸检测,以及支持到哪种程度,支持的人脸检测模式
faceDetectModes = characteristics.get(CameraCharacteristics
.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
//支持的最大检测人脸数量
int maxFaceCount = characteristics.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT);
int mFaceDetectMode = CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF;
for (int i = 0; i < faceDetectModes.length; i++) {
int face = faceDetectModes[i];
if (face == CaptureRequest.STATISTICS_FACE_DETECT_MODE_FULL || face == CaptureRequest.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
mFaceDetectMode = CaptureRequest.STATISTICS_FACE_DETECT_MODE_FULL;
break;
}
}
if (mFaceDetectMode == CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF) {
//Log.i(TAG, "相机硬件不支持人脸检测");
ToastUtils.s(mContext, "相机硬件不支持人脸检测");
return;
}
//此处写死640*480,实际从预览尺寸列表选择
Size previewSize = new Size(1920, 1080);
//设置预览尺寸(避免控件尺寸与预览画面尺寸不一致时画面变形)
//transformImage(previewSizes, cView.getWidth(), cView.getHeight());
cView.getSurfaceTexture().setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
cManager.openCamera(cId, getCDeviceOpenCallback(), getCHandler());
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
//Adjust the preview transform (avoids distortion when the view size and the preview size do not match)
private void transformImage(Size[] previewSizes, int width, int height) {
Size mPreviewSize = getOptimalSize(previewSizes, width, height);
if (mPreviewSize == null || cView == null) {
return;
}
Matrix matrix = new Matrix();
int rotation = mWindowManager.getDefaultDisplay().getRotation();
RectF textureRectF = new RectF(0, 0, width, height);
RectF previewRectF = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = textureRectF.centerX();
float centery = textureRectF.centerY();
if (rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) {
previewRectF.offset(centerX - previewRectF.centerX(), centery - previewRectF.centerY());
matrix.setRectToRect(textureRectF, previewRectF, Matrix.ScaleToFit.FILL);
float scale = Math.max((float) width / mPreviewSize.getWidth(), (float) height / mPreviewSize.getHeight());
matrix.postScale(scale, scale, centerX, centery);
matrix.postRotate(90 * (rotation - 2), centerX, centery);
cView.setTransform(matrix);
}
}
/**
* Pick the best size, to avoid a distorted preview
*
* @param sizeMap
* @param width
* @param height
* @return
*/
//Choose the size in sizeMap that is larger than, and closest to, width x height
private Size getOptimalSize(Size[] sizeMap, int width, int height) {
List<Size> sizeList = new ArrayList<>();
for (Size option : sizeMap) {
if (width > height) {
if (option.getWidth() > width && option.getHeight() > height) {
sizeList.add(option);
}
} else {
if (option.getWidth() > height && option.getHeight() > width) {
sizeList.add(option);
}
}
}
if (sizeList.size() > 0) {
return Collections.min(sizeList, new Comparator<Size>() {
@Override
public int compare(Size lhs, Size rhs) {
return Long.signum(lhs.getWidth() * lhs.getHeight() - rhs.getWidth() * rhs.getHeight());
}
});
}
return sizeMap[0];
}
private Size getOptimalPreviewSize(Size[] sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) w / h;
if (sizes == null) return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
// Try to find an size match aspect ratio and size
Size size = null;
for (int i = 0; i < sizes.length; i++) {
size = sizes[i];
double ratio = (double) size.getWidth() / size.getHeight();
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
if (Math.abs(size.getHeight() - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.getHeight() - targetHeight);
}
}
// Cannot find the one match the aspect ratio, ignore the requirement
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (int i = 0; i < sizes.length; i++) {
size = sizes[i];
if (Math.abs(size.getHeight() - targetHeight) < minDiff) {
optimalSize = size;
minDiff = Math.abs(size.getHeight() - targetHeight);
}
}
}
return optimalSize;
}
private void configureTransform(int viewWidth, int viewHeight) {
int rotation = 1;//hard-coded to Surface.ROTATION_90 for this demo; normally taken from mWindowManager.getDefaultDisplay().getRotation()
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
cView.setTransform(matrix);
}
/**
* Lazily create the camera-open callback; once the camera is ready it issues the preview request
*/
@SuppressLint("NewApi")
private CameraDevice.StateCallback getCDeviceOpenCallback() {
if (cDeviceOpenCallback == null) {
cDeviceOpenCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {//the camera has been opened
cDevice = camera;
try {
//Create the session; the output targets (here the preview and capture Surfaces) must already be initialized
camera.createCaptureSession(Arrays.asList(getPreviewSurface(), getCaptureSurface()), new
CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
cSession = session;
//Build the preview request and submit it
Log.i(TAG, "[issuing preview request]");
try {
session.setRepeatingRequest(getPreviewRequest(), getPreviewCallback(),
getCHandler());
} catch (CameraAccessException e) {
Log.i(TAG, "--" + e.getMessage());
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
session.close();
}
}, getCHandler());
} catch (CameraAccessException e) {
Log.i(TAG, "--" + e.getMessage());
}
}
@Override
public void onDisconnected(CameraDevice camera) {
//The camera was disconnected; close it
camera.close();
}
@Override
public void onError(CameraDevice camera, int error) {
//An error occurred; close the camera
camera.close();
}
};
}
return cDeviceOpenCallback;
}
/**
* Lazily create the Handler that runs the camera callbacks
*
* @return
*/
private Handler getCHandler() {
if (cHandler == null) {
//Run the camera work on its own background thread
cHandlerThread = new HandlerThread("cHandlerThread");
cHandlerThread.start();
cHandler = new Handler(cHandlerThread.getLooper());
}
return cHandler;
}
/**
* Get the highest supported face detection mode
*
* @return
*/
private int getFaceDetectMode() {
if (faceDetectModes == null) {
Log.i(TAG, "getFaceDetectMode: ----");
return CaptureRequest.STATISTICS_FACE_DETECT_MODE_FULL;
} else {
Log.i(TAG, "getFaceDetectMode: --2--" + faceDetectModes[faceDetectModes.length - 1]);
return faceDetectModes[faceDetectModes.length - 1];
}
}
/**
* Lazily create the preview capture callback
*
* @return
*/
@SuppressLint("NewApi")
private CameraCaptureSession.CaptureCallback getPreviewCallback() {
if (previewCallback == null) {
previewCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest
request, TotalCaptureResult result) {
onCameraImagePreviewed(result);
}
};
}
return previewCallback;
}
/**
* Build and return the preview request
*
* @return
*/
@SuppressLint("NewApi")
private CaptureRequest getPreviewRequest() {
previewRequest = getPreviewRequestBuilder().build();
return previewRequest;
}
/**
* Lazily create the preview request builder, apply the common configuration, and set the face detection mode each time it is fetched
*
* @return
*/
@SuppressLint("NewApi")
private CaptureRequest.Builder getPreviewRequestBuilder() {
if (previewRequestBuilder == null) {
try {
previewRequestBuilder = cSession.getDevice().createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(getPreviewSurface());
previewRequestBuilder.addTarget(getCaptureSurface());
previewRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);//auto exposure, auto white balance, auto focus
} catch (CameraAccessException e) {
Log.i(TAG, "--" + e.getMessage());
}
}
// previewRequestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, getFaceDetectMode());//use the highest supported face detection mode
previewRequestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_SIMPLE);//set the face detection mode (SIMPLE here)
previewRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION, 0);
return previewRequestBuilder;
}
/**
* Get the preview Surface
*
* @return
*/
private Surface getPreviewSurface() {
if (previewSurface == null) {
previewSurface = new Surface(cView.getSurfaceTexture());
}
return previewSurface;
}
/**
* Handle a completed preview frame: read the detected face coordinates, convert them, and draw the face rectangles
*
* @param result
*/
@SuppressLint({"NewApi", "LocalSuppress"})
private void onCameraImagePreviewed(CaptureResult result) {
Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
if (faces == null) {
return;
}
if (faces.length > 0) {
Log.i(TAG, "Face(s) detected: faceLength=" + faces.length);
//Faces were detected; a still capture could be triggered here
//executeCapture();
}
if (faceTextView != null) {
drawFace(faces);
}
}
/**
* Draw the face rectangles
*/
private Paint facePaint;
private void drawFace(Face[] faces) {
if (facePaint == null) {
facePaint = new Paint();
facePaint.setColor(Color.BLUE);
facePaint.setStrokeWidth(10);
facePaint.setStyle(Paint.Style.STROKE);//draw hollow rectangles
//Make the overlay slightly transparent so the face markers do not hide the preview underneath
faceTextView.setAlpha(0.9f);
}
Canvas canvas = faceTextView.lockCanvas();
if (canvas != null) {
canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);//clear what was drawn for the previous frame
if (faces.length > 0) {
for (int i = 0; i < faces.length; i++) {
Rect fRect = faces[i].getBounds();
Log.e(TAG, "[R" + i + "]:[left:" + fRect.left + ",top:" + fRect.top + ",right:" + fRect.right + ",bottom:" + fRect.bottom + "]");
//Face coordinates are based on the sensor image size and origin, so they first have to be scaled
//Scale factors between the sensor image and the drawing canvas (the sensor image is rotated ±90° relative to the preview for both cameras, so width and height are swapped when computing the ratios)
float scaleWidth = canvas.getHeight() * 1.0f / cPixelSize.getWidth();
float scaleHeight = canvas.getWidth() * 1.0f / cPixelSize.getHeight();
//Scale the coordinates
int l = (int) (fRect.left * scaleWidth);
int t = (int) (fRect.top * scaleHeight);
int r = (int) (fRect.right * scaleWidth);
int b = (int) (fRect.bottom * scaleHeight);
Log.e(TAG, "[T" + i + "]:[left:" + l + ",top:" + t + ",right:" + r + ",bottom:" + b + "]");
//Face coordinates are relative to the sensor image and its origin, so they must also be rotated and translated into canvas coordinates (origin remapped to (0,0))
//Face detection: the origin is the top-left corner of the sensor image, and left/top/bottom/right are measured against that image
//After rotation the origin is somewhere else, so the coordinates have to be remapped to the canvas's top-left corner according to the sensor image's rotation; left/top/bottom/right also change roles
//For example, if the sensor image is rotated 90° relative to the preview, the sensor's "top" becomes the preview's "left"; with an additional mirror, the sensor's "top" becomes the preview's "right", measured from the right edge, and must be converted back to the left
if (isFront) {
//Front camera: the sensor image is rotated 90° clockwise relative to the preview and mirrored; left/top/bottom/right become bottom/right/top/left, and because the origin moves from the top-left to the bottom-right corner both X and Y have to be converted
canvas.drawRect(canvas.getWidth() - b, canvas.getHeight() - r, canvas.getWidth() - t, canvas.getHeight() - l, facePaint);
} else {
//Back camera: the sensor image is rotated 270° clockwise relative to the preview; left/top/bottom/right become bottom/left/top/right, and because the origin moves from the top-left to the bottom-left corner the Y direction has to be converted
canvas.drawRect(canvas.getWidth() - b, l, canvas.getWidth() - t, r, facePaint);
}
}
}
faceTextView.unlockCanvasAndPost(canvas);
}
}
/**
* Lazily create the capture ImageReader and its Surface
*/
@SuppressLint("NewApi")
private Surface getCaptureSurface() {
if (cImageReader == null) {
cImageReader = ImageReader.newInstance(getCaptureSize().getWidth(), getCaptureSize().getHeight(),
ImageFormat.YUV_420_888, 2);
cImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
//final callback with the captured frame data
onCaptureFinished(reader);
}
}, getCHandler());
captureSurface = cImageReader.getSurface();
}
return captureSurface;
}
/**
* Get the capture size
*
* @return
*/
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private Size getCaptureSize() {
if (captureSize != null) {
return captureSize;
} else {
return new Size(cView.getWidth(), cView.getHeight());
}
}
@SuppressLint("NewApi")
private void onCaptureFinished(ImageReader reader) {
if (reader != null) {
Image image = reader.acquireLatestImage();
if (image != null && image.getPlanes() != null && image.getPlanes().length > 0) {
int width = image.getWidth();
int height = image.getHeight();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
image.close();
buffer.clear();
if (onPreviewFrameListener != null) {
onPreviewFrameListener.previewFrame(data,width, height);
}
//onPreviewFrameToBitmap(data);
}
}
}
/**
* Convert preview data to a Bitmap
*/
Bitmap takeBitmap = null;
Bitmap takeBitmap2 = null;
private void onPreviewFrameToBitmap(byte[] data) {
//Note: decodeByteArray only works if the ImageReader delivers JPEG data; YUV_420_888 bytes would have to be converted first
takeBitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
/**
 * Mirror the bitmap horizontally to fix the left/right flip between preview and capture
 */
Matrix m = new Matrix();
m.postScale(-1, 1); // horizontal mirror
takeBitmap2 = Bitmap.createBitmap(takeBitmap, 0, 0, takeBitmap.getWidth(), takeBitmap.getHeight(), m, true);
if (ioShowBitmapListener != null) {
ioShowBitmapListener.showBitmap(takeBitmap2);
}
}
@SuppressLint("NewApi")
public void closeCamera() {
if (cSession != null) {
try {
cSession.stopRepeating();
} catch (CameraAccessException e) {
e.printStackTrace();
}
cSession.close();
cSession = null;
}
if (cDevice != null) {
cDevice.close();
cDevice = null;
}
if (cImageReader != null) {
cImageReader.close();
cImageReader = null;
}
if (cHandlerThread != null) {
cHandlerThread.quitSafely();
try {
cHandlerThread.join();
cHandlerThread = null;
cHandler = null;
} catch (InterruptedException e) {
Log.i(TAG, "--" + e.getMessage());
}
}
// if (captureRequestBuilder != null) {
// captureRequestBuilder.removeTarget(captureSurface);
// captureRequestBuilder = null;
// }
if (captureSurface != null) {
captureSurface.release();
captureSurface = null;
}
if (previewRequestBuilder != null) {
previewRequestBuilder.removeTarget(previewSurface);
previewRequestBuilder = null;
}
if (previewSurface != null) {
previewSurface.release();
previewSurface = null;
}
if (takeBitmap != null) {
takeBitmap.recycle();
takeBitmap = null;
}
}
/**
* Frame data callback
*/
private OnPreviewFrameListener onPreviewFrameListener;
public void setOnPreviewFrameListener(OnPreviewFrameListener onPreviewFrameListener) {
this.onPreviewFrameListener = onPreviewFrameListener;
}
public interface OnPreviewFrameListener {
void previewFrame(byte[] data, int width, int height);
}
/**
* Frame data Bitmap callback
*/
private IOShowBitmapListener ioShowBitmapListener;
public void setIoShowBitmapListener(IOShowBitmapListener ioShowBitmapListener) {
this.ioShowBitmapListener = ioShowBitmapListener;
}
public interface IOShowBitmapListener {
void showBitmap(Bitmap bitmap);
}
// /**
// * Perform a still capture
// */
// private CaptureRequest.Builder captureRequestBuilder;
// @SuppressLint("NewApi")
// private void executeCapture() {
// try {
// Log.i(TAG, "发出请求");
// cSession.capture(getCaptureRequest(), getCaptureCallback(), getCHandler());
// } catch (CameraAccessException e) {
// Log.i(TAG, "--" + e.getMessage());
// }
// }
//
// @SuppressLint("NewApi")
// private CaptureRequest getCaptureRequest() {
// captureRequest = getCaptureRequestBuilder().build();
// return captureRequest;
// }
//
// @SuppressLint("NewApi")
// private CaptureRequest.Builder getCaptureRequestBuilder() {
// if (captureRequestBuilder == null) {
// try {
// captureRequestBuilder = cDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
// captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// //set the capture output target
// captureRequestBuilder.addTarget(getCaptureSurface());
// //TODO 1: photo rotation
// int rotation = getWindowManager().getDefaultDisplay().getRotation();
// int rotation = 0;
// int rotationTo = getOrientation(rotation);
//
// captureRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION, rotationTo);
// } catch (CameraAccessException e) {
// Log.i(TAG, "--" + e.getMessage());
// }
// }
// return captureRequestBuilder;
// }
// @SuppressLint("NewApi")
// private CameraCaptureSession.CaptureCallback getCaptureCallback() {
// if (captureCallback == null) {
// captureCallback = new CameraCaptureSession.CaptureCallback() {
// @Override
// public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest
// request, TotalCaptureResult result) {
// }
// };
// }
// return captureCallback;
// }
// /**
// * Retrieves the JPEG orientation from the specified screen rotation.
// *
// * @param rotation The screen rotation.
// * @return The JPEG orientation (one of 0, 90, 270, and 360)
// */
// private int getOrientation(int rotation) {
// return (ORIENTATIONS.get(rotation) + cOrientation + 270) % 360;
// }
}