Starting with Android 5.0, Google introduced a brand-new camera framework: the Camera2 API. Mainstream devices are now essentially all on 5.0 or later, so Camera1 compatibility is no longer a real concern. Camera1's handful of APIs and poor flexibility have long been unable to keep up with increasingly complex camera feature development.
Camera2 was a major change for camera apps: its goal is to give the application layer much more control over the camera, so that higher-quality camera applications can be built.
Advantages of the Camera2 API:
1. More per-frame (preview/capture) metadata is available, and each frame's parameters can be controlled manually
2. Much finer control over the camera (e.g. adjusting focus distance, cropping the preview/capture image)
3. More image formats (YUV/RAW) and high-speed burst capture are supported
4. Camera characteristics can be queried without opening the camera (see the sketch after this list)
5. YUV post-processing and RAW capture are supported, as well as additional output stream configurations
6. Photos can be taken without starting a preview
7. Exposure time can be controlled
8. 3A (AF, AE, AWB) control is opened up to the application to the greatest extent in Camera2
...
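Advantage 4, for example, needs nothing more than a CameraManager. A minimal sketch (assumed to run inside an Activity, like the rest of the code in this post):
    // A minimal sketch: query basic characteristics of every camera without opening any of them.
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String id : manager.getCameraIdList()) {
            CameraCharacteristics c = manager.getCameraCharacteristics(id);
            Integer facing = c.get(CameraCharacteristics.LENS_FACING);                    // front / back / external
            Integer sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);  // sensor mounting angle
            Integer hwLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); // LEGACY / LIMITED / FULL / ...
            Log.i(TAG, "id=" + id + " facing=" + facing
                    + " orientation=" + sensorOrientation + " hwLevel=" + hwLevel);
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }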
Flow diagram:
Key classes:
CameraManager
A system service responsible for querying cameras and establishing camera connections; it does not do much beyond that.
CameraCharacteristics
Describes a camera's properties and capabilities.
Surface
A block of memory that image data is written into.
CameraCaptureSession
A pipeline instance configured with the target Surfaces; it is the channel through which requests are sent to the camera.
CaptureRequest
The information carrier for a capture request submitted to a CameraCaptureSession.
CaptureResult
The result of a capture; it contains a lot of state information such as flash state, focus state, timestamps, and so on.
Now let's get to the code.
We use a TextureView as the preview target.
Set the listener:
// Set the listener
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);

// SurfaceTexture state callbacks
TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        setupCamera(width, height);
        configureTransform(width, height);
        openCamera();
        Log.i(TAG, "onSurfaceTextureAvailable: jason === " + surface);
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        configureTransform(width, height);
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        Log.i(TAG, "onSurfaceTextureDestroyed: jason ===" + surface);
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
};
setupCamera(width, height) retrieves the camera parameters and prepares them:
private void setupCamera(int width, int height) {
    // Get the camera manager (CameraManager)
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        // Iterate over all cameras
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            // StreamConfigurationMap manages all output formats and sizes the camera supports
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            boolean outputSupportedFor = map.isOutputSupportedFor(ImageFormat.YV12);
            Log.i(TAG, "setupCamera: jason ==" + outputSupportedFor);
            // Open the front camera by default, so skip back-facing cameras
            if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK)
                continue;
            mPreviewSize = getOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
            int orientation = getResources().getConfiguration().orientation;
            // Resize the TextureView to match the preview aspect ratio
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.changeSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.changeSize(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            mCameraId = cameraId;
            break;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
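getOptimalSize() is not shown in the original code, and changeSize() is a method of a custom TextureView subclass used here rather than a framework API. A minimal sketch of getOptimalSize(), assuming we simply want the smallest supported size that roughly matches the view's aspect ratio and is at least as large as the view:
    // Hypothetical helper (not from the original post): choose a preview size from the
    // sizes supported for SurfaceTexture output. Sensor sizes are landscape, so we compare
    // against the longer/shorter edges of the view.
    private Size getOptimalSize(Size[] choices, int viewWidth, int viewHeight) {
        int longEdge = Math.max(viewWidth, viewHeight);
        int shortEdge = Math.min(viewWidth, viewHeight);
        float targetRatio = (float) longEdge / shortEdge;
        Size best = null;
        for (Size size : choices) {
            float ratio = (float) size.getWidth() / size.getHeight();
            if (Math.abs(ratio - targetRatio) > 0.1f) continue;                        // keep roughly the same aspect ratio
            if (size.getWidth() < longEdge || size.getHeight() < shortEdge) continue;  // avoid upscaling
            if (best == null || size.getWidth() < best.getWidth()) best = size;        // smallest acceptable size
        }
        return best != null ? best : choices[0];                                       // fall back to the first supported size
    }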
configureTransform(width, height) sets the TextureView's transform matrix so the preview image is rotated correctly:
private void configureTransform(int viewWidth, int viewHeight) {
    if (null == mTextureView || null == mPreviewSize) {
        return;
    }
    int rotation = getWindowManager().getDefaultDisplay().getRotation();
    Log.i(TAG, "configureTransform: rotation:" + rotation);
    Matrix matrix = new Matrix();
    RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
    float centerX = viewRect.centerX();
    float centerY = viewRect.centerY();
    if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
        float scale = Math.max(
                (float) viewHeight / mPreviewSize.getHeight(),
                (float) viewWidth / mPreviewSize.getWidth());
        matrix.postScale(scale, scale, centerX, centerY);
        matrix.postRotate(90 * (rotation - 2), centerX, centerY);
    } else if (Surface.ROTATION_180 == rotation) {
        matrix.postRotate(180, centerX, centerY);
    }
    Log.i(TAG, "configureTransform: matrix:" + matrix.toString());
    mTextureView.setTransform(matrix);
}
Open the camera
openCamera() performs the actual open:
private void openCamera() {
    // Get the camera manager (CameraManager)
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        // Check the permission first
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            Log.i(TAG, "openCamera: camera permission not granted");
            return;
        }
        // Open the camera: the first argument is the camera id to open, the second is the
        // CameraDevice state callback, and the third decides which thread the callbacks run on
        // (null means the current thread)
        manager.openCamera(mCameraId, mStateCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
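The method above simply returns when the CAMERA permission is missing. A minimal sketch of requesting it at runtime (REQUEST_CAMERA_CODE is a hypothetical constant; handle the user's answer in onRequestPermissionsResult and call openCamera() again on success):
    private static final int REQUEST_CAMERA_CODE = 100; // hypothetical request code

    // Returns true if the permission is already granted, otherwise asks the user for it.
    private boolean ensureCameraPermission() {
        if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
                == PackageManager.PERMISSION_GRANTED) {
            return true;
        }
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_CODE);
        return false;
    }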
mStateCallback is the camera state callback:
// Camera device state callbacks
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(CameraDevice camera) {
        mCameraDevice = camera;
        // Start the preview
        startPreview();
        Log.i(TAG, "onOpened: jason ==" + camera);
    }

    @Override
    public void onDisconnected(@NonNull CameraDevice camera) {
        Log.i(TAG, "CameraDevice jason == Disconnected");
    }

    @Override
    public void onError(@NonNull CameraDevice camera, int error) {
        Log.e(TAG, "CameraDevice jason == Error");
    }

    @Override
    public void onClosed(@NonNull CameraDevice camera) {
        super.onClosed(camera);
        Log.i(TAG, "onClosed: jason ==");
    }
};
Once the camera has been opened, the CameraDevice object is delivered in onOpened.
Then we start the preview:
startPreview();
private void startPreview() {
    setupImageReader();
    setupPreviewReader();
    try {
        createPreviewRequestBuilder();
        // Create the capture session: the first argument is the list of output Surfaces, the
        // second is the CameraCaptureSession state callback (onConfigured is called once the
        // session is ready), and the third decides which thread the callbacks run on
        // (null means the current thread)
        mCameraDevice.createCaptureSession(Arrays.asList(mPreviewSurface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                mCaptureSession = session;
                startRepeatPreview();
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
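Both openCamera() and createCaptureSession() are given null as the Handler here, so every callback runs on the calling (main) thread. A common alternative, sketched below, is a dedicated HandlerThread whose Handler is passed in place of null (mBackgroundThread and mBackgroundHandler are assumed fields, started e.g. in onResume and stopped in onPause):
    private HandlerThread mBackgroundThread;
    private Handler mBackgroundHandler;

    // Start a background thread and create a Handler bound to its Looper.
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    // Quit the background thread and wait for it to finish.
    private void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }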
setupImageReader() initializes the image-data callback and the Surface that receives the frames:
private void setupImageReader() {
    // The first three parameters are the desired width, height and format;
    // the last one is the maximum number of images that can be held at once
    mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(), ImageFormat.YV12, 1);
    // Listen for ImageReader events: onImageAvailable is called whenever a new frame is
    // available, and that frame can be processed there
    mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Log.i(TAG, "onImageAvailable: jason == " + Thread.currentThread());
            final Image image = reader.acquireLatestImage();
            if (image == null) {
                return; // acquireLatestImage() may return null if no new frame is available
            }
            // Save/process the frame asynchronously on a worker thread
            ImageSaver imageSaver = new ImageSaver(image, new Handler.Callback() {
                @Override
                public boolean handleMessage(Message msg) {
                    image.close(); // The Image must be closed once it has been consumed
                    return false;
                }
            }, true);
            mExecutorService.execute(imageSaver);
        }
    }, null);
}
setupPreviewReader() initializes the Surface used for the preview:
private void setupPreviewReader() {
    SurfaceTexture mSurfaceTexture = mTextureView.getSurfaceTexture();
    // Set the default buffer size of the TextureView's SurfaceTexture
    mSurfaceTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
    // Wrap it in a Surface that the preview frames are rendered to
    mPreviewSurface = new Surface(mSurfaceTexture);
}
createPreviewRequestBuilder() builds the preview request builder:
// Create the preview request Builder (TEMPLATE_PREVIEW means a preview request)
private void createPreviewRequestBuilder() {
    try {
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        // Set the Surface the preview is rendered to
        mPreviewRequestBuilder.addTarget(mPreviewSurface);
        MeteringRectangle[] meteringRectangles = mPreviewRequestBuilder.get(CaptureRequest.CONTROL_AF_REGIONS);
        if (meteringRectangles != null && meteringRectangles.length > 0) {
            Log.d(TAG, "PreviewRequestBuilder: AF_REGIONS=" + meteringRectangles[0].getRect().toString());
        }
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
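The builder above leaves the AF trigger idle and relies on CONTROL_MODE_AUTO. If continuous autofocus and automatic exposure/flash are wanted for the preview (a common choice, not something the original code does), the corresponding keys can be set on the same builder:
    // Continuous autofocus for preview frames
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
            CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
    // Auto exposure with automatic flash
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
            CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);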
createCaptureSession() then gives us the channel through which requests are sent to the camera.
In onConfigured we receive the CameraCaptureSession object and start repeatedly sending the preview request:
private void startRepeatPreview() {
    mPreviewRequestBuilder.setTag(TAG_PREVIEW);
    // Submit a repeating request so the preview keeps receiving frames
    try {
        mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), mPreviewCaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
At this point the preview pipeline is up and running!
Next comes the capture (photo) flow.
Taking a photo is comparatively simple: we only need to send a capture request through the session.
// Take a photo
private void capture() {
    try {
        // First create the CaptureRequest for the still capture
        final CaptureRequest.Builder mCaptureBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        // Get the display rotation
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        mCaptureBuilder.addTarget(mPreviewSurface);
        mCaptureBuilder.addTarget(mImageReader.getSurface());
        // Set the JPEG orientation
        Log.i(TAG, "capture: orientation:" + ORIENTATION.get(rotation));
        mCaptureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATION.get(rotation));
        // Stop the repeating preview request
        mCaptureSession.stopRepeating();
        // Issue the capture; the callback below restarts the preview once it completes.
        // Because mCaptureBuilder targets the ImageReader, its onImageAvailable() is invoked
        // automatically so the image can be saved there.
        CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                Log.i(TAG, "onCaptureCompleted: jason ==" + result.toString());
                startRepeatPreview();
            }
        };
        mCaptureSession.capture(mCaptureBuilder.build(), captureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
The callback set up in setupImageReader() receives the image data; since the goal here is to capture video frames, the YV12 format is used.
Extracting the data is done on a worker thread:
public static class ImageSaver implements Runnable {
    private Image mImage;
    private Handler.Callback mCallback;
    private boolean isYV12;

    public ImageSaver(Image image, Handler.Callback callback, boolean isYV12) {
        mImage = image;
        mCallback = callback;
        this.isYV12 = isYV12;
    }

    @Override
    public void run() {
        if (isYV12) {
            // Copy each image plane into its own byte array
            int len = mImage.getPlanes().length;
            byte[][] bytes = new byte[len][];
            int count = 0;
            for (int i = 0; i < len; i++) {
                ByteBuffer buffer = mImage.getPlanes()[i].getBuffer();
                int remaining = buffer.remaining();
                byte[] data = new byte[remaining];
                buffer.get(data);
                bytes[i] = data;
                count += remaining;
                Log.i(TAG, "run: jason == " + remaining);
            }
            Log.i(TAG, "run: jason == count " + count);
        } else {
            // JPEG: the whole image sits in plane 0, write it straight to a file
            ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            File imageFile = new File(Environment.getExternalStorageDirectory() + "/DCIM/myPicture.jpg");
            FileOutputStream fos = null;
            try {
                fos = new FileOutputStream(imageFile);
                fos.write(data, 0, data.length);
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (fos != null) {
                    try {
                        fos.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        // Notify the caller so the Image can be closed
        Message obtain = Message.obtain();
        obtain.what = 1;
        mCallback.handleMessage(obtain);
    }
}
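Note that the loop above copies each plane buffer as-is, which only yields a tightly packed YV12 frame when every plane's rowStride equals its width. On devices that pad rows, the planes need to be repacked row by row. A minimal sketch, assuming the planar case of pixelStride == 1 (planeWidth/planeHeight are the full size for the luma plane and half of each for the chroma planes):
    // Hypothetical helper: copy one image plane into a tightly packed byte[],
    // dropping any row padding (rowStride may be larger than the plane width).
    private static byte[] packPlane(Image.Plane plane, int planeWidth, int planeHeight) {
        ByteBuffer buffer = plane.getBuffer();
        int rowStride = plane.getRowStride();
        byte[] packed = new byte[planeWidth * planeHeight];
        byte[] row = new byte[rowStride];
        for (int r = 0; r < planeHeight; r++) {
            int length = Math.min(rowStride, buffer.remaining()); // the last row may be shorter than rowStride
            buffer.get(row, 0, length);
            System.arraycopy(row, 0, packed, r * planeWidth, planeWidth);
        }
        return packed;
    }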
To save a JPEG instead, use the ImageFormat.JPEG format; the code above handles both formats.
If this post helped you, remember to give it a like!
Supplementary code:
// Maps the display rotation to the JPEG orientation (these values assume a sensor orientation of 90°)
private static final SparseIntArray ORIENTATION = new SparseIntArray();
static {
    ORIENTATION.append(Surface.ROTATION_0, 90);
    ORIENTATION.append(Surface.ROTATION_90, 0);
    ORIENTATION.append(Surface.ROTATION_180, 270);
    ORIENTATION.append(Surface.ROTATION_270, 180);
}

private CameraCaptureSession.CaptureCallback mPreviewCaptureCallback = new CameraCaptureSession.CaptureCallback() {
    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
    }

    @Override
    public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
    }
};
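The preview callback is left empty here. As a small example of what CaptureResult exposes (see the list of roles above), onCaptureCompleted could log the 3A state and the frame timestamp:
    // Inside onCaptureCompleted (logging only):
    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    Long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
    Log.d(TAG, "preview result: AF=" + afState + " AE=" + aeState + " ts=" + timestamp);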
private ExecutorService mExecutorService = Executors.newSingleThreadExecutor();
// Release the camera resources
private void closeCamera() {
    if (null != mCaptureSession) {
        mCaptureSession.close();
        mCaptureSession = null;
    }
    if (null != mCameraDevice) {
        mCameraDevice.close();
        mCameraDevice = null;
    }
    if (null != mImageReader) {
        mImageReader.close();
        mImageReader = null;
    }
}
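The post does not show where closeCamera() is called. A minimal sketch, assuming the usual Activity lifecycle hooks are used to release the camera and shut down the saver thread:
    @Override
    protected void onPause() {
        closeCamera();               // release the session, device and ImageReader when leaving the screen
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        mExecutorService.shutdown(); // stop accepting new ImageSaver tasks
        super.onDestroy();
    }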