要想保存 Camera2 输出的图像,就要用到 ImageReader:
ImageReader 会提供一个 Surface,把这个 Surface 同时设置到
CaptureRequest(addTarget)和 CameraCaptureSession(输出配置)中,帧数据就会回调给 ImageReader。
下面是 ImageReader 的创建过程:
// Query the back camera's stream configuration to choose an output size.
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(backCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Largest supported YUV_420_888 size (CompareSizesByArea is a project helper comparing width*height).
Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());
Log.e("CameraActivity", "calculateCameraParameters width=" + largest.getWidth() + " height=" + largest.getHeight());
// maxImages = 2: one image being processed while the camera fills the next.
mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.YUV_420_888, 2);
// Remember the chosen size; it is needed later when converting/saving frames.
width = largest.getWidth();
height = largest.getHeight();
我们需要从摄像头的 StreamConfigurationMap 中拿到它支持的宽高。
这里要特别注意:ImageReader 不支持 NV21 格式,所以这里使用 YUV_420_888。
// Register for frame callbacks; they are delivered on the "sub" handler thread.
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
// Frame counter: only the first frame after record() is saved (see i < 2 below).
++i;
Log.e("CameraActivity", "onImageAvailable");
// NOTE(review): the image must be acquired and closed on every callback,
// otherwise the reader's buffer queue (maxImages = 2) fills up and the camera stalls.
Image image = mImageReader.acquireLatestImage();
if (image != null && i < 2) {
Image.Plane[] planes = image.getPlanes();
/** Y */
ByteBuffer bufferY = planes[0].getBuffer();
/** U(Cb) */
ByteBuffer bufferU = planes[1].getBuffer();
/** V(Cr) */
ByteBuffer bufferV = planes[2].getBuffer();
/** NV21 byte array assembled from the three planes */
byte[] yuvData = yuv420ToNV21(bufferY, bufferU, bufferV);
// The copy is taken BEFORE image.close(), so saving can safely run off-thread:
// plane buffers are only valid while the Image is open.
mExecutorService.execute(new Runnable() {
@Override
public void run() {
try {
saveYUVtoPicture(yuvData, width, height);
} catch (IOException e) {
e.printStackTrace();
}
}
});
} else {
}
if(image != null){
image.close();
}
}
}, subHandler);
} catch (Exception e) {
e.printStackTrace();
}
在 onImageAvailable 回调中可以拿到 Image 对象,并从中取出 Y、U、V 三个分量。
这里要特别注意:Y 分量的像素步长(pixelStride)是 1,而 U、V 分量的像素步长是 2,
所以 YUV420 转 NV21
就有下面的计算方法。
/**
 * Compresses an NV21 frame to JPEG and writes it under savePicPath.
 *
 * Fixes vs. the original excerpt: the JPEG is written exactly once — the old code
 * wrote the re-encoded Bitmap and then appended the YuvImage JPEG to the same stream,
 * producing a corrupt "double JPEG" file; the lossy decode/re-encode round-trip
 * through Bitmap is removed; the directory is created with mkdirs(); the stream is
 * closed via try-with-resources even on failure; and only the file NAME is run
 * through String.format (the old code formatted the whole path, which breaks if the
 * path ever contains a '%').
 *
 * @param data   NV21 (Y + interleaved VU) pixel data
 * @param width  frame width in pixels
 * @param height frame height in pixels
 * @throws IOException if the JPEG cannot be written
 */
public void saveYUVtoPicture(byte[] data, int width, int height) throws IOException {
    File dir = new File(savePicPath);
    if (!dir.exists()) {
        dir.mkdirs();
    }
    // YuvImage only supports NV21 and YUY2, hence the conversion done by the caller.
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80, baos);
    File out = new File(dir, String.format("%d_%s_%s.jpg",
            System.currentTimeMillis(), String.valueOf(width), String.valueOf(height)));
    try (FileOutputStream outStream = new FileOutputStream(out)) {
        outStream.write(baos.toByteArray());
    }
}
由于 YuvImage 只支持 NV21 和 YUY2 两种格式,
所以我们必须先用上面的方法把 YUV_420_888 转换为 NV21。
下面是完整的代码:
package com.yuanxuzhen.androidmedia.video;
import android.Manifest;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.OutputConfiguration;
import android.hardware.camera2.params.SessionConfiguration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.View;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import com.yuanxuzhen.androidmedia.DirUtil;
import com.yuanxuzhen.androidmedia.databinding.ActivityCameraLayoutBinding;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import permissions.dispatcher.NeedsPermission;
import permissions.dispatcher.OnNeverAskAgain;
import permissions.dispatcher.OnPermissionDenied;
import permissions.dispatcher.RuntimePermissions;
@RuntimePermissions
public class CameraActivity extends AppCompatActivity {
    private static final String TAG = "CameraActivity";
    /** View binding for the camera layout (preview SurfaceView + start/stop buttons). */
    ActivityCameraLayoutBinding binding;
    /** Pool used both as the camera-API executor and for background JPEG saving. */
    ExecutorService mExecutorService;
    CameraManager cameraManager;
    CameraDevice mCamera;
    private String frontCameraId = "";
    private String backCameraId = "";
    CameraCaptureSession mCameraPreviewCaptureSession;
    CameraCaptureSession mCameraRecordCaptureSession;
    /** Delivers YUV_420_888 frames while "recording"; ImageReader has no NV21 format. */
    ImageReader mImageReader;
    /** Looper thread for ImageReader (frame) callbacks. */
    HandlerThread subHandlerThread;
    Handler subHandler;
    /** Looper thread for capture-session callbacks. */
    HandlerThread sub1HandlerThread;
    Handler sub1Handler;
    /** Capture size chosen in calculateCameraParameters(). */
    int width, height;
    private String savePicPath = null;
    /** Frames seen since record() started; only the first one is written to disk. */
    int i = 0;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        savePicPath = DirUtil.getCacheDir() + File.separator + "save";
        cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
        mExecutorService = Executors.newCachedThreadPool();
        binding = ActivityCameraLayoutBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());
        subHandlerThread = new HandlerThread("sub");
        subHandlerThread.start();
        subHandler = new Handler(subHandlerThread.getLooper());
        sub1HandlerThread = new HandlerThread("sub1");
        sub1HandlerThread.start();
        sub1Handler = new Handler(sub1HandlerThread.getLooper());
        binding.startRecord.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                record();
            }
        });
        binding.stopRecord.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                preview();
            }
        });
        binding.surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(@NonNull SurfaceHolder holder) {
                // Open the camera only once the preview surface actually exists.
                CameraActivityPermissionsDispatcher.startCameraWithPermissionCheck(CameraActivity.this);
            }

            @Override
            public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
            }

            @Override
            public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
            }
        });
    }

    /**
     * Finds the front/back camera ids, configures the ImageReader, then opens the
     * back camera. Runs on the executor so the main thread is never blocked.
     */
    @RequiresApi(api = Build.VERSION_CODES.P) // openCamera(String, Executor, ...) needs API 28
    @NeedsPermission({Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA})
    public void startCamera() {
        Log.e(TAG, "startCamera");
        mExecutorService.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    if (ActivityCompat.checkSelfPermission(CameraActivity.this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                        // Should not happen: PermissionsDispatcher only calls this once CAMERA is granted.
                        return;
                    }
                    String[] cameraIdArray = cameraManager.getCameraIdList();
                    for (String ele : cameraIdArray) {
                        CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(ele);
                        // LENS_FACING may be absent (external cameras); guard the unboxing NPE
                        // the original `get(...) == CONSTANT` comparison could throw.
                        Integer facing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
                        if (facing == null) {
                            continue;
                        }
                        if (facing == CameraCharacteristics.LENS_FACING_FRONT) {
                            frontCameraId = ele;
                        } else if (facing == CameraCharacteristics.LENS_FACING_BACK) {
                            backCameraId = ele;
                        }
                    }
                    calculateCameraParameters();
                    cameraManager.openCamera(backCameraId,
                            mExecutorService,
                            new CameraDevice.StateCallback() {
                                @Override
                                public void onOpened(@NonNull CameraDevice camera) {
                                    Log.e(TAG, "onOpened");
                                    mCamera = camera;
                                    preview();
                                }

                                @Override
                                public void onDisconnected(@NonNull CameraDevice camera) {
                                    Log.e(TAG, "onDisconnected");
                                }

                                @Override
                                public void onError(@NonNull CameraDevice camera, int error) {
                                    Log.e(TAG, "onError error=" + error);
                                }
                            }
                    );
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /** Tears down both sessions, the device, and the ImageReader (previously leaked). */
    public void stopCamera() {
        Log.e(TAG, "stopCamera");
        closeRecordView();
        closePreview();
        if (mCamera != null) {
            mCamera.close();
            mCamera = null;
        }
        if (mImageReader != null) {
            mImageReader.close();
            mImageReader = null;
        }
    }

    @OnPermissionDenied(Manifest.permission.RECORD_AUDIO)
    public void onDeniedAudio() {
        Toast.makeText(this, "录音权限拒绝", Toast.LENGTH_SHORT).show();
    }

    @OnNeverAskAgain(Manifest.permission.RECORD_AUDIO)
    public void onNeverAskAgainAudio() {
        Toast.makeText(this, "录音权限再不询问", Toast.LENGTH_SHORT).show();
    }

    @OnPermissionDenied(Manifest.permission.CAMERA)
    public void onDeniedCamera() {
        Toast.makeText(this, "录像权限拒绝", Toast.LENGTH_SHORT).show();
    }

    @OnNeverAskAgain(Manifest.permission.CAMERA)
    public void onNeverAskAgainCamera() {
        Toast.makeText(this, "录像权限再不询问", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        // Delegate to the PermissionsDispatcher-generated helper.
        CameraActivityPermissionsDispatcher.onRequestPermissionsResult(this, requestCode, grantResults);
    }

    /**
     * Check if this device has a camera.
     *
     * @return true when the device reports the camera system feature
     */
    private boolean checkCameraHardware() {
        return getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
    }

    @Override
    protected void onDestroy() {
        stopCamera();
        // Previously leaked: stop the looper threads and the executor with the activity.
        subHandlerThread.quitSafely();
        sub1HandlerThread.quitSafely();
        mExecutorService.shutdown();
        super.onDestroy();
    }

    /** Starts a plain repeating preview on the SurfaceView (no ImageReader attached). */
    @RequiresApi(api = Build.VERSION_CODES.P) // SessionConfiguration needs API 28, not N
    private void preview() {
        try {
            closeRecordView();
            final CaptureRequest.Builder previewBuilder
                    = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            Surface previewSurface = binding.surfaceView.getHolder().getSurface();
            previewBuilder.addTarget(previewSurface);
            List<OutputConfiguration> outputs = new ArrayList<>();
            outputs.add(new OutputConfiguration(previewSurface));
            SessionConfiguration sessionConfiguration
                    = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR,
                    outputs,
                    mExecutorService,
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            mCameraPreviewCaptureSession = session;
                            previewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
                            try {
                                // The original callback overrode every method only to call
                                // super — equivalent to an empty callback.
                                mCameraPreviewCaptureSession.setRepeatingRequest(previewBuilder.build(),
                                        new CameraCaptureSession.CaptureCallback() {
                                        }, sub1Handler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            Log.e(TAG, "preview onConfigureFailed");
                        }
                    }
            );
            mCamera.createCaptureSession(sessionConfiguration);
        } catch (Exception e) {
            e.printStackTrace(); // was silently swallowed
        }
    }

    private void closePreview() {
        if (mCameraPreviewCaptureSession != null) {
            mCameraPreviewCaptureSession.close();
            mCameraPreviewCaptureSession = null;
        }
    }

    /** Closes the record session (renamed from the original typo "closeRecordiew"). */
    private void closeRecordView() {
        if (mCameraRecordCaptureSession != null) {
            mCameraRecordCaptureSession.close();
            mCameraRecordCaptureSession = null;
        }
    }

    /**
     * Restarts the session with both the preview surface and the ImageReader surface
     * attached, so onImageAvailable begins receiving frames.
     */
    @RequiresApi(api = Build.VERSION_CODES.P) // SessionConfiguration needs API 28, not N
    private void record() {
        try {
            Log.e(TAG, "record");
            i = 0; // reset the frame counter so the next frame is saved
            File file = new File(savePicPath);
            DirUtil.deleteDir(file);
            final CaptureRequest.Builder builder
                    = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
            Surface previewSurface = binding.surfaceView.getHolder().getSurface();
            builder.addTarget(previewSurface);
            builder.addTarget(mImageReader.getSurface());
            List<OutputConfiguration> outputs = new ArrayList<>();
            outputs.add(new OutputConfiguration(previewSurface));
            outputs.add(new OutputConfiguration(mImageReader.getSurface()));
            SessionConfiguration sessionConfiguration
                    = new SessionConfiguration(SessionConfiguration.SESSION_REGULAR,
                    outputs,
                    mExecutorService,
                    new CameraCaptureSession.StateCallback() {
                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession session) {
                            mCameraRecordCaptureSession = session;
                            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
                            try {
                                mCameraRecordCaptureSession.setRepeatingRequest(builder.build(),
                                        new CameraCaptureSession.CaptureCallback() {
                                        }, sub1Handler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
                            Log.e(TAG, "record onConfigureFailed");
                        }
                    }
            );
            mCamera.createCaptureSession(sessionConfiguration);
        } catch (Exception e) {
            e.printStackTrace(); // was silently swallowed
        }
    }

    /**
     * Picks the largest YUV_420_888 size the back camera supports and creates the
     * ImageReader that receives those frames on the "sub" handler thread.
     */
    private void calculateCameraParameters() {
        try {
            CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(backCameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());
            Log.e(TAG, "calculateCameraParameters width=" + largest.getWidth() + " height=" + largest.getHeight());
            // maxImages = 2: one image being processed while the camera fills the next.
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.YUV_420_888, 2);
            width = largest.getWidth();
            height = largest.getHeight();
            mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    ++i;
                    Log.e(TAG, "onImageAvailable");
                    // Always acquire (and close) the latest image, even when we do not
                    // save it — otherwise the reader's queue fills and the camera stalls.
                    Image image = reader.acquireLatestImage();
                    if (image == null) {
                        return;
                    }
                    try {
                        if (i < 2) { // save only the first frame after record()
                            Image.Plane[] planes = image.getPlanes();
                            ByteBuffer bufferY = planes[0].getBuffer(); // Y
                            ByteBuffer bufferU = planes[1].getBuffer(); // U (Cb)
                            ByteBuffer bufferV = planes[2].getBuffer(); // V (Cr)
                            // Copy the planes BEFORE image.close(): the buffers are only
                            // valid while the Image is open.
                            final byte[] yuvData = yuv420ToNV21(bufferY, bufferU, bufferV);
                            mExecutorService.execute(new Runnable() {
                                @Override
                                public void run() {
                                    try {
                                        saveYUVtoPicture(yuvData, width, height);
                                    } catch (IOException e) {
                                        e.printStackTrace();
                                    }
                                }
                            });
                        }
                    } finally {
                        // finally-block: close even if plane access/conversion throws
                        image.close();
                    }
                }
            }, subHandler);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Interleaves the three YUV_420_888 planes into an NV21 (Y followed by VUVU...) array.
     * <p>
     * Assumes the chroma planes use pixelStride == 2 and rowStride == width — the common
     * device layout where U and V share one interleaved buffer. NOTE(review): reading
     * Plane#getPixelStride()/getRowStride() instead of assuming would be more robust.
     *
     * @param y Y plane buffer (pixel stride 1)
     * @param u U (Cb) plane buffer (pixel stride 2 assumed)
     * @param v V (Cr) plane buffer (pixel stride 2 assumed)
     * @return a full NV21 frame of exactly ySize * 3 / 2 bytes
     */
    private byte[] yuv420ToNV21(ByteBuffer y, ByteBuffer u, ByteBuffer v) {
        int ySize = y.remaining();
        int uSize = u.remaining();
        int vSize = v.remaining();
        Log.e(TAG, "yuv420ToNV21 ySize=" + ySize + " uSize=" + uSize + " vSize=" + vSize);
        // NV21 is exactly 1.5 * ySize bytes. The original allocated ySize + uSize, which
        // is one byte short when pixelStride == 2 (each chroma plane misses its very
        // last sample because the interleaved planes overlap).
        byte[] nv21 = new byte[ySize + ySize / 2];
        byte[] uData = new byte[uSize];
        byte[] vData = new byte[vSize];
        y.get(nv21, 0, ySize);
        u.get(uData, 0, uSize);
        v.get(vData, 0, vSize);
        int chromaPairs = ySize / 4; // one V+U pair per 2x2 block of luma pixels
        for (int p = 0; p < chromaPairs; p++) {
            int src = p * 2; // pixel stride 2: real samples sit at even indices
            // Clamp guards the shortened final sample described above.
            nv21[ySize + 2 * p] = vData[Math.min(src, vSize - 1)];
            nv21[ySize + 2 * p + 1] = uData[Math.min(src, uSize - 1)];
        }
        return nv21;
    }

    /**
     * Compresses an NV21 frame to JPEG and writes it under {@link #savePicPath}.
     * <p>
     * Fixes vs. the original: the JPEG is written exactly once (the old code wrote the
     * re-encoded Bitmap and then appended the YuvImage JPEG to the same stream, producing
     * a corrupt double-JPEG file); the lossy Bitmap decode/re-encode round-trip is gone;
     * the directory is created with mkdirs(); the stream is closed via try-with-resources
     * even on failure; only the file NAME goes through String.format (the old code
     * formatted the whole path, which breaks if the path ever contains a '%').
     *
     * @param data   NV21 pixel data (YuvImage only supports NV21 and YUY2)
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @throws IOException if the JPEG cannot be written
     */
    public void saveYUVtoPicture(byte[] data, int width, int height) throws IOException {
        File dir = new File(savePicPath);
        if (!dir.exists()) {
            dir.mkdirs();
        }
        YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80, baos);
        File out = new File(dir, String.format("%d_%s_%s.jpg",
                System.currentTimeMillis(), String.valueOf(width), String.valueOf(height)));
        try (FileOutputStream outStream = new FileOutputStream(out)) {
            outStream.write(baos.toByteArray());
        }
    }
}
完整代码的 gitee 地址:
https://gitee.com/creat151/android-media.git