我们首先要明白NV21的存储样式
NV21 YYYYYYYYVUVU => YUV420SP(ANDROID)
数据先存Y,再VU交替存储
提取yuv数据要用到下面的API
如果我们想获取YUV的数据那么就要用到
MediaFormat
int yuvFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
if (!isSupportYuvFormat(yuvFormat, codecCapabilities.colorFormats)) {
return ERROR_OPEN_CODEC;
}
Log.e(TAG, "openCodec, format yuvFormat MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible");
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, yuvFormat);
codecCapabilities.colorFormats 标识当前解码器支持的输出格式
MediaCodecInfo
// Quoted verbatim from AOSP MediaCodecInfo.java for reference -- note that an
// unsupported MIME type throws IllegalArgumentException rather than returning null.
/**
* Enumerates the capabilities of the codec component. Since a single
* component can support data of a variety of types, the type has to be
* specified to yield a meaningful result.
* @param type The MIME type to query
*/
public final CodecCapabilities getCapabilitiesForType(
String type) {
CodecCapabilities caps = mCaps.get(type);
if (caps == null) {
throw new IllegalArgumentException("codec does not support type");
}
// clone writable object
return caps.dup();
}
我们选择了要输出的yuv格式,那么也要判断下yuv格式当前解码器是否支持
有别于音频解码,视频解码要使用
public Image getOutputImage(int index)
index通过
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputIndex = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
和音频的获取是一致的
我们拿到image图像后
/**
 * Extracts the decoded frame from a flexible-YUV {@code Image} and packs it
 * into a single NV21 byte array.
 *
 * Sizing uses the crop rectangle, not the plane buffer sizes: the decoder may
 * pad each row (rowStride > width), so the buffers hold surplus bytes.
 */
private byte[] getDataFromImage(Image image) {
    Rect crop = image.getCropRect();
    int width = crop.width();
    int height = crop.height();
    Image.Plane[] planes = image.getPlanes();
    // Plane 0: Y (luma)
    ByteBuffer bufferY = planes[0].getBuffer();
    Log.e(TAG, "111111 bufferY pixelStrade=" + planes[0].getPixelStride() + " rawStade=" + planes[0].getRowStride());
    // Plane 1: U (Cb)
    ByteBuffer bufferU = planes[1].getBuffer();
    Log.e(TAG, "22222 bufferU pixelStrade=" + planes[1].getPixelStride() + " rawStade=" + planes[1].getRowStride());
    // Plane 2: V (Cr)
    ByteBuffer bufferV = planes[2].getBuffer();
    Log.e(TAG, "3333 bufferV pixelStrade=" + planes[2].getPixelStride() + " rawStade=" + planes[2].getRowStride());
    Log.e(TAG, "bufferYSize=" + bufferY.remaining() + " bufferUSize=" + bufferU.remaining() + " bufferVSize=" + bufferV.remaining());
    // Packed NV21 result: Y plane followed by interleaved VU samples.
    byte[] yuvData = yuv420ToNV21(bufferY, bufferU, bufferV, width, height);
    return yuvData;
}
/**
 * Packs separate Y/U/V plane buffers into one NV21 byte array
 * (width*height luma bytes followed by width*height/2 interleaved V,U bytes).
 *
 * NOTE(review): the interleave loop reads uData[i]/vData[i] as the i-th chroma
 * sample, which is only correct when the U and V planes are tightly packed
 * (pixelStride == 1, rowStride == width/2). Confirm against the logged
 * pixel/row strides before relying on this for arbitrary decoders.
 */
private byte[] yuv420ToNV21(ByteBuffer bufferY, ByteBuffer bufferU, ByteBuffer bufferV, int width, int height) {
    int bufferYSize = bufferY.remaining();
    int bufferUSize = bufferU.remaining();
    int bufferVSize = bufferV.remaining();
    Log.e(TAG, "bufferYSize=" + bufferYSize + " bufferUSize=" + bufferUSize + " bufferVSize=" + bufferVSize);
    Log.e(TAG, "yuv420ToNV21 size=" + (bufferYSize + bufferUSize + bufferVSize));
    // NV21 frame size: full-resolution luma plus half-size chroma.
    byte[] yuvData = new byte[width * height + width * height / 2];
    byte[] uData = new byte[bufferUSize];
    byte[] vData = new byte[bufferVSize];
    // Copy only width*height luma bytes; the buffer may contain padding beyond that.
    bufferY.get(yuvData, 0, width * height);
    bufferU.get(uData, 0, bufferUSize);
    bufferV.get(vData, 0, bufferVSize);
    // Interleave chroma after the luma block in NV21 (V first, then U) order.
    for (int i = 0; i < width * height / 4; i++) {
        yuvData[width * height + 2 * i] = vData[i];
        yuvData[width * height + 2 * i + 1] = uData[i];
    }
    return yuvData;
}
这里我们必须明白,ByteBuffer 中的数据是有富余的(每行可能带有填充字节),所以我们必须根据视频的宽高来取数据。
我们知道 YUV420 的数据量比值为 Y : U : V = 4 : 1 : 1。
下面是完整的代码
package com.yuanxuzhen.androidmedia.decode;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;
import com.yuanxuzhen.androidmedia.demux.AudioMediaInfo;
import com.yuanxuzhen.androidmedia.demux.MediaUtil;
import com.yuanxuzhen.androidmedia.demux.VideoMediaInfo;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Decodes the video track of a media file with MediaCodec, requesting
 * COLOR_FormatYUV420Flexible output, and dumps each decoded frame to a file
 * as raw NV21 bytes.
 *
 * Not thread-safe: one decode() call at a time per instance.
 */
public class YuvDecoder {
    /** Input path missing/invalid, or the file contains no video track. */
    public static final int ERROR_INPUT_INVALID = 100;
    /** Output file could not be opened or written. */
    public static final int ERROR_OUTPUT_FAILED = 200;
    /** Decoder could not be created or does not support flexible YUV output. */
    public static final int ERROR_OPEN_CODEC = 300;
    public static final int OK = 0;
    /** 0 makes dequeue calls return immediately (busy polling). */
    private static final int TIMEOUT_USEC = 0;
    public String TAG = "YuvDecoder";
    private MediaExtractor mExtractor;
    private MediaFormat mFormat;
    private FileOutputStream mFos;
    private MediaCodec mDecoder;
    // Pre-API-21 buffer arrays. NOTE(review): decode() uses getOutputImage(),
    // which itself requires API 21+, so this fallback path looks effectively
    // dead -- confirm the app's minSdkVersion.
    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;
    private boolean mDecodeEnd = false;

    /**
     * Decodes the video track of {@code path} and writes raw NV21 frames to
     * {@code yuvPath}.
     *
     * @param path    input media file
     * @param yuvPath output file for the raw NV21 dump
     * @return OK, or one of the ERROR_* codes
     */
    public int decode(String path, String yuvPath) {
        VideoMediaInfo videoMediaInfo = MediaUtil.getVideoMediaInfo(path);
        Log.e(TAG, "decode " + videoMediaInfo);
        int ret;
        try {
            ret = doDecode(path, yuvPath);
        } finally {
            // Release extractor/codec/stream on every exit path, including
            // failures part-way through opening (the original leaked there).
            close();
        }
        Log.d(TAG, "decode end" + ret);
        return ret;
    }

    /** Opens input, output and codec, then runs the bounded decode loop. */
    private int doDecode(String path, String yuvPath) {
        int ret;
        if (OK != (ret = openInput(path))) {
            return ret;
        }
        if (OK != (ret = openOutput(yuvPath))) {
            return ret;
        }
        if (OK != (ret = openCodec(mFormat))) {
            return ret;
        }
        mDecodeEnd = false;
        // Capped at 100 iterations: this sample only dumps the first frames.
        int index = 0;
        while (!mDecodeEnd && index < 100) {
            ++index;
            if (OK != (ret = decode(mDecoder, mExtractor))) {
                Log.d(TAG, "decode failed, ret=" + ret);
                break;
            }
        }
        return ret;
    }

    /**
     * Runs one pump iteration: feeds at most one input sample to the codec and
     * drains at most one output frame, writing it to the output stream.
     * Sets mDecodeEnd when the end-of-stream flag comes back on the output.
     */
    private int decode(MediaCodec codec, MediaExtractor extractor) {
        Log.d(TAG, "decode");
        // --- input side ---
        int inputIndex = codec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputIndex >= 0) {
            ByteBuffer inputBuffer;
            if (Build.VERSION.SDK_INT >= 21) {
                inputBuffer = codec.getInputBuffer(inputIndex);
            } else {
                inputBuffer = mInputBuffers[inputIndex];
            }
            inputBuffer.clear();
            int sampleSize = extractor.readSampleData(inputBuffer, 0);
            if (sampleSize < 0) {
                // No more samples: signal EOS to the codec.
                codec.queueInputBuffer(inputIndex, 0, 0, 0L,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                codec.queueInputBuffer(inputIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                extractor.advance();
            }
        }
        // --- output side ---
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputIndex = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
        if (outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // Nothing ready yet; caller will pump again.
            Log.d(TAG, "INFO_TRY_AGAIN_LATER");
            return OK;
        } else if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            Log.d(TAG, "output format changed");
            return OK;
        } else if (outputIndex < 0) {
            Log.d(TAG, "outputIndex=" + outputIndex);
            return OK;
        } else {
            // Valid frame: convert to NV21 and append to the dump file.
            Image image = codec.getOutputImage(outputIndex);
            byte[] data = getDataFromImage(image);
            image.close();
            try {
                Log.d(TAG, "output write, size=" + bufferInfo.size);
                mFos.write(data);
                mFos.flush();
            } catch (IOException e) {
                e.printStackTrace();
                return ERROR_OUTPUT_FAILED;
            }
            codec.releaseOutputBuffer(outputIndex, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                mDecodeEnd = true;
            }
        }
        return OK;
    }

    /**
     * Extracts the decoded frame from a flexible-YUV {@code Image} and packs it
     * into a single NV21 byte array. Sizing uses the crop rectangle, not the
     * plane buffer sizes: the decoder may pad each row (rowStride > width).
     */
    private byte[] getDataFromImage(Image image) {
        Rect crop = image.getCropRect();
        int width = crop.width();
        int height = crop.height();
        Image.Plane[] planes = image.getPlanes();
        // Plane 0: Y (luma)
        ByteBuffer bufferY = planes[0].getBuffer();
        Log.e(TAG, "111111 bufferY pixelStrade=" + planes[0].getPixelStride() + " rawStade=" + planes[0].getRowStride());
        // Plane 1: U (Cb)
        ByteBuffer bufferU = planes[1].getBuffer();
        Log.e(TAG, "22222 bufferU pixelStrade=" + planes[1].getPixelStride() + " rawStade=" + planes[1].getRowStride());
        // Plane 2: V (Cr)
        ByteBuffer bufferV = planes[2].getBuffer();
        Log.e(TAG, "3333 bufferV pixelStrade=" + planes[2].getPixelStride() + " rawStade=" + planes[2].getRowStride());
        Log.e(TAG, "bufferYSize=" + bufferY.remaining() + " bufferUSize=" + bufferU.remaining() + " bufferVSize=" + bufferV.remaining());
        // Packed NV21 result: Y plane followed by interleaved VU samples.
        byte[] yuvData = yuv420ToNV21(bufferY, bufferU, bufferV, width, height);
        return yuvData;
    }

    /**
     * Packs separate Y/U/V plane buffers into one NV21 byte array
     * (width*height luma bytes followed by width*height/2 interleaved V,U bytes).
     *
     * NOTE(review): the interleave loop reads uData[i]/vData[i] as the i-th
     * chroma sample, which is only correct when the U and V planes are tightly
     * packed (pixelStride == 1, rowStride == width/2). Confirm against the
     * logged pixel/row strides before relying on this for arbitrary decoders.
     */
    private byte[] yuv420ToNV21(ByteBuffer bufferY, ByteBuffer bufferU, ByteBuffer bufferV, int width, int height) {
        int bufferYSize = bufferY.remaining();
        int bufferUSize = bufferU.remaining();
        int bufferVSize = bufferV.remaining();
        Log.e(TAG, "bufferYSize=" + bufferYSize + " bufferUSize=" + bufferUSize + " bufferVSize=" + bufferVSize);
        Log.e(TAG, "yuv420ToNV21 size=" + (bufferYSize + bufferUSize + bufferVSize));
        byte[] yuvData = new byte[width * height + width * height / 2];
        byte[] uData = new byte[bufferUSize];
        byte[] vData = new byte[bufferVSize];
        // Copy only width*height luma bytes; the buffer may contain padding beyond that.
        bufferY.get(yuvData, 0, width * height);
        bufferU.get(uData, 0, bufferUSize);
        bufferV.get(vData, 0, bufferVSize);
        // Interleave chroma after the luma block in NV21 (V first, then U) order.
        for (int i = 0; i < width * height / 4; i++) {
            yuvData[width * height + 2 * i] = vData[i];
            yuvData[width * height + 2 * i + 1] = uData[i];
        }
        return yuvData;
    }

    /**
     * Validates that {@code path} names an existing regular file.
     * Existence is checked first: File.isFile() is also false for a missing
     * path, so the original ordering made the "not exists" branch unreachable.
     */
    private int checkPath(String path) {
        if (path == null || path.isEmpty()) {
            Log.d(TAG, "invalid path, path is empty");
            return ERROR_INPUT_INVALID;
        }
        File file = new File(path);
        if (!file.exists()) {
            Log.d(TAG, "file not exists, path:" + path);
            return ERROR_INPUT_INVALID;
        }
        if (!file.isFile()) {
            Log.d(TAG, "path is not a file, path:" + path);
            return ERROR_INPUT_INVALID;
        }
        Log.d(TAG, "path is a file, path:" + path);
        return OK;
    }

    /**
     * Opens the extractor on {@code path}, selects the first "video/" track and
     * caches its MediaFormat in mFormat.
     */
    private int openInput(String path) {
        Log.d(TAG, "openInput audioPath:" + path);
        int ret;
        if (OK != (ret = checkPath(path))) {
            return ret;
        }
        mExtractor = new MediaExtractor();
        int videoTrack = -1;
        try {
            mExtractor.setDataSource(path);
            for (int i = 0; i < mExtractor.getTrackCount(); ++i) {
                MediaFormat format = mExtractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);
                Log.d(TAG, "mime=" + mime);
                if (mime.startsWith("video/")) {
                    videoTrack = i;
                    mFormat = format;
                    break;
                }
            }
            if (videoTrack < 0) {
                Log.d(TAG, "input contain no video");
                return ERROR_INPUT_INVALID;
            }
            mExtractor.selectTrack(videoTrack);
        } catch (IOException e) {
            return ERROR_INPUT_INVALID;
        }
        return OK;
    }

    /** Opens (and truncates) the raw-YUV output file. */
    private int openOutput(String outputPath) {
        Log.d(TAG, "openOutput outputPath:" + outputPath);
        try {
            mFos = new FileOutputStream(outputPath);
        } catch (IOException e) {
            return ERROR_OUTPUT_FAILED;
        }
        return OK;
    }

    /**
     * Creates a decoder for the track's MIME type, verifies it supports
     * COLOR_FormatYUV420Flexible output, configures it with that color format
     * and starts it.
     */
    private int openCodec(MediaFormat format) {
        String mime = format.getString(MediaFormat.KEY_MIME);
        Log.d(TAG, "openCodec, format mime:" + mime);
        try {
            mDecoder = MediaCodec.createDecoderByType(mime);
        } catch (IOException | IllegalArgumentException e) {
            // createDecoderByType throws IllegalArgumentException for an
            // invalid/unsupported MIME type, not only IOException.
            e.printStackTrace();
            return ERROR_OPEN_CODEC;
        }
        MediaCodecInfo.CodecCapabilities codecCapabilities = mDecoder.getCodecInfo().getCapabilitiesForType(mime);
        for (int i = 0; i < codecCapabilities.colorFormats.length; i++) {
            Log.e(TAG, "openCodec, format colorFormats:" + codecCapabilities.colorFormats[i]);
        }
        for (int i = 0; i < codecCapabilities.profileLevels.length; i++) {
            Log.e(TAG, "openCodec, format colorFormats:" + codecCapabilities.profileLevels[i].profile);
            Log.e(TAG, "openCodec, format colorFormats:" + codecCapabilities.profileLevels[i].level);
        }
        int yuvFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
        if (!isSupportYuvFormat(yuvFormat, codecCapabilities.colorFormats)) {
            return ERROR_OPEN_CODEC;
        }
        Log.e(TAG, "openCodec, format yuvFormat MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible");
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, yuvFormat);
        mDecoder.configure(format, null, null, 0);
        mDecoder.start();
        if (Build.VERSION.SDK_INT < 21) {
            mInputBuffers = mDecoder.getInputBuffers();
            mOutputBuffers = mDecoder.getOutputBuffers();
        }
        return OK;
    }

    /** Returns true when {@code format} appears in the codec's supported list. */
    private boolean isSupportYuvFormat(int format, int[] supportFormat) {
        if (supportFormat == null) {
            return false;
        }
        for (int i = 0; i < supportFormat.length; i++) {
            if (supportFormat[i] == format) {
                return true;
            }
        }
        return false;
    }

    /**
     * Releases whatever resources were opened. Null-safe and idempotent so it
     * can run from the finally block even when opening failed part-way.
     */
    private void close() {
        if (mExtractor != null) {
            mExtractor.release();
            mExtractor = null;
        }
        if (mDecoder != null) {
            try {
                mDecoder.stop();
            } catch (IllegalStateException e) {
                e.printStackTrace();
            }
            mDecoder.release();
            mDecoder = null;
        }
        if (mFos != null) {
            try {
                mFos.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            mFos = null;
        }
    }
}
gitee地址
https://gitee.com/creat151/android-media.git
|