Android: Circular Recording of the Most Recent Screen Video
In day-to-day development and testing, screen recordings and logs are usually only captured after a problem has already happened, and intermittent issues are then very hard to reproduce and pin down. This article presents a solution that keeps video of the most recent on-screen activity so it can be pulled out after the fact:
- Cache the encoded screen-capture frames, frame by frame, in a fixed-size block of memory whose space is reused circularly (a ring buffer).
- When a problem is noticed, trigger the muxer (MediaMuxer) to pull the frames within the requested time range out of the cache and write them to the specified MP4 file.
Data Cache
The data cache is what keeps the historical frame data, so its memory budget has to be allocated sensibly. Pick a size that matches your own situation (screen resolution, how many seconds of history you want to keep, and so on).
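As a rough sizing aid, the cache only needs to hold the encoded stream, so bitrate times duration gives a good first estimate. The numbers below (4 Mbps, 30 seconds, 20% headroom) are illustrative assumptions, not values from the original project:
// Rough cache sizing: bitrate (bits/s) * seconds / 8 = bytes of encoded stream,
// plus some headroom for per-frame bookkeeping. All numbers are assumptions.
const val BIT_RATE = 4_000_000      // ~4 Mbps encoder bitrate (assumed)
const val HISTORY_SECONDS = 30      // how much history to keep (assumed)

val cacheSizeBytes = BIT_RATE / 8 * HISTORY_SECONDS         // ≈ 15 MB of raw stream
val cacheSizeWithHeadroom = (cacheSizeBytes * 1.2).toInt()  // ~20% extra headroom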
Four JNI functions are provided:
object FrameDataCacheUtils {
    // Allocate the fixed-size native frame cache; isDebug enables debug behaviour
    // in the native layer (BuildConfig.DEBUG is passed in later).
    external fun initCache(cacheSize: Int, isDebug: Boolean)

    // Append one encoded frame (with its timestamp and key-frame flag) to the circular cache.
    external fun addFrameData(
        timestamp: Long,
        isKeyFrame: Boolean,
        frameData: ByteArray,
        length: Int
    )

    // Look up the first key frame for the given start timestamp; the frame's timestamp,
    // bytes and length are written into the array parameters. Returns a DataCacheCode result.
    external fun getFirstFrameData(
        timestamp: Long,
        curTimestamp: LongArray,
        frameData: ByteArray,
        length: IntArray
    ): Int

    // Fetch the frame that follows preTimestamp; outputs are written into the array
    // parameters. Returns a DataCacheCode result.
    external fun getNextFrameData(
        preTimestamp: Long,
        curTimestamp: LongArray,
        frameData: ByteArray,
        length: IntArray,
        isKeyFrame: BooleanArray
    ): Int

    init {
        System.loadLibrary("framedatacachejni")
    }
}
Source code of the cache framework: https://download.csdn.net/download/lkl22/73404181
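The native cache itself is only available via the download link above. As a rough illustration of what it does, here is a minimal pure-Kotlin sketch of a circular frame cache with the same four operations. It is not the actual JNI implementation; the class name, byte-budget eviction and lookup semantics are assumptions based on how the functions are used later in this article:
// Illustrative sketch only: keeps roughly the last `maxBytes` of encoded frames
// and supports timestamp-based lookup. The real implementation is the native library above.
class SimpleFrameCache(private val maxBytes: Int) {
    data class Frame(val timestamp: Long, val isKeyFrame: Boolean, val data: ByteArray)

    private val frames = ArrayDeque<Frame>()
    private var usedBytes = 0

    @Synchronized
    fun addFrame(timestamp: Long, isKeyFrame: Boolean, data: ByteArray) {
        frames.addLast(Frame(timestamp, isKeyFrame, data))
        usedBytes += data.size
        // Evict the oldest frames once the byte budget is exceeded (circular reuse).
        while (usedBytes > maxBytes && frames.isNotEmpty()) {
            usedBytes -= frames.removeFirst().data.size
        }
    }

    // First key frame at or after the requested start time.
    @Synchronized
    fun firstKeyFrameFrom(timestamp: Long): Frame? =
        frames.firstOrNull { it.isKeyFrame && it.timestamp >= timestamp }

    // Next frame strictly after the given timestamp.
    @Synchronized
    fun nextFrameAfter(timestamp: Long): Frame? =
        frames.firstOrNull { it.timestamp > timestamp }
}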
Starting Screen Capture
Step 1: Request permissions
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
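Note: if the app targets Android 14 (API 34) or later, a service declared with foregroundServiceType="mediaProjection" also needs the dedicated foreground-service permission. This line is an addition for newer targets, not part of the original project:
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_MEDIA_PROJECTION" />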
Step 2: Create the service
<application>
<service
android:name=".service.ScreenCaptureService"
android:enabled="true"
android:exported="false"
android:foregroundServiceType="mediaProjection" />
</application>
override fun onStartCommand(intent: Intent, flags: Int, startId: Int): Int {
createNotificationChannel()
val resultCode = intent.getIntExtra(ScreenCapture.KEY_RESULT_CODE, -1)
val cacheSize = intent.getIntExtra(ScreenCapture.KEY_CACHE_SIZE, ScreenCapture.DEFAULT_CACHE_SIZE)
val resultData = intent.getParcelableExtra<Intent>(ScreenCapture.KEY_DATA)
resultData?.apply {
ScreenCaptureManager.instance.startRecord(resultCode, this, cacheSize)
LogUtils.e(TAG, "startRecord.")
}
return super.onStartCommand(intent, flags, startId)
}
private fun createNotificationChannel() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val notificationManager = getSystemService(NOTIFICATION_SERVICE) as NotificationManager
if (notificationManager.getNotificationChannel(NOTIFICATION_CHANNEL_ID) == null) {
val channel = NotificationChannel(
NOTIFICATION_CHANNEL_ID,
NOTIFICATION_CHANNEL_NAME,
NotificationManager.IMPORTANCE_DEFAULT
)
notificationManager.createNotificationChannel(channel)
}
}
val builder =
NotificationCompat.Builder(this, NOTIFICATION_CHANNEL_ID)
.setContentTitle("ScreenCapture")
.setSmallIcon(R.mipmap.ic_launcher)
.setContentText("is running......")
.setVisibility(NotificationCompat.VISIBILITY_PUBLIC)
.setWhen(System.currentTimeMillis())
LogUtils.d(TAG, "startForeground")
startForeground(NOTIFICATION_ID, builder.build())
}
Step 3: Start capturing
1. Create the MediaProjectionManager object
private val mProjectionManager: MediaProjectionManager = BaseApplication.context
.getSystemService(Context.MEDIA_PROJECTION_SERVICE) as MediaProjectionManager
2. Create the screen-capture intent via createScreenCaptureIntent
fun createScreenCaptureIntent(): Intent {
return mProjectionManager.createScreenCaptureIntent()
}
3. Launch the screen-capture authorization screen
startActivityForResult(
ScreenCaptureManager.instance.createScreenCaptureIntent(),
SCREEN_CAPTURE_REQUEST_CODE
)
4. Handle the result callback
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (requestCode == SCREEN_CAPTURE_REQUEST_CODE) {
data?.apply {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val service = Intent(this@MainActivity, ScreenCaptureService::class.java)
service.putExtra(ScreenCapture.KEY_RESULT_CODE, resultCode)
service.putExtra(ScreenCapture.KEY_DATA, data)
service.putExtra(ScreenCapture.KEY_CACHE_SIZE, cacheSize)
startForegroundService(service)
} else {
ScreenCaptureManager.instance.startRecord(resultCode, this, cacheSize)
}
}
}
}
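startActivityForResult/onActivityResult are deprecated in recent AndroidX releases. An equivalent flow with the Activity Result API could look roughly like this, assuming an AndroidX activity and the same cacheSize variable and ScreenCapture constants used above (a sketch, not the project's code):
// Sketch: the same authorization flow using registerForActivityResult (AndroidX).
private val screenCaptureLauncher =
    registerForActivityResult(ActivityResultContracts.StartActivityForResult()) { result ->
        val data = result.data ?: return@registerForActivityResult
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            val service = Intent(this, ScreenCaptureService::class.java)
            service.putExtra(ScreenCapture.KEY_RESULT_CODE, result.resultCode)
            service.putExtra(ScreenCapture.KEY_DATA, data)
            service.putExtra(ScreenCapture.KEY_CACHE_SIZE, cacheSize)
            startForegroundService(service)
        } else {
            ScreenCaptureManager.instance.startRecord(result.resultCode, data, cacheSize)
        }
    }

// Launching it replaces the startActivityForResult call shown earlier:
// screenCaptureLauncher.launch(ScreenCaptureManager.instance.createScreenCaptureIntent())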
Processing the Captured Data
Start a screen-capture thread to record the encoded video frame data:
fun startRecord(resultCode: Int, data: Intent, cacheSize: Int) {
mScreenCaptureThread = ScreenCaptureThread(
MediaFormatParams(
mDisplayMetrics.widthPixels,
mDisplayMetrics.heightPixels,
colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
),
mDisplayMetrics.densityDpi,
mProjectionManager.getMediaProjection(resultCode, data),
object : ScreenCaptureThread.Callback {
override fun prePrepare(mediaFormatParams: MediaFormatParams) {
FrameDataCacheUtils.initCache(
cacheSize, BuildConfig.DEBUG
)
isEnvReady.set(true)
}
override fun putFrameData(frameData: FrameData) {
FrameDataCacheUtils.addFrameData(
frameData.timestamp,
frameData.isKeyFrame,
frameData.data,
frameData.length
)
}
}
)
mScreenCaptureThread?.start()
}
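The MediaFormatParams built above are turned into an encoder format by MediaUtils.createVideoFormat, which is not shown in this article. A plausible sketch of what it needs to produce for a Surface-input encoder follows; the bitrate, frame rate and I-frame interval values are assumptions. A short I-frame interval matters here, because any export start time picked from the cache needs a nearby key frame to begin muxing from:
// Sketch of a video MediaFormat for a Surface-input encoder (values are assumptions).
fun createVideoFormat(params: MediaFormatParams): MediaFormat {
    return MediaFormat.createVideoFormat(params.mimeType, params.width, params.height).apply {
        setInteger(
            MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
        )
        setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000)   // assumed ~4 Mbps
        setInteger(MediaFormat.KEY_FRAME_RATE, 30)        // assumed 30 fps
        setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)   // one key frame per second
    }
}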
Core code of ScreenCaptureThread:
package com.lkl.medialib.core
import android.hardware.display.DisplayManager
import android.hardware.display.VirtualDisplay
import android.media.MediaCodec
import android.media.MediaFormat
import android.media.projection.MediaProjection
import android.util.Log
import android.view.Surface
import com.lkl.commonlib.util.LogUtils
import com.lkl.medialib.bean.FrameData
import com.lkl.medialib.bean.MediaFormatParams
import com.lkl.medialib.constant.ScreenCapture
import com.lkl.medialib.util.MediaUtils
import java.io.IOException
class ScreenCaptureThread(
private val mediaFormatParams: MediaFormatParams,
private val dpi: Int,
private val mediaProjection: MediaProjection,
private val callback: Callback,
threadName: String = TAG
) : BaseMediaThread(threadName) {
companion object {
private const val TAG = "ScreenRecordService"
private const val TIMEOUT_US = 10000L
}
private var mEncoder: MediaCodec? = null
private var mSurface: Surface? = null
private val mBufferInfo = MediaCodec.BufferInfo()
private var mVirtualDisplay: VirtualDisplay? = null
private var mMediaFormat: MediaFormat? = null
@Throws(IOException::class)
override fun prepare() {
callback.prePrepare(mediaFormatParams)
val format = MediaUtils.createVideoFormat(mediaFormatParams)
Log.d(TAG, "created video format: $format")
mEncoder = MediaCodec.createEncoderByType(mediaFormatParams.mimeType)
mEncoder?.apply {
configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
mSurface = createInputSurface()
Log.d(TAG, "created input surface: $mSurface")
start()
mVirtualDisplay = mediaProjection.createVirtualDisplay(
"$TAG-display", mediaFormatParams.width, mediaFormatParams.height, dpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, mSurface, null, null
)
Log.d(TAG, "created virtual display: $mVirtualDisplay")
}
}
override fun drain() {
val index = mEncoder!!.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US)
when {
index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
mMediaFormat = mEncoder?.outputFormat
}
index == MediaCodec.INFO_TRY_AGAIN_LATER -> {
waitTime(10)
}
index >= 0 -> {
encodeDataToCallback(index)
mEncoder?.releaseOutputBuffer(index, false)
}
}
}
private fun encodeDataToCallback(index: Int) {
var encodedData = mEncoder!!.getOutputBuffer(index)
if (mBufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG")
mBufferInfo.size = 0
}
if (mBufferInfo.size == 0) {
Log.d(TAG, "info.size == 0, drop it.")
encodedData = null
}
if (encodedData != null) {
encodedData.position(mBufferInfo.offset)
encodedData.limit(mBufferInfo.offset + mBufferInfo.size)
            // Copy the encoded bytes out of the codec's output buffer.
            val data = ByteArray(mBufferInfo.size)
            encodedData.get(data)
            callback.putFrameData(
                FrameData(
                    data, mBufferInfo.size, System.currentTimeMillis(),
                    // Test the key-frame bit; comparing the whole flag word would miss combined flags.
                    (mBufferInfo.flags and MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0
                )
            )
if (ScreenCapture.PRINT_DEBUG_LOG) {
LogUtils.d(
TAG,
"sent ${mBufferInfo.size} bytes callback, ts=${mBufferInfo.presentationTimeUs}"
)
}
}
}
fun getMediaFormat(): MediaFormat? {
return mMediaFormat
}
override fun release() {
LogUtils.d(TAG, "release")
mEncoder?.apply {
stop()
release()
mEncoder = null
}
mVirtualDisplay?.apply {
release()
mVirtualDisplay = null
}
mediaProjection.stop()
}
interface Callback {
fun prePrepare(mediaFormatParams: MediaFormatParams)
fun putFrameData(frameData: FrameData)
}
}
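BaseMediaThread is not included in the snippets. Judging from how prepare(), drain(), release(), waitTime() and quit() are used by the two subclasses, a minimal sketch of such a base class could look like this (an assumption, not the project's actual implementation):
// Sketch of a prepare/drain/release worker thread matching how the subclasses use it.
abstract class BaseMediaThread(name: String) : Thread(name) {
    @Volatile
    private var isQuit = false

    protected abstract fun prepare()
    protected abstract fun drain()
    protected abstract fun release()

    // Ask the thread to finish after the current drain pass.
    fun quit() {
        isQuit = true
    }

    // Simple back-off used when no data is currently available.
    protected fun waitTime(ms: Long) {
        try {
            sleep(ms)
        } catch (e: InterruptedException) {
            currentThread().interrupt()
        }
    }

    override fun run() {
        try {
            prepare()
            // Keep draining encoder output (or cached frames) until asked to quit.
            while (!isQuit) {
                drain()
            }
        } finally {
            release()
        }
    }
}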
Extracting Frames and Saving Them to an MP4 File
fun startMuxer(fileName: String, startTime: Long, endTime: Long) {
val mediaFormat = getMediaFormat()
if (mediaFormat == null) {
LogUtils.e(TAG, "mediaFormat is null, no encoder output format is available yet.")
return
}
mVideoMuxerThread =
VideoMuxerThread(mediaFormat, fileName, object : VideoMuxerThread.Callback {
override fun getFirstIFrameData(): FrameData? {
val res = FrameDataCacheUtils.getFirstFrameData(
startTime,
mCurTimeStamp,
mFrameBuffer,
mLength
)
if (res == DataCacheCode.RES_SUCCESS) {
return FrameData(mFrameBuffer, mLength[0], mCurTimeStamp[0], true)
}
return null
}
override fun getNextFrameData(): FrameData? {
val res = FrameDataCacheUtils.getNextFrameData(
mCurTimeStamp[0],
mCurTimeStamp,
mFrameBuffer,
mLength,
mIsKeyFrame
)
if (res == DataCacheCode.RES_SUCCESS) {
if (mCurTimeStamp[0] > endTime) {
mVideoMuxerThread?.quit()
}
return FrameData(mFrameBuffer, mLength[0], mCurTimeStamp[0], mIsKeyFrame[0])
} else if (res == DataCacheCode.RES_FAILED) {
mVideoMuxerThread?.quit()
}
return null
}
override fun finished(fileName: String) {
ThreadUtils.runOnMainThread{
ToastUtils.showLong("Video recording finished.")
}
}
})
mVideoMuxerThread?.start()
}
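The reusable out-parameters used above (mCurTimeStamp, mFrameBuffer, mLength, mIsKeyFrame) are member fields of the enclosing class. Their declarations could look roughly like this; the frame buffer capacity is an assumption and must be at least as large as the biggest single encoded frame:
// Reusable out-parameters for the JNI calls; allocated once to avoid per-frame allocations.
private val mCurTimeStamp = LongArray(1)
private val mLength = IntArray(1)
private val mIsKeyFrame = BooleanArray(1)
private val mFrameBuffer = ByteArray(1024 * 1024)  // assumed 1 MB max frame size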
Core code of VideoMuxerThread:
package com.lkl.medialib.core
import android.media.MediaCodec
import android.media.MediaFormat
import android.media.MediaMuxer
import android.text.TextUtils
import com.lkl.commonlib.util.BitmapUtils
import com.lkl.commonlib.util.DateUtils
import com.lkl.commonlib.util.FileUtils
import com.lkl.commonlib.util.LogUtils
import com.lkl.medialib.bean.FrameData
import com.lkl.medialib.constant.ScreenCapture
import java.nio.ByteBuffer
import java.util.*
class VideoMuxerThread(
private val mediaFormat: MediaFormat,
private val saveFilePath: String? = null,
private val callback: Callback,
threadName: String = TAG
) : BaseMediaThread(threadName) {
companion object {
private const val TAG = "VideoMuxerCore"
}
private var mMuxer: MediaMuxer? = null
private val mBufferInfo = MediaCodec.BufferInfo()
private var mMuxerStarted = false
private var mTimeStamp: Long = -1
private var mFirstTimeStamp: Long = -1
private var mOutputFileName = ""
private var mTrackIndex = -1
override fun prepare() {
mOutputFileName = if (TextUtils.isEmpty(saveFilePath)) {
FileUtils.videoDir + DateUtils.nowTime.replace(" ", "_") + BitmapUtils.VIDEO_FILE_EXT
} else {
saveFilePath!!
}
mMuxer = MediaMuxer(mOutputFileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
LogUtils.d(TAG, "Muxer init mediaFormat -> $mediaFormat")
mMuxer?.apply {
mTrackIndex = addTrack(mediaFormat)
start()
mMuxerStarted = true
firstFrameHandler()
}
}
private fun firstFrameHandler() {
val frameData = callback.getFirstIFrameData()
if (frameData == null) {
LogUtils.e(TAG, "get first IFrame data failed.")
quit()
return
}
writeSampleData(frameData)
}
override fun drain() {
val frameData = callback.getNextFrameData()
if (frameData == null) {
waitTime(10)
} else {
writeSampleData(frameData)
}
}
private fun writeSampleData(frameData: FrameData) {
mMuxer?.apply {
val sampleData = ByteBuffer.wrap(frameData.data, 0, frameData.length)
setBufferInfo(
if (frameData.isKeyFrame) MediaCodec.BUFFER_FLAG_KEY_FRAME else 0,
frameData.timestamp,
frameData.length
)
writeSampleData(mTrackIndex, sampleData, mBufferInfo)
if (ScreenCapture.PRINT_DEBUG_LOG) {
LogUtils.d(
TAG, "get frame data: size -> ${frameData.length} timestamp -> " +
DateUtils.convertDateToString(
DateUtils.DATE_TIME,
Date(frameData.timestamp)
) + " isKeyFrame -> ${frameData.isKeyFrame}"
)
}
}
}
private fun setBufferInfo(flags: Int, presentationTimeMs: Long, size: Int) {
mBufferInfo.flags = flags
mBufferInfo.offset = 0
mBufferInfo.presentationTimeUs = presentationTimeMs * 1000
mBufferInfo.size = size
}
    override fun release() {
        LogUtils.d(TAG, "release")
        mMuxer?.apply {
            // stop() finalizes the MP4; releasing without it can leave an unplayable file.
            if (mMuxerStarted) stop()
            release()
        }
        mMuxer = null
        callback.finished(mOutputFileName)
    }
interface Callback {
fun getFirstIFrameData(): FrameData?
fun getNextFrameData(): FrameData?
fun finished(fileName: String)
}
}
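Putting it together: when a tester spots a problem, exporting, say, the last 30 seconds is a single call. The snippet below is a usage sketch; the output path is illustrative, and whether startMuxer lives on ScreenCaptureManager depends on your own project structure:
// Export the most recent 30 seconds of cached screen recording to an MP4 file.
val now = System.currentTimeMillis()
ScreenCaptureManager.instance.startMuxer(
    FileUtils.videoDir + "bug_report.mp4",  // output path (illustrative)
    now - 30_000,                           // startTime: 30 seconds ago
    now                                     // endTime: now
)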
References
The Bip Buffer, an efficient two-segment circular buffer (高效的两段式循环缓冲区──BipBuffer)