继上篇文章的代码,在原来基础之上添加时间判断,到时间则切换至下一个录制器。
在java层我想到的逻辑就是用不同的MediaMuxer来实现,当然你也可以选择将一个视频切割为多个视频来间接实现。
下面只是粗略写下,没有根据时间戳来命名文件名,10秒到了后将视频丢到next.mp4录制,经测试,2个视频之间的差距为400ms,有待优化。
需要注意的是,如果将muxer放置在主线程中释放资源,则大约耗时800ms,不同机型不同配置可能耗时更长,所以要将其放置在子线程中释放。
主要代码:
package com.my.video.util
import android.content.Context
import android.media.*
import android.os.*
import com.my.video.egl.EglCore
import com.my.video.filter.NoFilter
import java.io.File
import java.nio.ByteBuffer
/**
 * Records the OpenGL-rendered camera preview plus microphone audio into MP4 files,
 * switching from `test.mp4` to `next.mp4` once the segment time limit is reached.
 *
 * Threading model:
 *  - a "videoEncode" [HandlerThread] drives the video encoder, the segment timer
 *    and the muxer rollover ([EncodeHandler]);
 *  - a dedicated [Thread] runs [AudioCodecRunnable], feeding PCM into the AAC
 *    encoder and writing its output to the active muxer;
 *  - [release] tears everything down on a worker thread because releasing a muxer
 *    on the main thread blocks for ~800ms on test devices.
 */
class CodecUtil(val context: Context, val width: Int, val height: Int) {
    // H.264 video encoder; frames arrive through its input Surface via EglCore.
    private var mediaCodec: MediaCodec? = null
    // AAC audio encoder, fed with PCM captured by AudioRecord.
    private var audioCodec: MediaCodec? = null
    // First MP4 container ...
    private var mediaMuxer: MediaMuxer? = null
    private var audioRecord: AudioRecord? = null
    // ... and the container rolled over to after the segment time limit.
    private var nextMuxer: MediaMuxer? = null
    // Wraps the video encoder's input surface so OpenGL can render into it.
    private var eglCore: EglCore? = null
    private var handlerThread: HandlerThread? = null
    private var handler: EncodeHandler? = null
    private val bufferInfo = MediaCodec.BufferInfo()
    // Track indices inside the CURRENT muxer; -1 until addTrack() succeeds.
    private var videoTrack: Int = -1
    private var audioTrack: Int = -1
    // Nanosecond timestamp of the first encoded frame; -1 until recording starts.
    private var baseTimeStamp: Long = -1L

    // EncodeHandler message codes.
    val START_RECORD: Int = 0
    val STOP_RECORD: Int = 1
    val RECORD_TIME: Int = 2
    val CHANGE_NEXT: Int = 3

    var isRecording = false
    private var mAudioRunnable: AudioCodecRunnable? = null
    private var mAudioThread: Thread? = null
    private var bufferSize: Int = 0
    // Guards track registration + muxer start across the video and audio threads.
    private val lock: Object = Object()
    private var hasStartMuxer: Boolean = false

    /**
     * Creates the encode thread, both encoders, both muxers and the audio recorder.
     * Must be called before [start].
     */
    fun init() {
        handlerThread = HandlerThread("videoEncode")
        handlerThread?.let {
            it.start()
            handler = EncodeHandler(it.looper)
        }
        val path = context.getExternalFilesDir(null)
        val file = File("$path/test")
        if (!file.exists()) {
            file.mkdir()
        }
        // FIX: dropped the stray leading "/" — `path` is already absolute, so the
        // original "/$path/…" produced a "//storage/…" path, inconsistent with the
        // File("$path/test") used to create the directory above.
        mediaMuxer =
            MediaMuxer("$path/test/test.mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
        nextMuxer =
            MediaMuxer("$path/test/next.mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)

        // Video: H.264 at 3.5 Mbps, 30 fps, one key frame per second, surface input.
        val mediaFormat: MediaFormat =
            MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height)
        mediaFormat.setInteger(
            MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
        )
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 3500000)
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30)
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
        mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
        mediaCodec?.let {
            it.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
            val surface = it.createInputSurface()
            eglCore = EglCore(surface)
            it.start()
        }

        // Audio: AAC-LC, 48 kHz stereo at 128 kbps.
        val audioMediaFormat: MediaFormat = MediaFormat.createAudioFormat(
            "audio/mp4a-latm" // MIME type for AAC
            , 48000, 2
        )
        audioMediaFormat.setInteger(
            MediaFormat.KEY_AAC_PROFILE,
            MediaCodecInfo.CodecProfileLevel.AACObjectLC
        )
        audioMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000)
        audioCodec = MediaCodec.createEncoderByType("audio/mp4a-latm")
        audioCodec?.configure(audioMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
        audioCodec?.start()

        bufferSize = AudioRecord.getMinBufferSize(
            48000,
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT
        )
        audioRecord = AudioRecord(
            MediaRecorder.AudioSource.MIC,
            48000,
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize
        )
    }

    /**
     * Pulls encoded H.264 buffers out of the video encoder and feeds them to the
     * active muxer. Runs on the "videoEncode" handler thread.
     *
     * @param endOfStream true when draining the final buffers after stop.
     * NOTE(review): stop() sets isRecording = false BEFORE posting STOP_RECORD, so
     * the `while (isRecording)` guard makes the end-of-stream drain a no-op — confirm
     * whether dropping the final buffers is intentional before changing the guard.
     */
    fun drainEncode(endOfStream: Boolean) {
        var outputBuffers = mediaCodec?.outputBuffers
        mediaCodec?.let { codec ->
            while (isRecording) {
                val status: Int = codec.dequeueOutputBuffer(bufferInfo, 10000)
                if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Nothing ready; only keep spinning when draining the tail.
                    if (!endOfStream) {
                        break
                    }
                } else if (status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    outputBuffers = mediaCodec?.outputBuffers
                } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    synchronized(lock) {
                        mediaCodec?.let { c ->
                            val format = c.outputFormat
                            mediaMuxer?.let { muxer ->
                                videoTrack = muxer.addTrack(format)
                                // The muxer may only start once BOTH tracks were added.
                                if (videoTrack >= 0 && audioTrack >= 0) {
                                    muxer.start()
                                    hasStartMuxer = true
                                }
                            }
                        }
                    }
                } else if (status < 0) {
                    LogUtil.e("->unexpected result from encoder.dequeueOutputBuffer code=$status")
                } else {
                    val encodedData = outputBuffers?.get(status)
                    encodedData?.let {
                        if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
                            // Codec config (SPS/PPS) travels in the muxer's track format,
                            // not as a sample — suppress it.
                            bufferInfo.size = 0
                        }
                        LogUtil.e("drainEncode---------->isWaittingInit=$isWaittingInit")
                        // Skip writes while the muxer rollover is in progress.
                        if (bufferInfo.size != 0 && hasStartMuxer && !isWaittingInit) {
                            getCurMuxer()?.writeSampleData(videoTrack, it, bufferInfo)
                        }
                    }
                    mediaCodec?.releaseOutputBuffer(status, false)
                    if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
                        break
                    }
                }
            }
        }
    }

    /**
     * Releases every component on a worker thread — doing this on the main thread
     * blocks for ~800ms (longer on some devices). Best-effort: stop() calls on
     * components that never started throw and are swallowed.
     */
    fun release() {
        Thread {
            try {
                stop()
                // FIX: quit() drops pending messages, including the STOP_RECORD just
                // posted by stop(); quitSafely() (API 18+, same floor as MediaMuxer)
                // lets them be processed first.
                handlerThread?.quitSafely()
                mAudioThread?.interrupt()
                audioRecord?.stop()
                audioRecord?.release()
                audioCodec?.stop()
                audioCodec?.release()
                mediaCodec?.signalEndOfInputStream()
                mediaCodec?.stop()
                mediaCodec?.release()
                eglCore?.release()
                // Each muxer is torn down independently: stop() throws when that muxer
                // never started (e.g. recording ended before the rollover), and one
                // failure must not prevent releasing the other.
                try {
                    nextMuxer?.stop()
                } catch (e: Exception) {
                    e.printStackTrace()
                }
                nextMuxer?.release()
                // FIX: the original released nextMuxer twice here and never released
                // mediaMuxer, leaking it.
                try {
                    mediaMuxer?.stop()
                } catch (e: Exception) {
                    e.printStackTrace()
                }
                mediaMuxer?.release()
                nextMuxer = null
                mediaMuxer = null
                audioRecord = null
                audioCodec = null
                handlerThread = null
                mediaCodec = null
                eglCore = null
            } catch (e: Exception) {
                e.printStackTrace()
            }
        }.start()
    }

    // Elapsed seconds of the current segment, ticked by RECORD_TIME.
    private var time = 0
    // True once writes should go to nextMuxer instead of mediaMuxer.
    private var hasChangeMuxer: Boolean = false
    // True while the rollover is in progress; writers must pause.
    private var isWaittingInit: Boolean = false

    /**
     * Runs on the "videoEncode" HandlerThread; drives encoding, the one-second
     * segment timer, and the switch to the second muxer.
     */
    inner class EncodeHandler(looper: Looper) : Handler(looper) {
        override fun handleMessage(msg: Message) {
            super.handleMessage(msg)
            when (msg.what) {
                START_RECORD -> {
                    drainEncode(false)
                    prepare()
                    draw() // without drawing, the encoded video has no picture
                    if (baseTimeStamp == -1L) {
                        baseTimeStamp = System.nanoTime()
                        sendEmptyMessage(RECORD_TIME)
                    }
                    // Renamed from `time`: the original local shadowed the
                    // segment-seconds field of the same name.
                    val pts = System.nanoTime() - baseTimeStamp
                    eglCore?.setPresentTime(pts)
                    eglCore?.swapBuffer()
                }
                STOP_RECORD -> {
                    drainEncode(true)
                }
                RECORD_TIME -> {
                    // FIX: `time <= 10` let the counter reach 11 before switching,
                    // producing ~11s segments instead of the intended 10s.
                    if (time < 10) {
                        time++
                        sendEmptyMessageDelayed(RECORD_TIME, 1000)
                    } else {
                        sendEmptyMessage(CHANGE_NEXT)
                    }
                }
                CHANGE_NEXT -> {
                    // Pause both writers while the rollover happens.
                    isWaittingInit = true
                    // stop()/release() on a muxer can block for ~800ms, so it is
                    // finalized off this thread.
                    Thread {
                        mediaMuxer?.let {
                            it.stop()
                            it.release()
                        }
                    }.start()
                    mediaCodec?.let {
                        videoTrack = nextMuxer?.addTrack(it.outputFormat) ?: -1
                    }
                    audioCodec?.let {
                        audioTrack = nextMuxer?.addTrack(it.outputFormat) ?: -1
                    }
                    nextMuxer?.start()
                    hasChangeMuxer = true
                    isWaittingInit = false
                    // NOTE(review): the finalizer thread above may still be using this
                    // reference; clearing it here races with that thread — confirm.
                    mediaMuxer = null
                }
            }
        }
    }

    /** Starts (or resumes) recording; spawns the audio thread on first call. */
    fun start() {
        isRecording = true
        if (mAudioRunnable == null) {
            mAudioRunnable = AudioCodecRunnable()
            mAudioThread = Thread(mAudioRunnable)
            mAudioThread?.start()
        }
        handler?.sendEmptyMessage(START_RECORD)
    }

    /** Stops recording and asks the encode thread to drain the final buffers. */
    fun stop() {
        LogUtil.e("--->stop")
        isRecording = false
        handler?.sendEmptyMessage(STOP_RECORD)
    }

    private var hasPrepare: Boolean = false

    /** One-time EGL setup on the encode thread: make the context current, build the filter. */
    fun prepare() {
        if (hasPrepare) {
            return
        }
        hasPrepare = true
        eglCore?.eglMakeCurrent()
        showFilter.create()
    }

    private var showFilter: NoFilter = NoFilter()

    /** Sets the OES texture that [draw] renders into the encoder surface. */
    fun setTextureId(textureId: Int) {
        showFilter.setTextureId(textureId)
    }

    /** Renders the current texture into the encoder's input surface. */
    fun draw() {
        showFilter.drawFrame()
    }

    /**
     * Captures PCM from [audioRecord], pushes it through the AAC encoder, and
     * writes the encoded samples to the active muxer. Loops until [isRecording]
     * flips false or [audioCodec] is torn down.
     */
    private inner class AudioCodecRunnable : Runnable {
        private var baseTimeStamp: Long = -1
        var mInfo = MediaCodec.BufferInfo()

        init {
            baseTimeStamp = System.nanoTime()
            audioRecord?.startRecording()
        }

        override fun run() {
            while (isRecording) {
                if (audioCodec != null) {
                    audioCodec?.let { codec ->
                        // Feed one chunk of PCM into the encoder's input queue.
                        val index: Int = codec.dequeueInputBuffer(0)
                        if (index >= 0) {
                            val inBuffer: ByteBuffer? =
                                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                                    codec.getInputBuffer(index)
                                } else {
                                    codec.inputBuffers[index]
                                }
                            inBuffer?.let { buf ->
                                buf.clear()
                                val audioLength: Int = audioRecord?.read(buf, bufferSize) ?: 0
                                if (audioLength > 0) {
                                    // Microsecond presentation time relative to recording start.
                                    val timeUs = (System.nanoTime() - baseTimeStamp) / 1000
                                    val endFlag: Int = if (isRecording) {
                                        0
                                    } else {
                                        MediaCodec.BUFFER_FLAG_END_OF_STREAM
                                    }
                                    // Hand the PCM to MediaCodec; the encoded output is
                                    // later packed into the MP4 by the muxer below.
                                    audioCodec?.queueInputBuffer(
                                        index,
                                        0,
                                        audioLength,
                                        timeUs,
                                        endFlag
                                    )
                                }
                            }
                        }
                        // Drain every encoded buffer currently available.
                        var outIndex: Int = MediaCodec.INFO_TRY_AGAIN_LATER
                        do {
                            if (audioCodec == null) {
                                // FIX: the original set `outIndex = 1` here, which kept
                                // this do-while spinning forever once release() nulled
                                // the codec on another thread; bail out instead.
                                break
                            }
                            outIndex = codec.dequeueOutputBuffer(mInfo, 0)
                            if (outIndex >= 0) {
                                val outBuffer: ByteBuffer? =
                                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                                        codec.getOutputBuffer(outIndex)
                                    } else {
                                        codec.outputBuffers[outIndex]
                                    }
                                outBuffer?.position(mInfo.offset)
                                LogUtil.e("---------->isWaittingInit=$isWaittingInit")
                                // Skip writes while the muxer rollover is in progress.
                                if (outBuffer != null && hasStartMuxer && mInfo.presentationTimeUs > 0 && !isWaittingInit) {
                                    try {
                                        getCurMuxer()?.writeSampleData(
                                            audioTrack,
                                            outBuffer,
                                            mInfo
                                        )
                                    } catch (e: Exception) {
                                        e.printStackTrace()
                                    }
                                }
                                codec.releaseOutputBuffer(outIndex, false)
                            } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                                synchronized(lock) {
                                    val format = codec.outputFormat
                                    mediaMuxer?.let { muxer ->
                                        audioTrack = muxer.addTrack(format)
                                        LogUtil.e("AudioCodecRunnable----->run in this start videoTrack=$videoTrack->audioTrack=$audioTrack")
                                        // The muxer may only start once BOTH tracks were added.
                                        if (videoTrack >= 0 && audioTrack >= 0) {
                                            muxer.start()
                                            hasStartMuxer = true
                                        }
                                    }
                                }
                            }
                        } while (outIndex >= 0)
                    }
                } else {
                    break
                }
            }
        }
    }

    /** Returns the muxer samples should currently be written to. */
    private fun getCurMuxer(): MediaMuxer? {
        if (hasChangeMuxer) {
            return nextMuxer
        }
        return mediaMuxer
    }
}