Steps: preview the camera on a GLSurfaceView, then record to MP4 in the background via EGLSurface + MediaCodec + AudioRecord + MediaMuxer. Each camera frame is first rendered into an FBO texture; that texture is then drawn twice: once to the screen for the preview, and once into the encoder's EGL surface for recording.
Download link: https://download.csdn.net/download/gan303/12505266
Main code:
package com.my.video
import android.graphics.SurfaceTexture
import android.opengl.GLES20
import android.opengl.GLSurfaceView
import javax.microedition.khronos.egl.EGLConfig
import javax.microedition.khronos.opengles.GL10
class CodecRender(private val surface: CodecSurface) : GLSurfaceView.Renderer {
var textureId: Int = 0
private var oesfilter: OesFilter? = null
private var noFilter: NoFilter? = null
var surfaceTexture: SurfaceTexture? = null
private var cameraId: Int = 0
private var listener: SurfaceTexture.OnFrameAvailableListener? = null
private var screenWidth: Int = 0
private var screenHeight: Int = 0
private var codeUtil: CodecUtil? = null
private var fFrame: IntArray = IntArray(1)
private var fTexture: IntArray = IntArray(1)
private var isRelease: Boolean = false
init {
oesfilter = OesFilter()
noFilter = NoFilter()
}
override fun onDrawFrame(gl: GL10?) {
if (isRelease) {
return
}
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)
surfaceTexture?.updateTexImage()
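// Render the camera's OES texture into the FBO; the resulting GL_TEXTURE_2D
// (fTexture[0]) is sampled twice: by noFilter for the on-screen preview and
// by codeUtil for the encoder's EGL surface.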
bindFrameTexture(fFrame[0], fTexture[0])
GLES20.glViewport(0, 0, screenWidth, screenHeight)
if (cameraUtil?.isOpenFinish == true) {
oesfilter?.drawFrame()
if (codeUtil == null) {
codeUtil = CodecUtil(surface.context, screenWidth, screenHeight)
}
}
unBindFrameTexture()
// Draw the display filter to the screen
GLES20.glViewport(0, 0, screenWidth, screenHeight)
noFilter?.setTextureId(fTexture[0])
noFilter?.drawFrame()
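// Hand the same FBO texture to the encoder; CodecUtil draws it into the
// MediaCodec input surface on its own HandlerThread.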
codeUtil?.setTextureId(fTexture[0])
codeUtil?.start()
}
override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
screenWidth = width
screenHeight = height
GLES20.glViewport(0, 0, width, height)
// Recreate the FBO and its backing color texture at the new surface size
GLES20.glDeleteFramebuffers(1, fFrame, 0)
GLES20.glDeleteTextures(1, fTexture, 0)
GLES20.glGenFramebuffers(1, fFrame, 0)
GLES20.glGenTextures(1, fTexture, 0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, fTexture[0])
GLES20.glTexImage2D(
GLES20.GL_TEXTURE_2D,
0,
GLES20.GL_RGBA,
screenWidth,
screenHeight,
0,
GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE,
null
)
useTexParameter()
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
}
override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
textureId = oesfilter!!.createTextureId()
surfaceTexture = SurfaceTexture(textureId)
surfaceTexture?.setOnFrameAvailableListener {
surface.requestRender()
}
cameraUtil?.setTexture(surfaceTexture!!)
setCameraId(cameraId)
oesfilter?.setTextureId(textureId)
oesfilter?.create()
noFilter?.create()
}
fun setCameraId(id: Int) {
cameraId = id
oesfilter!!.setCoodData(cameraId)
cameraUtil!!.open(cameraId.toString())
}
private var cameraUtil: CameraUtil? = null
fun release() {
isRelease = true
surfaceTexture?.setOnFrameAvailableListener(null)
surfaceTexture?.release()
surfaceTexture = null
cameraUtil?.close()
cameraUtil = null
codeUtil?.release()
codeUtil = null
}
fun setFrameListener(listener: SurfaceTexture.OnFrameAvailableListener) {
this.listener = listener
}
init {
cameraUtil = CameraUtil(surface.context)
}
private fun useTexParameter() {
//Minification filter GL_NEAREST: use the color of the single nearest texel
GLES20.glTexParameterf(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST.toFloat()
)
//Magnification filter GL_LINEAR: blend the nearest texels with a weighted average
GLES20.glTexParameterf(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR.toFloat()
)
//Wrap mode S: clamp texture coordinates to the edge so sampling never blends with the border
GLES20.glTexParameterf(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE.toFloat()
)
//Wrap mode T: clamp texture coordinates to the edge so sampling never blends with the border
GLES20.glTexParameterf(
GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE.toFloat()
)
}
private fun bindFrameTexture(frameBufferId: Int, textureId: Int) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId)
GLES20.glFramebufferTexture2D(
GLES20.GL_FRAMEBUFFER,
GLES20.GL_COLOR_ATTACHMENT0,
GLES20.GL_TEXTURE_2D,
textureId,
0
)
}
private fun unBindFrameTexture() {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0)
}
}
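The renderer above depends on a CodecSurface that the post does not include; CodecRender only needs a GLSurfaceView subclass that exposes context and requestRender(). A minimal sketch under that assumption (the class body is hypothetical, not from the download):
package com.my.video

import android.content.Context
import android.opengl.GLSurfaceView
import android.util.AttributeSet

// Hypothetical sketch of the missing CodecSurface: a GLSurfaceView that owns the renderer.
class CodecSurface @JvmOverloads constructor(
    context: Context,
    attrs: AttributeSet? = null
) : GLSurfaceView(context, attrs) {
    val renderer = CodecRender(this)

    init {
        setEGLContextClientVersion(2) // the renderer uses GLES20
        setRenderer(renderer)
        renderMode = GLSurfaceView.RENDERMODE_WHEN_DIRTY // draw only when a camera frame arrives
    }
}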
package com.my.video
import android.content.Context
import android.media.*
import android.os.*
import com.my.video.egl.EglCore
import java.io.File
import java.nio.ByteBuffer
class CodecUtil(context: Context, width: Int, height: Int) {
//H.264 video encoder and AAC audio encoder
private var mediaCodec: MediaCodec? = null
private var audioCodec: MediaCodec? = null
//MP4 container writer
private var mediaMuxer: MediaMuxer? = null
private var audioRecord: AudioRecord? = null
//EGL wrapper that feeds the OpenGL-rendered frames to the encoder's input surface
private var eglCore: EglCore? = null
private var handlerThread: HandlerThread? = null
private var handler: EncodeHandler? = null
private val bufferInfo = MediaCodec.BufferInfo()
private var videoTrack: Int = -1
private var audioTrack: Int = -1
private var baseTimeStamp: Long = -1L
val START_RECORD: Int = 0
val STOP_RECORD: Int = 1
var isRecording = false
private var mAudioRunnable: AudioCodecRunnable? = null
private var mAudioThread: Thread? = null
private var bufferSize: Int = 0
private val lock = Any()
private var hasStartMuxer: Boolean = false
init {
handlerThread = HandlerThread("videoEncode")
handlerThread?.let {
it.start()
handler = EncodeHandler(it.looper)
}
val path = context.getExternalFilesDir(null)
val file = File("$path/test")
if (!file.exists()) {
file.mkdir()
}
mediaMuxer =
MediaMuxer("$path/test/test.mp4", MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
val mediaFormat: MediaFormat =
MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height)
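// COLOR_FormatSurface tells the encoder its input comes from a Surface
// (filled via EGL below), not from ByteBuffers supplied by the app.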
mediaFormat.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 3500000)
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30)
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
mediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC)
mediaCodec?.let {
it.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
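// createInputSurface() is only valid after configure() and before start()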
val surface = it.createInputSurface()
eglCore = EglCore(surface)
it.start()
}
val audioMediaFormat: MediaFormat = MediaFormat.createAudioFormat(
"audio/mp4a-latm", // AAC audio MIME type (MediaFormat.MIMETYPE_AUDIO_AAC)
48000, 2
)
audioMediaFormat.setInteger(
MediaFormat.KEY_AAC_PROFILE,
MediaCodecInfo.CodecProfileLevel.AACObjectLC
)
audioMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000)
audioCodec = MediaCodec.createEncoderByType("audio/mp4a-latm")
audioCodec?.configure(audioMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
audioCodec?.start()
bufferSize = AudioRecord.getMinBufferSize(
48000,
AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT
)
audioRecord = AudioRecord(
MediaRecorder.AudioSource.MIC,
48000,
AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT,
bufferSize
)
mAudioRunnable = AudioCodecRunnable()
mAudioThread = Thread(mAudioRunnable)
mAudioThread?.start()
}
fun drainEncode(endOfStream: Boolean) {
var outPutBuffer = mediaCodec?.outputBuffers
mediaCodec?.let {
while (isRecording || endOfStream) { // keep draining the tail after stop()
val codeStatus: Int = it.dequeueOutputBuffer(bufferInfo, 10000)
if (codeStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (!endOfStream) {
break
}
} else if (codeStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outPutBuffer = mediaCodec?.outputBuffers
} else if (codeStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
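// INFO_OUTPUT_FORMAT_CHANGED arrives once, before any encoded frames;
// only here may the video track be added. The muxer must not start
// until BOTH tracks (video and audio) have been added.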
synchronized(lock) {
val mediaFormat = it.outputFormat
mediaMuxer?.let { muxer ->
videoTrack = muxer.addTrack(mediaFormat)
if (videoTrack >= 0 && audioTrack >= 0) {
muxer.start()
hasStartMuxer = true
}
}
}
} else if (codeStatus < 0) {
LogUtil.e("->unexpected result from encoder.dequeueOutputBuffer code=$codeStatus")
} else {
val encodedData = outPutBuffer?.get(codeStatus)
encodedData?.let { data ->
if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_CODEC_CONFIG != 0) {
// codec config (SPS/PPS) is already carried by the output format; skip it
bufferInfo.size = 0
}
if (bufferInfo.size != 0 && hasStartMuxer) {
mediaMuxer?.writeSampleData(videoTrack, data, bufferInfo)
}
}
mediaCodec?.releaseOutputBuffer(codeStatus, false)
if (bufferInfo.flags and MediaCodec.BUFFER_FLAG_END_OF_STREAM != 0) {
break
}
}
}
}
}
fun release() {
Thread {
try {
stop()
handlerThread?.quitSafely() // let the pending STOP_RECORD message drain first
handlerThread?.join() // wait for the final drain before tearing down
mAudioThread?.interrupt()
mAudioThread?.join()
audioRecord?.stop()
audioRecord?.release()
audioCodec?.stop()
audioCodec?.release()
mediaCodec?.stop()
mediaCodec?.release()
// Finalize the MP4: without stopping the muxer the file is unplayable
if (hasStartMuxer) {
mediaMuxer?.stop()
}
mediaMuxer?.release()
eglCore?.release()
audioRecord = null
audioCodec = null
handlerThread = null
mediaCodec = null
mediaMuxer = null
eglCore = null
} catch (e: Exception) {
e.printStackTrace()
}
}.start()
}
inner class EncodeHandler(looper: Looper) : Handler(looper) {
override fun handleMessage(msg: Message) {
super.handleMessage(msg)
when (msg.what) {
START_RECORD -> {
drainEncode(false)
prepare()
draw() // without this draw, the encoded video has no picture
if (baseTimeStamp == -1L) {
baseTimeStamp = System.nanoTime()
}
val time = System.nanoTime() - baseTimeStamp
eglCore?.setPresentTime(time)
eglCore?.swapBuffer()
}
STOP_RECORD -> {
try {
// Push EOS through the encoder, then drain the remaining output
mediaCodec?.signalEndOfInputStream()
drainEncode(true)
} catch (e: Exception) {
e.printStackTrace()
}
}
}
}
}
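// start() is called from CodecRender.onDrawFrame() for every preview frame;
// each START_RECORD message drains pending output and renders exactly one
// frame into the encoder's input surface.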
fun start() {
isRecording = true
handler?.sendEmptyMessage(START_RECORD)
}
fun stop() {
isRecording = false
handler?.sendEmptyMessage(STOP_RECORD)
}
private var hasPrepare: Boolean = false
fun prepare() {
if (hasPrepare) {
return
}
hasPrepare = true
eglCore?.eglMakeCurrent()
showFilter.create()
}
private val showFilter: NoFilter = NoFilter()
fun setTextureId(textureId: Int) {
showFilter.setTextureId(textureId)
}
fun draw() {
showFilter.drawFrame()
}
private inner class AudioCodecRunnable : Runnable {
private var baseTimeStamp: Long = -1
var mInfo = MediaCodec.BufferInfo()
init {
baseTimeStamp = System.nanoTime()
audioRecord?.startRecording()
}
override fun run() {
while (isRecording) {
if (audioCodec != null) {
audioCodec?.let {
val index: Int = it.dequeueInputBuffer(0)
if (index >= 0) {
val buffer: ByteBuffer? =
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
it.getInputBuffer(index)
} else {
it.inputBuffers[index]
}
buffer?.let { pcm ->
pcm.clear()
val audioLength: Int = audioRecord?.read(pcm, bufferSize) ?: 0
if (audioLength > 0) {
val curTimeStamp: Long = System.nanoTime()
val time = (curTimeStamp - baseTimeStamp) / 1000 // ns -> us, as MediaCodec expects
val endFlag: Int = if (isRecording) {
0
} else {
MediaCodec.BUFFER_FLAG_END_OF_STREAM
}
// Queue the PCM from AudioRecord into MediaCodec for AAC encoding;
// the encoded frames are written to MediaMuxer in the drain loop below.
audioCodec?.queueInputBuffer(
index,
0,
audioLength,
time,
endFlag
)
}
}
}
var outIndex: Int = MediaCodec.INFO_TRY_AGAIN_LATER
do {
if (audioCodec == null) {
break // the codec was released while draining; stop the loop
} else {
outIndex = it.dequeueOutputBuffer(mInfo, 0)
if (outIndex >= 0) {
val buffer: ByteBuffer? =
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
it.getOutputBuffer(outIndex)
} else {
it.outputBuffers[outIndex]
}
buffer?.position(mInfo.offset)
if (buffer != null && hasStartMuxer && mInfo.presentationTimeUs > 0) {
try {
mediaMuxer?.writeSampleData(audioTrack, buffer, mInfo)
} catch (e: Exception) {
e.printStackTrace()
}
}
it.releaseOutputBuffer(outIndex, false)
} else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
synchronized(lock) {
val mediaFormat = it.outputFormat
mediaMuxer?.let { muxer ->
audioTrack = muxer.addTrack(mediaFormat)
if (videoTrack >= 0 && audioTrack >= 0) {
muxer.start()
hasStartMuxer = true
}
}
}
} else if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no encoded audio available yet; the do-while exits on this value
}
}
} while (outIndex >= 0)
}
} else {
break
}
}
}
}
}
package com.my.video.egl
import android.opengl.*
import android.view.Surface
import com.my.video.LogUtil
class EglCore(surface: Surface) {
//Off-screen EGL context/surface used to feed rendered frames to the encoder in the background
private var eglDisplay: EGLDisplay? = null
private var eglContext: EGLContext? = null
private var eglConfig: EGLConfig? = null
private var eglSurface: EGLSurface? = null
private var shareContext: EGLContext? = null
init {
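// Note: this runs on the GLSurfaceView render thread, so
// eglGetCurrentDisplay()/eglGetCurrentContext() return the preview's
// display and context; the new context shares its textures.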
eglDisplay = EGL14.eglGetCurrentDisplay()
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw RuntimeException("unable to get EGL14 display")
}
val versionArray = IntArray(2)
if (!EGL14.eglInitialize(eglDisplay, versionArray, 0, versionArray, 1)) {
throw RuntimeException("unable to init EGL14")
}
shareContext = EGL14.eglGetCurrentContext()
LogUtil.e("---->init shareContext=$shareContext")
if (eglContext == null) {
val renderableType: Int = EGL14.EGL_OPENGL_ES2_BIT
val config = intArrayOf(
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, renderableType,
0x3142, 1, // EGL_RECORDABLE_ANDROID, recommended for MediaCodec input surfaces
EGL14.EGL_NONE
)
val configs = arrayOfNulls<EGLConfig>(1)
val configNum = IntArray(1)
if (!EGL14.eglChooseConfig(
eglDisplay,
config,
0,
configs,
0,
configs.size,
configNum,
0
)
) {
throw RuntimeException("--->unable to find config")
}
eglConfig = configs[0]
val attribList = intArrayOf(
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, // GLES 2.0, matching the ES2 config above
EGL14.EGL_NONE
)
eglContext = EGL14.eglCreateContext(
eglDisplay,
eglConfig,
shareContext, // share textures with the preview's GL context
attribList,
0
)
if (eglContext == EGL14.EGL_NO_CONTEXT) {
throw RuntimeException("unable to get eglSurface")
}
val surfaceConfig = intArrayOf(EGL14.EGL_NONE)
eglSurface =
EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceConfig, 0)
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw RuntimeException("unable to get eglSurface")
}
LogUtil.e("---->init EGL success")
}
// Confirm the context version with a query.
val values = IntArray(1)
EGL14.eglQueryContext(
eglDisplay, eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION,
values, 0
)
}
fun release() {
if (eglDisplay != null) {
EGL14.eglMakeCurrent(
eglDisplay,
EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_CONTEXT
)
EGL14.eglDestroySurface(eglDisplay, eglSurface)
EGL14.eglDestroyContext(eglDisplay, eglContext)
EGL14.eglReleaseThread()
EGL14.eglTerminate(eglDisplay)
}
eglSurface = null
eglDisplay = null
eglContext = null
}
fun eglMakeCurrent() {
eglContext?.let {
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, it)) {
throw RuntimeException("unable to makeCurrent")
}
}
}
fun swapBuffer() {
EGL14.eglSwapBuffers(eglDisplay, eglSurface)
}
fun setPresentTime(time: Long) {
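// eglPresentationTimeANDROID expects nanoseconds; MediaCodec derives the
// encoded frame's presentation timestamp from it.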
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, time)
}
}
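Finally, a minimal usage sketch. RecordActivity is hypothetical (not part of the original post or the download); it assumes the CodecSurface sketch above and that the CAMERA and RECORD_AUDIO permissions were already granted.
package com.my.video

import android.app.Activity
import android.os.Bundle

// Hypothetical wiring example; onPause/onResume handling is omitted for brevity.
class RecordActivity : Activity() {
    private lateinit var surface: CodecSurface

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        surface = CodecSurface(this)
        setContentView(surface)
        // CodecRender creates CodecUtil on the first drawn frame, so recording
        // starts as soon as camera frames arrive.
    }

    override fun onDestroy() {
        // Stops the encoders, finalizes test.mp4 and frees the EGL resources.
        surface.renderer.release()
        super.onDestroy()
    }
}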