My startup project has never had a video-upload feature, only in-app recording and upload, so I had long wanted to build a video editing and upload flow. That feature is now more or less done.
I started out with MediaCodec's synchronous mode. Testing on a Meizu 16th against Huoshan (火山小视频): for a 40 s video with speed change applied, Huoshan took about 20 s, while my first synchronous version took about 29 s, and after a round of tuning it still sat around 25 s. I knew there was an asynchronous way to write this; as a self-taught programmer with no background in the low-level internals, my instinct when stuck is to search first. After skimming Baidu results and writing a first version, the call to
outputSurface.awaitNewImage();
threw: Unable to update texture contents (see logcat for details).
I then googled the asynchronous codec mode, found a post on Stack Overflow, and that finally solved it. With the speed set to 2x, transcoding a 40 s video now takes about 23 s. There is probably still room for improvement, but I'm leaving it there for now.
Here is the codec initialization code. The key line is
inputSurface.releaseEGLContext();
which is mandatory.
private void initVideoCodec() {
    // Find the video track in the source file.
    for (int i = 0; i < mVideoExtractor.getTrackCount(); i++) {
        MediaFormat format = mVideoExtractor.getTrackFormat(i);
        if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
            videoTrackIndex = i;
            videoFormat = format;
            break;
        }
    }
    if (videoFormat == null) {
        Log.e(TAG, "initVideoCodec: no video track found");
        return;
    }
    mVideoExtractor.selectTrack(videoTrackIndex);
    long firstVideoTime = mVideoExtractor.getSampleTime();
    mVideoExtractor.seekTo(firstVideoTime + videoStartPositionUs, SEEK_TO_PREVIOUS_SYNC);
    try {
        // videoDecoder = MediaCodec.createDecoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
        videoEncoder = MediaCodec.createEncoderByType("video/avc");
    } catch (IOException e) {
        e.printStackTrace();
    }
    videoEncoder.setCallback(new VideoEncoderCallBack());
    videoEncoder.configure(compressMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface surface = videoEncoder.createInputSurface();
    this.inputSurface = new InputSurface_v1(surface);
    this.inputSurface.makeCurrent();
    videoEncoder.start();
    outputSurface = new OutputSurface_v1();
    // Create the decoder on its own HandlerThread so that its asynchronous
    // callbacks are delivered there rather than on this thread.
    mVideoDecoderHandlerThread = new HandlerThread("DecoderThread");
    mVideoDecoderHandlerThread.start();
    mVideoDecoderHandler = new CallbackHandler(mVideoDecoderHandlerThread.getLooper());
    mVideoDecoderHandler.create(false, videoFormat.getString(MediaFormat.KEY_MIME), new VideoDecoderCallBack());
    videoDecoder = mVideoDecoderHandler.getCodec();
    videoDecoder.configure(videoFormat, outputSurface.getSurface(), null, 0);
    videoDecoder.start(); // start the decoder
    // Mandatory: detach the EGL context from this thread; otherwise the
    // decoder callback thread cannot make it current and throws.
    inputSurface.releaseEGLContext();
}
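One thing the snippet doesn't show: compressMediaFormat, the encoder's output format, is configured elsewhere in the project. A typical H.264 setup for a Surface-input encoder looks roughly like this (the concrete numbers are placeholders, not the project's actual values):

// Sketch only: compressMediaFormat is defined elsewhere in the project.
// COLOR_FormatSurface is required because the encoder's input comes from
// createInputSurface() rather than from ByteBuffers.
MediaFormat compressMediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
compressMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
compressMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000); // placeholder
compressMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);      // placeholder
compressMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // one key frame per second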
Next come the encoder and decoder callbacks. The difference from the synchronous version is that the decoder's onOutputBufferAvailable() callback must call two extra methods:
inputSurface.makeCurrent(); and inputSurface.releaseEGLContext();
My understanding is that this is because the decoder runs on its own thread (see the decoder initialization above; the handler it uses is listed further down), and an EGL context can only be current on one thread at a time, so each callback has to attach the context before drawing and detach it again afterwards. The synchronous version, where everything runs on one thread, doesn't need this.
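InputSurface_v1 itself isn't shown in this post. Assuming it wraps EGL14 the way grafika's InputSurface does, the two methods likely boil down to attaching and detaching the context (the field names below are assumptions, not the project's code):

// Assumed shape of the EGL helpers, based on the grafika InputSurface pattern.
public void makeCurrent() {
    // Attach the context to the calling thread.
    if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
        throw new RuntimeException("eglMakeCurrent failed");
    }
}

public void releaseEGLContext() {
    // Detach the context so another thread (the decoder callback) can attach it.
    EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
            EGL14.EGL_NO_CONTEXT);
}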
The key to the video speed change is dividing the original pts by the speed factor:
inputSurface.setPresentationTime((long) ((info.presentationTimeUs - videoStartPositionUs) / speed * 1000));
(the * 1000 converts microseconds to nanoseconds for the surface timestamp). For example, at speed = 2 a frame that originally sat at 8,000,000 µs is stamped at 4,000,000 µs, so the clip plays back in half the time. My original idea was to drop samples while extracting from the file, based on the given speed value. That did make transcoding faster, cutting the time roughly in half, but the encoded video broke up into block artifacts wherever the picture changed quickly. My test clip was me sitting and peeling an orange, and the area around my hands turned to garbage. Presumably this is because H.264 P/B frames reference earlier frames, so dropping arbitrary samples breaks the reference chain for the frames that are kept. If anyone knows a good way to do this, please tell me.
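One hedged, untested thought in that direction: frames can probably only be dropped in whole GOPs, from one sync frame to the next, so that no surviving frame references a dropped one. A sketch of what the check might look like (shouldDropThisGop() is a hypothetical predicate, not something from this project):

// Untested sketch: skip whole GOPs instead of individual samples, so the
// decoder never needs a frame that was dropped. shouldDropThisGop() is
// hypothetical and would implement the speed-based dropping policy.
boolean isSyncFrame =
        (mVideoExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0;
if (isSyncFrame && shouldDropThisGop()) {
    // Jump to the next key frame; everything in between is skipped as a unit.
    mVideoExtractor.seekTo(mVideoExtractor.getSampleTime() + 1,
            MediaExtractor.SEEK_TO_NEXT_SYNC);
}

The decoder and encoder callbacks: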
private class VideoDecoderCallBack extends MediaCodec.Callback {

    @Override
    public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
        ByteBuffer inputBuffer = codec.getInputBuffer(index); // get an input byte buffer
        inputBuffer.clear();
        int readSampleData = mVideoExtractor.readSampleData(inputBuffer, 0); // fill it from the extractor
        long dur = mVideoExtractor.getSampleTime() - videoStartPositionUs; // how much of the clip has been read so far
        // Read speed-times the target duration, since the pts will be compressed by 1/speed.
        if ((dur < clipDur * speed) && readSampleData > 0) {
            // queueInputBuffer hands the buffer at this index back to the decoder for decoding.
            codec.queueInputBuffer(index, 0, readSampleData, mVideoExtractor.getSampleTime(), 0);
            mVideoExtractor.advance();
            // if (speed == 2 && frameCount % 2 == 0) { // at 2x, skip one frame
            //     extractor.advance();
            // } else if (speed == 3 && frameCount % 3 == 0) { // at 3x, skip two frames
            //     extractor.advance();
            //     extractor.advance();
            // }
        } else {
            codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            Log.e(TAG, "onInputBufferAvailable: video samples exhausted");
        }
    }

    @Override
    public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            Log.d(TAG, "video decoder: codec config buffer");
            codec.releaseOutputBuffer(index, false);
            return;
        }
        boolean doRender = (info.size != 0 && info.presentationTimeUs > videoStartPositionUs);
        // Rendering to the output surface is what hands the frame to the encoder,
        // roughly the equivalent of encoder.queueInputBuffer() in ByteBuffer mode.
        codec.releaseOutputBuffer(index, doRender);
        if (doRender) {
            // This callback runs on the decoder thread, so the EGL context must be
            // attached here; this is the part that differs from the synchronous version.
            inputSurface.makeCurrent();
            outputSurface.awaitNewImage();
            outputSurface.drawImage();
            inputSurface.setPresentationTime((long) ((info.presentationTimeUs - videoStartPositionUs) / speed * 1000));
            inputSurface.swapBuffers();
            inputSurface.releaseEGLContext(); // detach again so the next callback can attach it
        }
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            videoEncoder.signalEndOfInputStream();
        }
    }

    @Override
    public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
    }

    @Override
    public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
    }
}
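// OutputSurface_v1 is not shown in this post. Assuming it follows grafika's
// OutputSurface pattern, awaitNewImage() is roughly the wait-and-latch below
// (mSurfaceTexture and the other fields here are assumptions, not the real
// code). Notably, SurfaceTexture.updateTexImage() is the call behind the
// "Unable to update texture contents" error from earlier, which it raises
// when the calling thread has no current EGL context; that is why
// makeCurrent() must run inside the decoder callback above.
private final Object mFrameSyncObject = new Object();
private boolean mFrameAvailable;

@Override
public void onFrameAvailable(SurfaceTexture st) {
    // Invoked by the system when the decoder renders a frame to the SurfaceTexture.
    synchronized (mFrameSyncObject) {
        mFrameAvailable = true;
        mFrameSyncObject.notifyAll();
    }
}

public void awaitNewImage() {
    synchronized (mFrameSyncObject) {
        while (!mFrameAvailable) {
            try {
                mFrameSyncObject.wait(2500); // grafika uses a timeout to avoid hanging forever
            } catch (InterruptedException ie) {
                throw new RuntimeException(ie);
            }
        }
        mFrameAvailable = false;
    }
    mSurfaceTexture.updateTexImage(); // latch the new frame into the OES texture
}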
private class VideoEncoderCallBack extends MediaCodec.Callback {

    @Override
    public void onInputBufferAvailable(@NonNull MediaCodec codec, int index) {
        // Never called: the encoder's input comes from its input Surface.
    }

    @Override
    public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
        if (info.size != 0) {
            ByteBuffer outputBuffer = codec.getOutputBuffer(index);
            if (outputBuffer != null) {
                // byte[] dataSources = new byte[outputBuffer.remaining()];
                // outputBuffer.get(dataSources);
                // Block until the muxer has been started (see startMux below).
                if (!muxStarted) {
                    synchronized (lock) {
                        if (!muxStarted) {
                            try {
                                lock.wait();
                            } catch (InterruptedException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                }
                mMediaMuxer.writeSampleData(muxVideoTrack, outputBuffer, info);
                if (listener != null) {
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            listener.onProgress((int) (((float) info.presentationTimeUs / clipDur) * 100));
                        }
                    });
                }
            }
        }
        codec.releaseOutputBuffer(index, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            videoFinish = true;
            release();
            Log.e(TAG, "run: video transcode finished, mOutputVideoPath:" + mOutputVideoPath +
                    "\n video transcode took: " + (System.currentTimeMillis() - before));
        }
    }

    @Override
    public void onError(@NonNull MediaCodec codec, @NonNull MediaCodec.CodecException e) {
    }

    @Override
    public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
        startMux(format, 0);
    }
}
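The startMux() called in onOutputFormatChanged() isn't shown in this post. Given the lock.wait() above, it presumably registers the track, starts the muxer, and wakes the waiting callback, along these lines (a guess at the shape, not the project's actual code; the real version likely also deals with the audio track):

// Assumed counterpart of the lock.wait() in onOutputBufferAvailable().
// The second parameter mirrors the startMux(format, 0) call site and is
// ignored in this sketch.
private void startMux(MediaFormat videoFormat, int trackHint) {
    synchronized (lock) {
        muxVideoTrack = mMediaMuxer.addTrack(videoFormat);
        mMediaMuxer.start();
        muxStarted = true;
        lock.notifyAll();
    }
}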
Here is the handler used when setting up the decoder. The point of creating the MediaCodec on a dedicated HandlerThread is that asynchronous callbacks are delivered on the thread the codec was created on, so this keeps the decoder's callbacks off the thread driving the encoder:
static class CallbackHandler extends Handler {

    private MediaCodec mCodec;
    private boolean mEncoder;
    private MediaCodec.Callback mCallback;
    private String mMime;
    private boolean mSetDone;

    CallbackHandler(Looper l) {
        super(l);
    }

    @Override
    public void handleMessage(Message msg) {
        // Runs on the handler thread: create the codec here so that its
        // asynchronous callbacks are delivered on this thread.
        try {
            mCodec = mEncoder ? MediaCodec.createEncoderByType(mMime) : MediaCodec.createDecoderByType(mMime);
        } catch (IOException ioe) {
            ioe.printStackTrace();
        }
        mCodec.setCallback(mCallback);
        synchronized (this) {
            mSetDone = true;
            notifyAll();
        }
    }

    void create(boolean encoder, String mime, MediaCodec.Callback callback) {
        mEncoder = encoder;
        mMime = mime;
        mCallback = callback;
        mSetDone = false;
        sendEmptyMessage(0);
        // Block the calling thread until the codec has been created above.
        synchronized (this) {
            while (!mSetDone) {
                try {
                    wait();
                } catch (InterruptedException ie) {
                    ie.printStackTrace();
                }
            }
        }
    }

    MediaCodec getCodec() {
        return mCodec;
    }
}
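Incidentally, from API 23 on this helper shouldn't be necessary, because setCallback gained an overload that takes a Handler; the CallbackHandler approach above is what works down to API 21, where asynchronous MediaCodec was introduced:

// API 23+ alternative: deliver callbacks on an explicit handler's thread.
videoDecoder.setCallback(new VideoDecoderCallBack(), mVideoDecoderHandler);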
Audio speed change can't be done by rewriting pts. I tried it: dividing both the audio and the video pts by the speed factor and muxing them with MediaMuxer produces stuttering playback. Changing audio speed needs a time-stretching transcoder: decoded PCM goes to the transcoder, and the transformed output is then handed to the audio encoder.
The audio path uses the synchronous codec mode. Audio processing is almost always far faster than video, so I didn't bother with an asynchronous version. The synchronous code is long and cluttered, so rather than the whole thing, here is just the part that drives the transcoder.
In the code below, mAudioTranscoder is that transcoder.
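The setup of mAudioTranscoder isn't part of this excerpt. Judging from AudioTranscoder's public API (full source at the bottom of the post), the wiring presumably looks like this, with sampleRate and channelCount taken from the audio track's MediaFormat:

// Assumed wiring, inferred from AudioTranscoder's API below.
mAudioTranscoder = new AudioTranscoder();
mAudioTranscoder.setSpeed(speed); // e.g. 2.0f for double speed
mAudioTranscoder.setPitch(1f);    // keep the original pitch
try {
    mAudioTranscoder.configure(sampleRate, channelCount, AudioFormat.ENCODING_PCM_16BIT);
} catch (AudioTranscoder.UnhandledFormatException e) {
    e.printStackTrace(); // only 16-bit PCM input is supported
}
mAudioTranscoder.flush(); // flush() is what instantiates the internal Sonic processor

The loop excerpt itself: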
if (!decodeDone) {
    int index = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
    if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // no output available yet
    } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // expected before the first buffer of data
        MediaFormat newFormat = decoder.getOutputFormat();
    } else if (index < 0) {
        // ignore other negative status codes
    } else {
        // Based on the requested start position, decide whether this sample
        // should be encoded; if not, it is simply discarded.
        boolean canEncode = (info.size != 0 && info.presentationTimeUs - firstSampleTime > startPosition);
        boolean endOfStream = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        if (canEncode && !endOfStream) {
            ByteBuffer decoderOutputBuffer = decoder.getOutputBuffer(index);
            mAudioTranscoder.queueInput(decoderOutputBuffer);
            ByteBuffer output = mAudioTranscoder.getOutput();
            if (output != null && output.hasRemaining()) {
                // Scale the pts by the speed factor, as with video.
                info.presentationTimeUs = (long) ((info.presentationTimeUs - startPosition) / speed);
                int size = output.remaining();
                mTotalBytesRead += size;
                int encodeInputIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (encodeInputIndex >= 0) {
                    ByteBuffer encoderInputBuffer = encoder.getInputBuffer(encodeInputIndex);
                    encoderInputBuffer.clear();
                    encoderInputBuffer.put(output);
                    encoder.queueInputBuffer(encodeInputIndex, info.offset, size, info.presentationTimeUs, 0);
                    encodeinput++;
                    Log.d(TAG, "startAudioCodec: audio encodeInput" + encodeinput + " dataSize" + size + " sampleTime" + info.presentationTimeUs);
                    // pts from the byte count: 2 bytes per 16-bit sample, 2 channels
                    mPresentationTimeUs = 1000000L * (mTotalBytesRead / 2 / 2) / sampleRate;
                }
            }
        }
        // ... releasing the output buffer and end-of-stream handling are elided from this excerpt
    }
}
For the complete code, see the open-source project linked at the top of this post; I'll be syncing it in over the next few days, and the synchronous video transcoding code is in the project as well. Below is the full source of the transcoder:
package com.cgfay.media.recorder;

import android.media.AudioFormat;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;

/**
 * Audio speed transcoder, adapted from ExoPlayer's SonicAudioProcessor.
 */
public final class AudioTranscoder {

    /**
     * A value for various fields to indicate that the field's value is unknown or not applicable.
     */
    public static final int NO_VALUE = -1;

    /**
     * An empty, direct {@link ByteBuffer}.
     */
    private static final ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());

    /**
     * The maximum allowed playback speed in {@link #setSpeed(float)}.
     */
    public static final float MAXIMUM_SPEED = 8.0f;

    /**
     * The minimum allowed playback speed in {@link #setSpeed(float)}.
     */
    public static final float MINIMUM_SPEED = 0.1f;

    /**
     * The maximum allowed pitch in {@link #setPitch(float)}.
     */
    public static final float MAXIMUM_PITCH = 8.0f;

    /**
     * The minimum allowed pitch in {@link #setPitch(float)}.
     */
    public static final float MINIMUM_PITCH = 0.1f;

    /**
     * Indicates that the output sample rate should be the same as the input.
     */
    public static final int SAMPLE_RATE_NO_CHANGE = -1;

    /**
     * The threshold below which the difference between two pitch/speed factors is negligible.
     */
    private static final float CLOSE_THRESHOLD = 0.01f;

    /**
     * The minimum number of output bytes at which the speedup is calculated using the input/output
     * byte counts, rather than using the current playback parameters speed.
     */
    private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024;

    private int pendingOutputSampleRateHz;
    private int channelCount;
    private int sampleRateHz;

    private Sonic sonic;
    private float speed;
    private float pitch;
    private int outputSampleRateHz;

    private ByteBuffer buffer;
    private ShortBuffer shortBuffer;
    private ByteBuffer outputBuffer;
    private long inputBytes;
    private long outputBytes;
    private boolean inputEnded;

    /**
     * Creates a new audio processor.
     */
    public AudioTranscoder() {
        speed = 1f;
        pitch = 1f;
        channelCount = NO_VALUE;
        sampleRateHz = NO_VALUE;
        outputSampleRateHz = NO_VALUE;
        buffer = EMPTY_BUFFER;
        shortBuffer = buffer.asShortBuffer();
        outputBuffer = EMPTY_BUFFER;
        pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
    }

    /**
     * Sets the playback speed. The new speed will take effect after a call to {@link #flush()}.
     *
     * @param speed The requested new playback speed.
     * @return The actual new playback speed.
     */
    public float setSpeed(float speed) {
        this.speed = constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED);
        return this.speed;
    }

    /**
     * Sets the playback pitch. The new pitch will take effect after a call to {@link #flush()}.
     *
     * @param pitch The requested new pitch.
     * @return The actual new pitch.
     */
    public float setPitch(float pitch) {
        this.pitch = constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH);
        return this.pitch; // return the constrained value, as documented
    }

    /**
     * Sets the sample rate for output audio, in hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output
     * audio at the same sample rate as the input. After calling this method, call
     * {@link #configure(int, int, int)} to start using the new sample rate.
     *
     * @param sampleRateHz The sample rate for output audio, in hertz.
     * @see #configure(int, int, int)
     */
    public void setOutputSampleRateHz(int sampleRateHz) {
        pendingOutputSampleRateHz = sampleRateHz;
    }

    /**
     * Returns the specified duration scaled to take into account the speedup factor of this instance,
     * in the same units as {@code duration}.
     *
     * @param duration The duration to scale taking into account speedup.
     * @return The specified duration scaled to take into account speedup, in the same units as
     *     {@code duration}.
     */
    public long scaleDurationForSpeedup(long duration) {
        if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
            return outputSampleRateHz == sampleRateHz
                    ? scaleLargeTimestamp(duration, inputBytes, outputBytes)
                    : scaleLargeTimestamp(duration, inputBytes * outputSampleRateHz,
                            outputBytes * sampleRateHz);
        } else {
            return (long) ((double) speed * duration);
        }
    }

    /**
     * Configures the processor to process input audio with the specified format. After calling this
     * method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
     * processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
     * processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
     * result of the call. If it's active, {@link #getOutputSampleRateHz()},
     * {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} return the processor's output
     * format.
     *
     * @param sampleRateHz The sample rate of input audio in Hz.
     * @param channelCount The number of interleaved channels in input audio.
     * @param encoding The encoding of input audio.
     * @return {@code true} if the processor must be flushed or the value returned by
     *     {@link #isActive()} has changed as a result of the call.
     * @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
     */
    public boolean configure(int sampleRateHz, int channelCount, int encoding)
            throws UnhandledFormatException {
        if (encoding != AudioFormat.ENCODING_PCM_16BIT) {
            throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
        }
        int outputSampleRateHz = pendingOutputSampleRateHz == SAMPLE_RATE_NO_CHANGE
                ? sampleRateHz : pendingOutputSampleRateHz;
        if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount
                && this.outputSampleRateHz == outputSampleRateHz) {
            return false;
        }
        this.sampleRateHz = sampleRateHz;
        this.channelCount = channelCount;
        this.outputSampleRateHz = outputSampleRateHz;
        return true;
    }

    /**
     * Returns whether the processor is configured and active.
     */
    public boolean isActive() {
        return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
                || outputSampleRateHz != sampleRateHz;
    }

    /**
     * Returns the number of audio channels in the data output by the processor. The value may change
     * as a result of calling {@link #configure(int, int, int)} and is undefined if the instance is
     * not active.
     */
    public int getOutputChannelCount() {
        return channelCount;
    }

    /**
     * Returns the audio encoding used in the data output by the processor. The value may change as a
     * result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
     * active.
     */
    public int getOutputEncoding() {
        return AudioFormat.ENCODING_PCM_16BIT;
    }

    /**
     * Returns the sample rate of audio output by the processor, in hertz. The value may change as a
     * result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
     * active.
     */
    public int getOutputSampleRateHz() {
        return outputSampleRateHz;
    }

    /**
     * Queues audio data between the position and limit of the input {@code buffer} for processing.
     * {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
     * read-only. Its position will be advanced by the number of bytes consumed (which may be zero).
     * The caller retains ownership of the provided buffer. Calling this method invalidates any
     * previous buffer returned by {@link #getOutput()}.
     *
     * @param inputBuffer The input buffer to process.
     */
    public void queueInput(ByteBuffer inputBuffer) {
        // Note: flush() must have been called at least once before queueing
        // input, so that the Sonic instance exists.
        if (inputBuffer.hasRemaining()) {
            ShortBuffer shortBuffer = inputBuffer.asShortBuffer();
            int inputSize = inputBuffer.remaining();
            inputBytes += inputSize;
            sonic.queueInput(shortBuffer);
            inputBuffer.position(inputBuffer.position() + inputSize);
        }
        int outputSize = sonic.getSamplesAvailable() * channelCount * 2; // 2 bytes per 16-bit sample
        if (outputSize > 0) {
            if (buffer.capacity() < outputSize) {
                buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder());
                shortBuffer = buffer.asShortBuffer();
            } else {
                buffer.clear();
                shortBuffer.clear();
            }
            sonic.getOutput(shortBuffer);
            outputBytes += outputSize;
            buffer.limit(outputSize);
            outputBuffer = buffer;
        }
    }

    /**
     * Queues an end of stream signal. After this method has been called,
     * {@link #queueInput(ByteBuffer)} may not be called until after the next call to
     * {@link #flush()}. Calling {@link #getOutput()} will return any remaining output data. Multiple
     * calls may be required to read all of the remaining output data. {@link #isEnded()} will return
     * {@code true} once all remaining output data has been read.
     */
    public void endOfStream() {
        sonic.queueEndOfStream();
        inputEnded = true;
    }

    /**
     * Returns a buffer containing processed output data between its position and limit. The buffer
     * will always be a direct byte buffer with native byte order. Calling this method invalidates any
     * previously returned buffer. The buffer will be empty if no output is available.
     *
     * @return A buffer containing processed output data between its position and limit.
     */
    public ByteBuffer getOutput() {
        ByteBuffer outputBuffer = this.outputBuffer;
        this.outputBuffer = EMPTY_BUFFER;
        return outputBuffer;
    }

    /**
     * Returns whether this processor will return no more output from {@link #getOutput()} until it
     * has been {@link #flush()}ed and more input has been queued.
     */
    public boolean isEnded() {
        return inputEnded && (sonic == null || sonic.getSamplesAvailable() == 0);
    }

    /**
     * Clears any state in preparation for receiving a new stream of input buffers.
     */
    public void flush() {
        sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz);
        outputBuffer = EMPTY_BUFFER;
        inputBytes = 0;
        outputBytes = 0;
        inputEnded = false;
    }

    /**
     * Resets the processor to its unconfigured state.
     */
    public void reset() {
        sonic = null;
        buffer = EMPTY_BUFFER;
        shortBuffer = buffer.asShortBuffer();
        outputBuffer = EMPTY_BUFFER;
        channelCount = NO_VALUE;
        sampleRateHz = NO_VALUE;
        outputSampleRateHz = NO_VALUE;
        inputBytes = 0;
        outputBytes = 0;
        inputEnded = false;
        pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE;
    }

    /**
     * Constrains a value to the specified bounds.
     *
     * @param value The value to constrain.
     * @param min The lower bound.
     * @param max The upper bound.
     * @return The constrained value {@code Math.max(min, Math.min(value, max))}.
     */
    private static float constrainValue(float value, float min, float max) {
        return Math.max(min, Math.min(value, max));
    }

    /**
     * Scales a large timestamp.
     *
     * Logically, scaling consists of a multiplication followed by a division. The actual operations
     * performed are designed to minimize the probability of overflow.
     *
     * @param timestamp The timestamp to scale.
     * @param multiplier The multiplier.
     * @param divisor The divisor.
     * @return The scaled timestamp.
     */
    private static long scaleLargeTimestamp(long timestamp, long multiplier, long divisor) {
        if (divisor >= multiplier && (divisor % multiplier) == 0) {
            long divisionFactor = divisor / multiplier;
            return timestamp / divisionFactor;
        } else if (divisor < multiplier && (multiplier % divisor) == 0) {
            long multiplicationFactor = multiplier / divisor;
            return timestamp * multiplicationFactor;
        } else {
            double multiplicationFactor = (double) multiplier / divisor;
            return (long) (timestamp * multiplicationFactor);
        }
    }

    /**
     * Exception thrown when a processor can't be configured for a given input audio format.
     */
    public static final class UnhandledFormatException extends Exception {

        public UnhandledFormatException(int sampleRateHz, int channelCount, int encoding) {
            super("Unhandled format: " + sampleRateHz + " Hz, " + channelCount + " channels in encoding "
                    + encoding);
        }
    }
}