MediaCodec,MediaExtractor,TextureView+AudioTrack这四个类的作用就不说了。
https://blog.csdn.net/column/details/15450.html
MediaExtractor解封装,拿到H264数据。
MediaCodec把数据解码到Surface中。
TextureView展示Surface中的数据。
MediaExtractor解封装、MediaCodec解码音频数据,再交给AudioTrack播放。
当然这个还可以用TextureView.getBitmap()获取视频的一帧图片,保存到本地。
// Excerpt: open the container and select the first video track.
extractor = new MediaExtractor();
extractor.setDataSource(mVideoPath);
for (int i = 0; i < extractor.getTrackCount(); i++) {
MediaFormat format = extractor.getTrackFormat(i);
String mime = format.getString(MediaFormat.KEY_MIME);
// Only the first "video/" track is selected; audio is handled separately below.
if (mime.startsWith("video/")) {
extractor.selectTrack(i);
break;
}
}
......
// Excerpt: feed one demuxed sample into the codec's input buffer.
int sampleSize = extractor.readSampleData(buffer, 0);
if (sampleSize < 0) {// no more samples: signal end-of-stream to the decoder
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
isEOS = true;
}
else {
// pass the sample data to the decoder together with its presentation timestamp
decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
extractor.advance();
}
// (from setup) attach the codec to the rendering Surface before start()
decoder.configure(format, mDecoderSurface, null, 0);
...
// render == true: releasing the buffer sends the frame to the Surface
decoder.releaseOutputBuffer(outIndex, true);
// Excerpt: the SurfaceTexture becomes available asynchronously; MediaCodec
// decoding blocks, so it must run off the UI thread.
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1)
{
mSurface = new Surface(surfaceTexture);
new Thread(new Runnable()
{
@Override
public void run()
{
// NOTE(review): reads external storage directly — assumes the read permission is granted.
VideoDecoder = new VideoDecoder(Environment.getExternalStorageDirectory().getPath()+"/360.mp4",mSurface);
VideoDecoder.start();
}
}).start();
下面是源代码:
/**
 * Plays the video track of a local MP4 file: a MediaExtractor demuxes samples,
 * a MediaCodec decodes them directly into the supplied Surface, and a
 * TextureView (owned by the caller) displays that Surface.
 *
 * Intended to be driven from a background thread: {@link #start()} blocks in a
 * decode loop until end of stream or thread interruption.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class VideoDecoder {
    private static final String TAG = VideoDecoder.class.getSimpleName();
    /** Timeout for dequeueInputBuffer/dequeueOutputBuffer, in microseconds (10 ms). */
    private static final long DEQUEUE_TIMEOUT_US = 10000;
    /** Crude inter-frame pacing (~28 fps). TODO: pace from presentationTimeUs instead. */
    private static final long FRAME_SLEEP_MS = 35;
    /** Back-off when the output queue has nothing ready yet. */
    private static final long RETRY_SLEEP_MS = 50;

    private String mVideoPath;
    private MediaExtractor extractor;
    private Surface mDecoderSurface;
    private MediaCodec decoder;
    private ByteBuffer[] inputBuffers;
    private BufferInfo info = new BufferInfo();
    private boolean isEOS = false;      // input side reached end of stream
    private int index;                  // number of frames rendered so far
    // volatile: written on the decode thread, read via getFramestamp() from others.
    private volatile long framestamp;

    /**
     * @param videoPath absolute path of the MP4 file to play
     * @param surface   render target; released by {@link #stop(boolean)}
     */
    public VideoDecoder(String videoPath, Surface surface) {
        mVideoPath = videoPath;
        mDecoderSurface = surface;
    }

    /** Opens the file, configures the codec and decodes until EOS. Blocks. */
    public void start() {
        try {
            prepareAndDecode();
        } catch (Exception e) {
            // Was e.printStackTrace(); route through the same logger as the rest
            // of the class and keep the cause attached.
            Log.e(TAG, "Failed to start video decoding", e);
        }
    }

    /** Selects the first video track, configures the codec, enters the loop. */
    private void prepareAndDecode() throws Exception {
        extractor = new MediaExtractor();
        extractor.setDataSource(mVideoPath);
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
                decoder = MediaCodec.createDecoderByType(mime);
                // Decode directly into the Surface; no output buffers to copy.
                decoder.configure(format, mDecoderSurface, null, 0);
                break;
            }
        }
        if (decoder == null) {
            Log.e(TAG, "Can't find video info!");
            return;
        }
        decoder.start();
        inputBuffers = decoder.getInputBuffers();
        decodeNext();
    }

    /**
     * Main decode loop: feeds samples in, renders decoded frames to the
     * Surface, and stops when the end-of-stream flag comes back out of the
     * codec or the thread is interrupted.
     */
    public void decodeNext() {
        while (!Thread.interrupted()) {
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];
                    int sampleSize = extractor.readSampleData(buffer, 0);
                    if (sampleSize < 0) {
                        Log.d(TAG, "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }
            int outIndex = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            // All decoded frames have been rendered, we can stop playing now.
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.i(TAG, "total decode " + index + " frames");
                stop(true);
                Log.d(TAG, "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
            switch (outIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    Log.d(TAG, "New format " + decoder.getOutputFormat());
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    Log.d(TAG, "dequeueOutputBuffer timed out!");
                    sleepPreservingInterrupt(RETRY_SLEEP_MS);
                    break;
                default:
                    index++;
                    // render == true sends the frame to the Surface on release.
                    decoder.releaseOutputBuffer(outIndex, true);
                    framestamp = info.presentationTimeUs;
                    sleepPreservingInterrupt(FRAME_SLEEP_MS);
                    break;
            }
        }
    }

    /**
     * Sleeps without swallowing interruption.  The original caught
     * InterruptedException and dropped it, which cleared the interrupt flag and
     * prevented the Thread.interrupted() loop condition from ever firing;
     * re-interrupting lets the loop exit on the next iteration.
     */
    private static void sleepPreservingInterrupt(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException ignored) {
            Thread.currentThread().interrupt();
        }
    }

    /** @return presentation timestamp (µs) of the most recently rendered frame. */
    public long getFramestamp() {
        return framestamp;
    }

    /**
     * Releases codec, extractor and surface.  Safe to call more than once.
     * The codec is stopped first so nothing is still rendering into the
     * Surface when it is released (the original released the Surface first).
     *
     * @param doCompleted unused; kept for source compatibility with callers
     */
    public void stop(boolean doCompleted) {
        if (decoder != null) {
            decoder.stop();
            decoder.release();
            decoder = null;
        }
        if (extractor != null) {
            extractor.release();
            extractor = null;
        }
        if (mDecoderSurface != null) {
            mDecoderSurface.release();
            mDecoderSurface = null;
        }
    }
}
到此视频播放实现,现在实现音频解码播放。
public void decoderAndPlay() {
ByteBuffer[] inputBuffers = mDecoder.getInputBuffers();
ByteBuffer[] outputBuffers = mDecoder.getOutputBuffers();
BufferInfo info = new BufferInfo();
int buffsize = AudioTrack.getMinBufferSize(mSampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
// create an audiotrack object
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, mSampleRate,
AudioFormat.CHANNEL_OUT_MONO,
AudioFormat.ENCODING_PCM_16BIT,
buffsize,
AudioTrack.MODE_STREAM);
audioTrack.play();
while (!eosReceived) {
int inIndex = mDecoder.dequeueInputBuffer(TIMEOUT_US);
if (inIndex >= 0) {
ByteBuffer buffer = inputBuffers[inIndex];
int sampleSize = mExtractor.readSampleData(buffer, 0);
if (sampleSize < 0) {
// We shouldn't stop the playback at this point, just pass the EOS
// flag to mDecoder, we will get it again from the
// dequeueOutputBuffer
mDecoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
} else {
mDecoder.queueInputBuffer(inIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);
mExtractor.advance();
}
int outIndex = mDecoder.dequeueOutputBuffer(info, TIMEOUT_US);
switch (outIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mDecoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mDecoder.getOutputFormat();
audioTrack.setPlaybackRate(format.getInteger(MediaFormat.KEY_SAMPLE_RATE));
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
break;
default:
ByteBuffer outBuffer = outputBuffers[outIndex];
final byte[] chunk = new byte[info.size];
outBuffer.get(chunk); // Read the buffer all at once
outBuffer.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
audioTrack.write(chunk, info.offset, info.offset + info.size); // AudioTrack write data
mDecoder.releaseOutputBuffer(outIndex, false);
break;
}
// All decoded frames have been rendered, we can stop playing now
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
break;
}
}
}
/**
 * Demo activity: renders a decoded MP4 into a TextureView and plays its audio
 * track via AudioTrack.  Decoding starts once the SurfaceTexture is available.
 */
public class VideoPlayerActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener
{
    private TextureView mTextureView;
    // Renamed from "VideoDecoder": a field must not shadow its own class name.
    private VideoDecoder mVideoDecoder;
    private Surface mSurface;

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_video_player);
        mTextureView = (TextureView) findViewById(R.id.textureView);
        // Decoding can only start once onSurfaceTextureAvailable fires.
        mTextureView.setSurfaceTextureListener(this);
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int i, int i1)
    {
        mSurface = new Surface(surfaceTexture);
        // Build the path once; video and audio play the same file.
        // NOTE(review): reads external storage directly — assumes the read permission is granted.
        final String path = Environment.getExternalStorageDirectory().getPath() + "/360.mp4";
        // MediaCodec decoding blocks, so keep it off the UI thread.
        new Thread(new Runnable()
        {
            @Override
            public void run()
            {
                mVideoDecoder = new VideoDecoder(path, mSurface);
                mVideoDecoder.start();
            }
        }).start();
        AudioDecoderThread audioDecoderThread = new AudioDecoderThread();
        audioDecoderThread.startPlay(path);
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int i, int i1)
    {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture)
    {
        // Guard against the decoder thread not having assigned the field yet
        // (the original dereferenced it unconditionally — NPE risk).
        if (mVideoDecoder != null) {
            mVideoDecoder.stop(true);
            mVideoDecoder = null;
        }
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture)
    {
    }
}