Decoding a raw H.265 stream on Android: FFmpeg vs. MediaCodec

I suddenly realized that I should write down the things I have built, so I can find them again later; I wish I had started doing this sooner. Today I cleaned up a demo I made a while ago, and I hope it is useful to you too. If the port gives you trouble, add me on QQ: 106601549.

This is only a simple demo; handling the JNI-layer global variables properly still takes real work. Treat it as a starting point to build on: there is very little material online, so you will have to try things out yourself.

The first step in decoding H.265 with FFmpeg on Android is porting FFmpeg itself. There are plenty of references for that; here is the configure script I used:

#!/bin/bash
# configure your ndk root dir.
NDK=/home/zhangshiming/ndk-install-root/android-ndk-r10e
# configure your arch
SYSROOT=$NDK/platforms/android-12/arch-arm/
TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt/linux-x86_64

function build_one
{
./configure \
    --prefix=$PREFIX \
    --enable-shared \
    --disable-static \
    --disable-debug \
    --disable-encoders \
    --disable-decoders \
    --enable-decoder=hevc \
    --enable-hwaccels \
    --disable-muxers \
    --disable-demuxers \
    --enable-muxer=hevc \
    --enable-demuxer=hevc \
    --disable-parsers \
    --enable-parser=hevc \
    --disable-network \
    --disable-protocols \
    --disable-devices \
    --disable-indevs \
    --disable-outdevs \
    --disable-yasm \
    --enable-asm \
    --enable-neon \
    --disable-programs \
    --disable-ffmpeg \
    --disable-ffplay \
    --disable-ffprobe \
    --disable-ffserver \
    --disable-doc \
    --disable-htmlpages \
    --disable-manpages \
    --disable-podpages \
    --disable-txtpages \
    --enable-cross-compile \
    --cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
    --target-os=linux \
    --arch=arm \
    --sysroot=$SYSROOT \
    --extra-cflags="-Os -fpic $ADDI_CFLAGS" \
    --extra-ldflags="$ADDI_LDFLAGS" \
    $ADDITIONAL_CONFIGURE_FLAG
}
CPU=arm
PREFIX=$(pwd)/android/$CPU
#ADDI_CFLAGS="-marm"
ADDI_CFLAGS="-mfpu=neon -mfloat-abi=softfp"
build_one
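# after configure succeeds, build and install into $PREFIX
# (assumed invocation, run manually from the FFmpeg source root):
#   make clean
#   make -j4
#   make install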

--------------------------------------------------------- the above is android_build.sh ---------------------------------------------------------------

These are cross-compilation options; run ./configure --help to see them all. I kept only what HEVC needs (the hevc decoder, parser, muxer and demuxer) and disabled everything else, so the resulting libraries are small and the build is fast. That is the quickest way to consume FFmpeg. For decoding speed, enable the optimization switches --enable-asm and --enable-neon, and be sure to pass -mfpu=neon to the compiler.


Once configure succeeds, build and run make install. The libraries are installed under PREFIX=$(pwd)/android/$CPU, i.e. an android directory appears next to android_build.sh; just copy the libraries out of it.


Next up is Android.mk:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := prebuilt/libavcodec-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := prebuilt/libavformat-57.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := prebuilt/libavutil-55.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := prebuilt/libswresample-2.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := prebuilt/libswscale-4.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)

LOCAL_SRC_FILES := VideoPlayer.c
LOCAL_LDLIBS += -llog -lz -landroid
LOCAL_MODULE := VideoPlayer
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_CFLAGS += -mfpu=neon -mfloat-abi=softfp -fPIC

LOCAL_SHARED_LIBRARIES:= avcodec avformat avutil swresample swscale

include $(BUILD_SHARED_LIBRARY)





include $(CLEAR_VARS)
LOCAL_SRC_FILES := SampleReader.c
LOCAL_LDLIBS += -llog -lz -landroid
LOCAL_MODULE := SampleReader
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include
LOCAL_CFLAGS += -mfpu=neon -mfloat-abi=softfp -fPIC
LOCAL_SHARED_LIBRARIES:= avcodec avformat avutil swresample swscale
include $(BUILD_SHARED_LIBRARY)


VideoPlayer is the software-decoding path; SampleReader splits the raw H.265 stream into NAL units for hardware decoding with MediaCodec.



Application.mk is very simple:

APP_ABI := armeabi armeabi-v7a
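One thing to watch: on older Android versions the dynamic linker does not automatically resolve dependencies between app-private .so files, so it can be safer to load the FFmpeg libraries explicitly, lowest layer first, before the JNI wrappers. A sketch (the ordering matches the dependencies implied by the Android.mk above; verify it on your target devices):

static {
    // FFmpeg libraries first, in dependency order (libavutil has none).
    System.loadLibrary("avutil-55");
    System.loadLibrary("swresample-2");
    System.loadLibrary("avcodec-57");
    System.loadLibrary("avformat-57");
    System.loadLibrary("swscale-4");
    // Then the JNI wrappers that link against them.
    System.loadLibrary("VideoPlayer");
    System.loadLibrary("SampleReader");
}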


Next come the native method declarations and their JNI implementations.


public class VideoPlayer {

    static {
        System.loadLibrary("VideoPlayer");
    }

    public static native int play(String path, Object surface);
    public static native int stop();
    public static native float getFps();
    public static native float getGlobalFps();
}
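Note that play() blocks until the stream ends or stop() is called, so it must run off the UI thread. A minimal usage sketch (the Activity wiring and the layout id are assumptions for illustration; the file path matches the demo's /sdcard/test convention):

// Hypothetical Activity wiring; surface_view is an assumed layout id.
final SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface_view);
surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(final SurfaceHolder holder) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                // Blocks here; decoded frames are rendered to the Surface.
                VideoPlayer.play("/sdcard/test/1080P.h265", holder.getSurface());
            }
        }).start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int fmt, int w, int h) {}

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        VideoPlayer.stop(); // lets the native decode loop exit cleanly.
    }
});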


public class SampleReader {
    public static final int TYPE_HEVC = 1;
    private final int MEDIA_TYPE;

    static {
        System.loadLibrary("SampleReader");
    }
    public SampleReader(String path, int type){
        MEDIA_TYPE = type;
        nativeInit(path, type);
    }

    private native void nativeInit(String path, int type);

    /**
     * Reads the next sample (one NAL unit) into the given buffer.
     *
     * @param buffer destination; the native side appends via buffer.put(byte[])
     * @param offset unused in this demo
     * @return the size of the sample in bytes, or -1 at end of stream
     */
    public native int readSampleData(ByteBuffer buffer, int offset) throws Exception;
    public native int getVideoHeight();

    public native int getVideoWidth();

    public native long getLastSampleTime();

    public native float getFps();

    public native float getGlobalFps();

    public native int release();
}
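The MediaCodec side of the comparison is not shown in this post, so here is a minimal hard-decode sketch of how SampleReader could drive it. Everything in it is an assumption about intended usage rather than code from the demo: the 1920x1080 format hint, the 10 ms timeouts, relying on the VPS/SPS/PPS arriving in-band with the first NAL units instead of configuring csd-0, and the API 21+ buffer accessors all need checking against your stream and devices.

// Hypothetical worker-thread method; `surface` comes from a SurfaceView.
void hardDecodeLoop(Surface surface) throws Exception {
    SampleReader reader = new SampleReader("/sdcard/test/1080P.h265", SampleReader.TYPE_HEVC);
    MediaFormat format = MediaFormat.createVideoFormat("video/hevc", 1920, 1080); // assumed size
    MediaCodec codec = MediaCodec.createDecoderByType("video/hevc");
    codec.configure(format, surface, null, 0); // decode straight to the Surface
    codec.start();

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean inputDone = false;
    while (true) {
        if (!inputDone) {
            int inIndex = codec.dequeueInputBuffer(10000); // 10 ms timeout
            if (inIndex >= 0) {
                ByteBuffer inBuf = codec.getInputBuffer(inIndex); // API 21+
                inBuf.clear();
                int size = reader.readSampleData(inBuf, 0); // one NAL unit
                if (size < 0) { // -1 signals end of stream
                    codec.queueInputBuffer(inIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                } else {
                    // pts comes from the parser and may be AV_NOPTS_VALUE for a
                    // raw stream; substitute your own clock if rendering stutters.
                    codec.queueInputBuffer(inIndex, 0, size,
                            reader.getLastSampleTime(), 0);
                }
            }
        }
        int outIndex = codec.dequeueOutputBuffer(info, 10000);
        if (outIndex >= 0) {
            codec.releaseOutputBuffer(outIndex, true); // true = render to Surface
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
        }
    }
    codec.stop();
    codec.release();
    reader.release();
}

For most decoders the width and height passed to createVideoFormat are only hints; the dimensions parsed from the in-band SPS take over once decoding starts.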

Here is the JNI implementation. It is rough, but it should be enough to point you in the right direction.


sampleReader.c

// assumed includes for this translation unit; adjust paths to your tree.
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
#include <android/log.h>
#include "libavcodec/avcodec.h"

#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "SampleReader", __VA_ARGS__)

const static int TYPE_HEVC = 1;
char file_name[512];              // input path copied from Java
int media_type = 0;
uint8_t in_buffer[10240];         // raw file-read buffer fed to the parser
AVCodec *pCodec = NULL;
AVCodecContext *pCodecCtx = NULL;
AVCodecParserContext *pCodecParserCtx = NULL;
FILE *pFvideo_in = NULL;
AVPacket *pPacket = NULL;
uint8_t *cur_ptr = NULL;          // current parse position inside in_buffer
int cur_size = 0;                 // bytes left to parse in in_buffer
int videoHeight = 0;              // never set in this demo, see getVideoHeight
int videoWidth = 0;
int inited = 0;
long lastSampleTime = -1;         // pts of the most recently parsed packet





static int last1SecFps = 0;   // frames counted in the last full second
static int globalFps = 0;     // average fps since the first frame
static long current_time = 0;
static long _last = 0;        // start of the current one-second window
static long _first = 0;       // time of the first frame
static int _frames = 0;       // frames in the current window
static int totalFrams = 0;    // frames since the first frame
static int got_first_frame = 0;

// wall-clock time in milliseconds.
long getCurrentTime()
{
    struct timeval tv;
    gettimeofday(&tv, 0);
    return tv.tv_sec * 1000 + tv.tv_usec / 1000;
}

/*
 * Class:     io_vec_demo_mediacodec_SampleReader
 * Method:    getFps
 * Signature: ()F
 */
JNIEXPORT jfloat JNICALL Java_io_vec_demo_mediacodec_SampleReader_getFps
  (JNIEnv *env, jobject obj)
{
	return last1SecFps * 1.0f;
}

/*
 * Class:     io_vec_demo_mediacodec_SampleReader
 * Method:    getGlobalFps
 * Signature: ()F
 */
JNIEXPORT jfloat JNICALL Java_io_vec_demo_mediacodec_SampleReader_getGlobalFps
  (JNIEnv *env, jobject obj)
{
	return globalFps * 1.0f;
}




// Called once per sample/frame: maintains a rolling one-second counter and
// a global average (total frames * 1000 / elapsed ms since the first frame).
void updateLast1SecFPS()
{
    ++_frames;
    ++totalFrams;
    if (getCurrentTime() - _last >= 1000)
    {
        last1SecFps = _frames;
        _frames = 0;
        _last = getCurrentTime();
    }
    globalFps = (int)((totalFrams * 1000.0) / (getCurrentTime() - _first));
}






JNIEXPORT void JNICALL Java_io_vec_demo_mediacodec_SampleReader_nativeInit
  (JNIEnv *env, jobject obj, jstring jpath, jint type)
{
	inited = 0;
	avcodec_register_all();
	const char *str = (*env)->GetStringUTFChars(env, jpath, NULL);
	strcpy(file_name, str);
	(*env)->ReleaseStringUTFChars(env, jpath, str);
	media_type = type;
	int codec_id = 0;

	if(media_type == TYPE_HEVC){
		codec_id = AV_CODEC_ID_H265;
	}

    // Find the decoder for the video stream
	if(pCodec == NULL){
		pCodec = avcodec_find_decoder(codec_id);
	}

    if(pCodec == NULL) {
        LOGE("Codec not found.");
        return; // Codec not found
    }
	if(pCodecCtx == NULL){
		pCodecCtx = avcodec_alloc_context3(pCodec);
	}
    if(pCodecCtx == NULL){
        LOGE("avcodec alloc context fail!!\n");  
        return;
    }

    if(pCodecParserCtx == NULL){
		pCodecParserCtx = av_parser_init(codec_id);
	}
    if(pCodecParserCtx == NULL){
        LOGE("av parser init fail!!\n");
        return;
    }
	
	if(pFvideo_in == NULL){
		pFvideo_in = fopen(file_name, "rb");
	}

    if(pFvideo_in == NULL){
        LOGE("open input file %s fail !!!\n", file_name);
        return;
    }

	if(pPacket == NULL){
		pPacket = (AVPacket*)av_mallocz(sizeof(AVPacket));
	}
    if(pPacket == NULL){
        LOGE("av malloc packet fail!!\n");
        return;
    }
    av_init_packet(pPacket);
	// reset all bookkeeping for a fresh stream.
	inited = 1;
	got_first_frame = 0;
	last1SecFps = 0;
	globalFps = 0;
	current_time = 0;
	_last = 0;
	_first = 0;
	_frames = 0;
	totalFrams = 0;
}

JNIEXPORT jint JNICALL Java_io_vec_demo_mediacodec_SampleReader_readSampleData
  (JNIEnv *env, jobject obj, jobject byteBuffer, jint offset)
{
	int gotPacket = 0;

	while(1) {
		if(cur_ptr == NULL){//fill empty buffer.
			cur_size = fread(in_buffer, 1, sizeof(in_buffer), pFvideo_in);
			if(cur_size == 0){
				cur_ptr = NULL;
				gotPacket = -1;
				break; // end of stream (or read error): nothing more to parse.
			}else{
				cur_ptr = in_buffer;
			}
			
		}
		//after ensure buffer.
        while(cur_size > 0) {
            int len = av_parser_parse2(pCodecParserCtx, pCodecCtx, &(pPacket->data), &(pPacket->size),
                                        cur_ptr, cur_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
            cur_ptr += len;   // advance the parse pointer.
            cur_size -= len;  // bytes left to be parsed.
			if(cur_size == 0){
				cur_ptr = NULL; // mark the buffer empty so the outer loop refills it.
			}
            if(pPacket->size == 0){
				continue;
			}else{
				
				// got one packet (one NAL unit).
				gotPacket = pPacket->size;
				// copy it into the Java ByteBuffer via ByteBuffer.put(byte[]):
				jclass jclaz = (*env)->GetObjectClass(env, byteBuffer);
				jbyteArray jbarr = (*env)->NewByteArray(env, pPacket->size);
				(*env)->SetByteArrayRegion(env, jbarr,
                        0, pPacket->size, pPacket->data);
				jmethodID jmid = (*env)->GetMethodID(env, jclaz, "put", "([B)Ljava/nio/ByteBuffer;");
				jobject jobj = (*env)->CallObjectMethod(env, byteBuffer, jmid, jbarr);
				lastSampleTime = pPacket->pts;
				if(got_first_frame == 0){
					got_first_frame = 1;
					_last = getCurrentTime();
					_first = _last;
					_frames = 0;
					totalFrams = 0;
				}
				
				updateLast1SecFPS();
				av_packet_unref(pPacket);
				
				break;
			}
        }
		if(gotPacket > 0) break;
		
    }

	return gotPacket;
}


JNIEXPORT jint JNICALL Java_io_vec_demo_mediacodec_SampleReader_getVideoHeight
  (JNIEnv *env, jobject obj)
{
	// NOTE: nothing in this demo ever assigns videoHeight, so this always
	// returns 0; fill it in from the parsed SPS if your app needs it.
	return videoHeight;
}

/*
 * Class:     io_vec_demo_mediacodec_SampleReader
 * Method:    getVideoWidth
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_io_vec_demo_mediacodec_SampleReader_getVideoWidth
  (JNIEnv *env, jobject obj)
{
	// NOTE: like videoHeight, this is never assigned and always returns 0.
	return videoWidth;
}

/*
 * Class:     io_vec_demo_mediacodec_SampleReader
 * Method:    release
 * Signature: ()I
 */
JNIEXPORT jint JNICALL Java_io_vec_demo_mediacodec_SampleReader_release
  (JNIEnv * env, jobject obj)
{
	// guard against release() before init or a double release.
	if(pFvideo_in != NULL) fclose(pFvideo_in);
	if(pCodecParserCtx != NULL) av_parser_close(pCodecParserCtx);
	if(pCodecCtx != NULL){
		avcodec_close(pCodecCtx);
		av_free(pCodecCtx);
	}
	if(pPacket != NULL) av_free(pPacket);
	media_type = 0;
	pCodec = NULL;
	pCodecCtx = NULL;
	pCodecParserCtx = NULL;
	pFvideo_in = NULL;
	pPacket = NULL;
	cur_ptr = NULL;
	cur_size = 0;
	inited = 0;
	lastSampleTime = -1;
	got_first_frame = 0;
	last1SecFps = 0;
	globalFps = 0;
	current_time = 0;
	_last = 0;
	_first = 0;
	_frames = 0;
	totalFrams = 0;
	return 0;
}

/*
 * Class:     io_vec_demo_mediacodec_SampleReader
 * Method:    getLastSampleTime
 * Signature: ()J
 */
JNIEXPORT jlong JNICALL Java_io_vec_demo_mediacodec_SampleReader_getLastSampleTime
  (JNIEnv *env, jobject obj)
  {
	  return lastSampleTime;
  }
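Seen from Java, the contract of readSampleData is simple: each call appends exactly one NAL unit to the buffer through ByteBuffer.put(byte[]) and returns its size, or -1 at end of stream. A quick sanity-check loop (hypothetical, assuming it runs in a method declared to throw Exception):

SampleReader reader = new SampleReader("/sdcard/test/1080P.h265", SampleReader.TYPE_HEVC);
ByteBuffer buf = ByteBuffer.allocate(1 << 20); // assumes one NAL unit fits in 1 MiB
int size;
while ((size = reader.readSampleData(buf, 0)) > 0) {
    Log.d("SampleReader", "NAL unit: " + size + " bytes, pts=" + reader.getLastSampleTime());
    buf.clear(); // put() advances the position, so reset it for the next call
}
reader.release();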

VideoPlayer.c


// assumed includes for this translation unit; adjust paths to your tree.
#include <jni.h>
#include <stdio.h>
#include <string.h>
#include <sys/time.h>
#include <android/log.h>
#include <android/native_window_jni.h>
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"

#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, "VideoPlayer", __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "VideoPlayer", __VA_ARGS__)

static int last1SecFps = 0;   // frames counted in the last full second
static int globalFps = 0;     // average fps since decoding started
static int runningFlag = 0;   // cleared by stop() to end the decode loop
static long current_time = 0;
static long _last = 0;
static long _first = 0;
static int _frames = 0;
static int totalFrams = 0;

long getCurrentTime()
{
    struct timeval tv;
    gettimeofday(&tv, 0);
    return tv.tv_sec * 1000 + tv.tv_usec / 1000;
}

JNIEXPORT jfloat JNICALL Java_jonesx_videoplayer_VideoPlayer_getFps
  (JNIEnv *env, jclass clazz)
{
	return last1SecFps * 1.0f;
}

JNIEXPORT jfloat JNICALL Java_jonesx_videoplayer_VideoPlayer_getGlobalFps
  (JNIEnv *env, jclass clazz)
{
	  return globalFps * 1.0f;
}



// same bookkeeping as in sampleReader.c, called once per decoded frame.
void updateLast1SecFPS()
{
    ++_frames;
    ++totalFrams;
    if (getCurrentTime() - _last >= 1000)
    {
        last1SecFps = _frames;
        _frames = 0;
        _last = getCurrentTime();
    }
    globalFps = (int)((totalFrams * 1000.0) / (getCurrentTime() - _first));
}

JNIEXPORT jint JNICALL Java_jonesx_videoplayer_VideoPlayer_stop
  (JNIEnv *env, jclass clazz)
{
	runningFlag = 0;
	return 1;
}

JNIEXPORT jint JNICALL Java_jonesx_videoplayer_VideoPlayer_play
        (JNIEnv * env, jclass clazz, jstring jpath, jobject surface)
{
	runningFlag = 1;
    LOGD("1------------------play-------------------------");
    const int in_buffer_size = 10240;
    uint8_t in_buffer[in_buffer_size]; 
    int frameFinished;
    int cur_size = 0;
    uint8_t *cur_ptr;
    int ret;
	char file_name[512];
	const char *str = (*env)->GetStringUTFChars(env, jpath, NULL);
	strcpy(file_name, str);
	(*env)->ReleaseStringUTFChars(env, jpath, str);
    //char * file_name = "/sdcard/test/1080P.h265";
    avcodec_register_all();
    int codec_id = AV_CODEC_ID_H265;
    




    // Find the decoder for the video stream
    AVCodec * pCodec = avcodec_find_decoder(codec_id);
    if(pCodec == NULL) {
        LOGE("Codec not found.");
        return -1; // Codec not found
    }

    AVCodecContext  * pCodecCtx = avcodec_alloc_context3(pCodec);
    if (!pCodecCtx){
        LOGE("avcodec alloc context fail!!\n");  
        return -1;
    }

    AVCodecParserContext *pCodecParserCtx = NULL;
    pCodecParserCtx = av_parser_init(codec_id);
    if (!pCodecParserCtx){
        LOGE("av parser init fail!!\n");
        return -1;
    }


    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.");
        return -1; // Could not open codec
    }

    FILE *pFvideo_in = fopen(file_name, "rb");
    if(pFvideo_in == NULL)
    {
        LOGE("open input file %s fail !!!\n", file_name);
        return -1;
    }


    // Allocate video frame
    AVFrame * pFrame = av_frame_alloc();

    // used to render.
    AVFrame * pFrameRGBA = av_frame_alloc();
    if(pFrameRGBA == NULL || pFrame == NULL) {
        LOGE("Could not allocate video frame.");
        return -1;
    }

    AVPacket *pPacket = (AVPacket*)av_mallocz(sizeof(AVPacket));
    
    if(pPacket == NULL)
    {
        LOGE("av malloc packet fail!!\n");
        return -1;
    }
    av_init_packet(pPacket);


    // acquire native window
    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);

    //int videoWidth = 0;//pCodecCtx->width;
    //int videoHeight = 0;//pCodecCtx->height;

    
    ANativeWindow_Buffer windowBuffer;
    LOGD("2------------------play-------------------------");


    // Determine required buffer size and allocate buffer
    // buffer data is used to render and format is RGBA
    
    uint8_t * buffer = NULL;
    

    // the decoded data is YUV fmt, so we need to convert to rgba before rendering.
    struct SwsContext *sws_ctx = NULL;

    _last = getCurrentTime();
	_first = _last;
	_frames = 0;
	totalFrams = 0;
    while(runningFlag) {

        cur_size = fread(in_buffer, 1, in_buffer_size, pFvideo_in);
        if(cur_size == 0)
        {
            LOGD("3------------------play-------------------------");
            break;
        }
        cur_ptr = in_buffer;
        // parse and decode everything currently in the read buffer.
        while(cur_size > 0) {
            int len = av_parser_parse2(pCodecParserCtx, pCodecCtx, &(pPacket->data), &(pPacket->size),
                                        cur_ptr, cur_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);
            cur_ptr += len;   // advance the parse pointer.
            cur_size -= len;  // bytes left to be parsed.

            if(pPacket->size == 0) continue;

            // Decode video frame
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, pPacket);
            if(ret < 0)
            {
                //LOGD("4------------------play-------------------------");
                LOGE("decode error!!!!\n");
                break;
            }

            // a single decode call does not always yield a finished frame.
            if (frameFinished) {
				updateLast1SecFPS();
                LOGD("3------------------play-------------------------");
                long end_time = getCurrentTime();
                LOGE("~~~~~~~~~~~~~~~~~%ld~~~~~~~~~~~~~~~~~~~~~\n", (end_time - current_time));
                current_time = end_time;
                if(buffer == NULL && sws_ctx == NULL)
                {
                    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
                    buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
                    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
                         pCodecCtx->width, pCodecCtx->height, 1);
                    // set the native window's buffer geometry; the window scales to fit.
                    ANativeWindow_setBuffersGeometry(nativeWindow,  pCodecCtx->width, pCodecCtx->height,
                                                     WINDOW_FORMAT_RGBA_8888);

                    sws_ctx = sws_getContext(pCodecCtx->width,
                             pCodecCtx->height,
                             pCodecCtx->pix_fmt,
                             pCodecCtx->width,
                             pCodecCtx->height,
                             AV_PIX_FMT_RGBA,
                             SWS_BILINEAR,
                             NULL,
                             NULL,
                             NULL);
                }
                

                // lock native window buffer
                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);

                // convert format
                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
                          pFrame->linesize, 0, pCodecCtx->height,
                          pFrameRGBA->data, pFrameRGBA->linesize);

                // copy line by line, respecting the two different strides.
                uint8_t * dst = windowBuffer.bits;
                int dstStride = windowBuffer.stride * 4;
                uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
                int srcStride = pFrameRGBA->linesize[0];

                int h;
                for (h = 0; h < pCodecCtx->height; h++) {
                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
                }

                ANativeWindow_unlockAndPost(nativeWindow);
            }

        }
        av_packet_unref(pPacket);
    }

    fclose(pFvideo_in);

    av_free(buffer);
    av_free(pFrameRGBA);
    av_parser_close(pCodecParserCtx);

    // Free the YUV frame
    av_free(pFrame);

    // Close the codecs
    avcodec_close(pCodecCtx);
    av_free(pCodecCtx);
    av_free(pPacket);

    // release the scaler and the native window (otherwise they leak).
    if (sws_ctx != NULL) sws_freeContext(sws_ctx);
    ANativeWindow_release(nativeWindow);
    return 0;
}


If this all looks messy, don't worry: I have uploaded the source code, and it runs. Remember to create a test directory on the sdcard and put your video file there. It must be a raw H.265 elementary stream, not an mp4.


Download link: http://download.csdn.net/detail/mtaxot/9800163




