How to implement H264 decoding on the Android platform

H264 decoding on Android is commonly implemented with the open-source ffmpeg, which immediately raises a question: how do you port ffmpeg to the Android platform?

There are plenty of porting guides online, and I found them dizzying, so I took a shortcut: after building XBMC for Android, the compiled ffmpeg libraries end up under ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/, and they can simply be extracted and used directly.

Note: for how to build XBMC for Android, see my earlier post: http://blog.csdn.net/u011469509/article/details/9744385


Now that we have the ffmpeg libraries for Android, how do we test H264 decoding on an Android device?


Create a new Android app project, ffmpeg_android

gedit ~/.bashrc

Add /opt/android-sdk-linux/tools to the PATH environment variable

android list targets

android create project --target 1 --name ffmpeg_android --path ~/workspace/ffmpeg_android --package com.modukaikai.ffmpeg --activity ffmpegActivity


Copy the ffmpeg source code from XBMC into the project's jni directory

mkdir ~/workspace/ffmpeg_android/jni

cp -R ~/xbmc-android/lib/ffmpeg ~/workspace/ffmpeg_android/jni/


Implementing the H264 decoder

vim ~/workspace/ffmpeg_android/jni/h264_decoder.h

Below is the code of h264_decoder.h:

#ifndef h264_decoder_h
#define h264_decoder_h
 
extern "C"
{
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavcodec/avcodec.h"

typedef void (*tp_avcodec_register_all)(void);
typedef AVCodec *(*tp_avcodec_find_decoder)(enum AVCodecID id);
typedef AVCodecContext *(*tp_avcodec_alloc_context3)(const AVCodec *codec);
typedef int (*tp_avcodec_open2)(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **options);
typedef AVFrame *(*tp_avcodec_alloc_frame)(void);
typedef void (*tp_avcodec_free_frame)(AVFrame **frame);
typedef int (*tp_avcodec_decode_video2)(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr, const AVPacket *avpkt);
typedef int (*tp_avcodec_close)(AVCodecContext *avctx);
typedef void (*tp_av_free)(void *ptr);
typedef void (*tp_av_init_packet)(AVPacket *pkt);

// Holds the ffmpeg entry points resolved from the shared libraries with dlsym().
class H264DecoderInterface
{
public:
    H264DecoderInterface()
    {
      p_avcodec_register_all = NULL;
      p_avcodec_find_decoder = NULL;
      p_avcodec_alloc_context3 = NULL;
      p_avcodec_open2 = NULL;
      p_avcodec_alloc_frame = NULL;
      p_avcodec_free_frame = NULL;
      p_avcodec_decode_video2 = NULL;
      p_avcodec_close = NULL;
      p_av_free = NULL;
      p_av_init_packet = NULL;
    };

    ~H264DecoderInterface()
    {
    };

    void (*p_avcodec_register_all)(void);
    AVCodec *(*p_avcodec_find_decoder)(enum AVCodecID id);
    AVCodecContext *(*p_avcodec_alloc_context3)(const AVCodec *codec);
    int (*p_avcodec_open2)(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **options);
    AVFrame *(*p_avcodec_alloc_frame)(void);
    void (*p_avcodec_free_frame)(AVFrame **frame);
    int (*p_avcodec_decode_video2)(AVCodecContext *avctx, AVFrame *picture, int *got_picture_ptr, const AVPacket *avpkt);
    int (*p_avcodec_close)(AVCodecContext *avctx);
    void (*p_av_free)(void *ptr);
    void (*p_av_init_packet)(AVPacket *pkt);
};
 
// Thin wrapper around ffmpeg's H264 decoder, loaded dynamically at runtime.
class h264_decoder
{
public:
    // dlopen()/dlclose() the ffmpeg shared libraries and resolve the required symbols.
    static bool loadLibrary(const char *libDir);
    static bool unloadLibrary();

    h264_decoder();
    ~h264_decoder();

    bool open_h264_decoder();
    // Decodes one H264 frame from pSrcData into pDstData (planar YUV420);
    // bNoOutputDecData is set to true when no picture was produced.
    unsigned long h264_decoder_process(unsigned char* pSrcData, unsigned long srcDataLen, unsigned char* pDstData, bool& bNoOutputDecData);
    bool close_h264_decoder();

    void setFrameRate(int frameRate);
    void setResolution(int width, int height);

private:
    static void* avcodec_handle;
    static void* avfilter_handle;
    static void* avformat_handle;
    static void* avutil_handle;
    static void* postproc_handle;
    static void* swresample_handle;
    static void* swscale_handle;
    static H264DecoderInterface *p_H264DecoderInterface;

    struct AVCodec *p_AVCodec;
    struct AVCodecContext *p_AVCodecContext;
    struct AVFrame *p_AVFrame;
    struct AVPacket avPacket;

    int got_frame;

    bool isOpened;

    int video_frameRate;
    int video_width;
    int video_height;
};
#endif

vim ~/workspace/ffmpeg_android/jni/h264_decoder.cpp

Below is the code of h264_decoder.cpp:

#include "h264_decoder.h"
#include <dlfcn.h>
#include <string.h>

void* h264_decoder::avcodec_handle = NULL;
void* h264_decoder::avfilter_handle = NULL;
void* h264_decoder::avformat_handle = NULL;
void* h264_decoder::avutil_handle = NULL;
void* h264_decoder::postproc_handle = NULL;
void* h264_decoder::swresample_handle = NULL;
void* h264_decoder::swscale_handle = NULL;
H264DecoderInterface* h264_decoder::p_H264DecoderInterface = NULL;

bool h264_decoder::loadLibrary(const char *libDir)
{
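    // dlopen() each ffmpeg shared object in turn, starting with libavutil, which the other ffmpeg libraries depend on.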
    char libPath[200];

    memset(libPath,0,sizeof(char)*200);
    strcpy(libPath,libDir);
    strcat(libPath,"/libavutil-52-arm.so");
    h264_decoder::avutil_handle= dlopen(libPath, RTLD_NOW);
    if(!h264_decoder::avutil_handle){ printf("load avutil library fail!\n"); printf("%s\n",dlerror());}

    memset(libPath,0,sizeof(char)*200);
    strcpy(libPath,libDir);
    strcat(libPath,"/libavcodec-54-arm.so");
    h264_decoder::avcodec_handle= dlopen(libPath, RTLD_NOW);
    if(!h264_decoder::avcodec_handle){ printf("load avcodec library fail!\n"); printf("%s\n",dlerror());}

    memset(libPath,0,sizeof(char)*200);
    strcpy(libPath,libDir);
    strcat(libPath,"/libavformat-54-arm.so");
    h264_decoder::avformat_handle= dlopen(libPath, RTLD_NOW);
    if(!h264_decoder::avformat_handle){ printf("load avformat library fail!\n"); printf("%s\n",dlerror());}

    memset(libPath,0,sizeof(char)*200);
    strcpy(libPath,libDir);
    strcat(libPath,"/libswresample-0-arm.so");
    h264_decoder::swresample_handle= dlopen(libPath, RTLD_NOW);
    if(!h264_decoder::swresample_handle){ printf("load swresample library fail!\n"); printf("%s\n",dlerror());}

    memset(libPath,0,sizeof(char)*200);
    strcpy(libPath,libDir);
    strcat(libPath,"/libpostproc-52-arm.so");
    h264_decoder::postproc_handle= dlopen(libPath, RTLD_NOW);
    if(!h264_decoder::postproc_handle){ printf("load postproc library fail!\n"); printf("%s\n",dlerror());}

    memset(libPath,0,sizeof(char)*200);
    strcpy(libPath,libDir);
    strcat(libPath,"/libswscale-2-arm.so");
    h264_decoder::swscale_handle= dlopen(libPath, RTLD_NOW);
    if(!h264_decoder::swscale_handle){ printf("load swscale library fail!\n"); printf("%s\n",dlerror());}

    memset(libPath,0,sizeof(char)*200);
    strcpy(libPath,libDir);
    strcat(libPath,"/libavfilter-3-arm.so");
    h264_decoder::avfilter_handle= dlopen(libPath, RTLD_NOW);
    if(!h264_decoder::avfilter_handle){ printf("load avfilter library fail!\n"); printf("%s\n",dlerror());}


    if(!h264_decoder::avcodec_handle || !h264_decoder::avfilter_handle || !h264_decoder::avformat_handle || !h264_decoder::avutil_handle || !h264_decoder::postproc_handle || !h264_decoder::swresample_handle || !h264_decoder::swscale_handle){
      printf("load ffmpeg library fail!\n");
      h264_decoder::unloadLibrary();
      return false;
    }else{
      printf("load ffmpeg library success!\n");
    }

    h264_decoder::p_H264DecoderInterface = new H264DecoderInterface();
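    // Resolve the entry points used by the decoder: av_free comes from libavutil, everything else from libavcodec.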
    h264_decoder::p_H264DecoderInterface->p_avcodec_register_all = (tp_avcodec_register_all)dlsym(h264_decoder::avcodec_handle, "avcodec_register_all");
    h264_decoder::p_H264DecoderInterface->p_avcodec_find_decoder = (tp_avcodec_find_decoder)dlsym(h264_decoder::avcodec_handle, "avcodec_find_decoder");
    h264_decoder::p_H264DecoderInterface->p_avcodec_alloc_context3 = (tp_avcodec_alloc_context3)dlsym(h264_decoder::avcodec_handle, "avcodec_alloc_context3");
    h264_decoder::p_H264DecoderInterface->p_avcodec_open2 = (tp_avcodec_open2)dlsym(h264_decoder::avcodec_handle,"avcodec_open2");
    h264_decoder::p_H264DecoderInterface->p_avcodec_alloc_frame = (tp_avcodec_alloc_frame)dlsym(h264_decoder::avcodec_handle, "avcodec_alloc_frame");
    h264_decoder::p_H264DecoderInterface->p_avcodec_free_frame = (tp_avcodec_free_frame)dlsym(h264_decoder::avcodec_handle, "avcodec_free_frame");
    h264_decoder::p_H264DecoderInterface->p_avcodec_decode_video2 = (tp_avcodec_decode_video2)dlsym(h264_decoder::avcodec_handle, "avcodec_decode_video2");
    h264_decoder::p_H264DecoderInterface->p_avcodec_close = (tp_avcodec_close)dlsym(h264_decoder::avcodec_handle, "avcodec_close");
    h264_decoder::p_H264DecoderInterface->p_av_init_packet = (tp_av_init_packet)dlsym(h264_decoder::avcodec_handle, "av_init_packet");
    h264_decoder::p_H264DecoderInterface->p_av_free = (tp_av_free)dlsym(h264_decoder::avutil_handle, "av_free");

    if(!h264_decoder::p_H264DecoderInterface || !h264_decoder::p_H264DecoderInterface->p_avcodec_register_all || !h264_decoder::p_H264DecoderInterface->p_avcodec_find_decoder || !h264_decoder::p_H264DecoderInterface->p_avcodec_alloc_context3 || !h264_decoder::p_H264DecoderInterface->p_avcodec_open2 || !h264_decoder::p_H264DecoderInterface->p_avcodec_alloc_frame || !h264_decoder::p_H264DecoderInterface->p_avcodec_free_frame || !h264_decoder::p_H264DecoderInterface->p_avcodec_decode_video2 || !h264_decoder::p_H264DecoderInterface->p_avcodec_close || !h264_decoder::p_H264DecoderInterface->p_av_init_packet || !h264_decoder::p_H264DecoderInterface->p_av_free){
      printf("load function symbols from ffmpeg library fail!\n");
      h264_decoder::unloadLibrary();
      return false;
    }else{
      printf("load function symbols from ffmpeg library success!\n");
    }
    
    return true;
}

bool h264_decoder::unloadLibrary()
{
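    // dlclose() every library handle that was opened and release the interface object.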
    if(h264_decoder::avcodec_handle)
    {
      dlclose(h264_decoder::avcodec_handle);
      h264_decoder::avcodec_handle = NULL;
    }
    
    if(h264_decoder::avfilter_handle)
    {
      dlclose(h264_decoder::avfilter_handle);
      h264_decoder::avfilter_handle = NULL;
    }
    
    if(h264_decoder::avformat_handle)
    {
      dlclose(h264_decoder::avformat_handle);
      h264_decoder::avformat_handle = NULL;
    }

    if(h264_decoder::avutil_handle)
    {
      dlclose(h264_decoder::avutil_handle);
      h264_decoder::avutil_handle = NULL;
    }

    if(h264_decoder::postproc_handle)
    {
      dlclose(h264_decoder::postproc_handle);
      h264_decoder::postproc_handle = NULL;
    }

    if(h264_decoder::swresample_handle)
    {
      dlclose(h264_decoder::swresample_handle);
      h264_decoder::swresample_handle = NULL;
    }

    if(h264_decoder::swscale_handle)
    {
      dlclose(h264_decoder::swscale_handle);
      h264_decoder::swscale_handle = NULL;
    }

    if(h264_decoder::p_H264DecoderInterface)
    {
      delete h264_decoder::p_H264DecoderInterface;
      h264_decoder::p_H264DecoderInterface = NULL;
    }

    return true;
}
 
h264_decoder::h264_decoder()
{
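    // loadLibrary() must have succeeded before constructing a decoder, since av_init_packet() is already called through the resolved interface here.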
    p_AVCodec = NULL;
    p_AVCodecContext = NULL;
    p_AVFrame = NULL;
 
    isOpened = false;
 
    video_frameRate = 25;
    video_width = 0;
    video_height = 0;
 
    got_frame = 0;
 
    h264_decoder::p_H264DecoderInterface->p_av_init_packet(&avPacket);
 
}
 
h264_decoder::~h264_decoder()
{
    this->close_h264_decoder();
}
 
void h264_decoder::setFrameRate(int frameRate)
{
    this->video_frameRate = frameRate;
}
void h264_decoder::setResolution(int width, int height)
{
    this->video_width = width;
    this->video_height = height;
}
 
bool h264_decoder::open_h264_decoder()
{
    if (isOpened)
    {
        return true;
    }
 
    h264_decoder::p_H264DecoderInterface->p_avcodec_register_all();
 
    // Look up ffmpeg's built-in H264 decoder, then allocate and configure a codec context for it.
    p_AVCodec = h264_decoder::p_H264DecoderInterface->p_avcodec_find_decoder(AV_CODEC_ID_H264);
 
    if (NULL != p_AVCodec){
        p_AVCodecContext = h264_decoder::p_H264DecoderInterface->p_avcodec_alloc_context3(p_AVCodec);
 
        if (p_AVCodecContext != NULL){
 
            p_AVCodecContext->time_base.num = 1;
            p_AVCodecContext->time_base.den = video_frameRate;
            p_AVCodecContext->bit_rate = 0;
            p_AVCodecContext->frame_number = 1;
            p_AVCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
 
            p_AVCodecContext->width = video_width;
            p_AVCodecContext->height = video_height;
 
            if (h264_decoder::p_H264DecoderInterface->p_avcodec_open2(p_AVCodecContext,p_AVCodec,NULL)==0){
                p_AVFrame = h264_decoder::p_H264DecoderInterface->p_avcodec_alloc_frame();
 
                if (p_AVFrame){
                    isOpened = true;
                    return true;
                }else{
                    return false;
                }
 
            }else{
                return false;
            }
 
        }else{
            return false;
        }
    }else{
        return false;
    }
 
}
 
unsigned long h264_decoder::h264_decoder_process(unsigned char* pSrcData, unsigned long srcDataLen, unsigned char* pDstData, bool& bNoOutputDecData)
{
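    // Wrap the incoming H264 frame in an AVPacket and hand it to avcodec_decode_video2().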
    avPacket.data = pSrcData;
    avPacket.size = srcDataLen;
    avPacket.flags = AV_PKT_FLAG_KEY;
 
    int returnValue = h264_decoder::p_H264DecoderInterface->p_avcodec_decode_video2(p_AVCodecContext,p_AVFrame,&got_frame,&avPacket);
 
    if (returnValue<=0)
    {
        bNoOutputDecData = true;
        return returnValue;
    }
 
    if (got_frame)
    {
        // Copy the three YUV420 planes (Y, then U, then V) out of the AVFrame row by row,
        // using linesize so that any per-row padding added by the decoder is stripped.
        for (int i = 0, nDataLen = 0; i<3; i++)
        {
            int nShift = (i==0)?0:1;
 
            unsigned char* pYUVData = p_AVFrame->data[i];
 
            for (int j=0; j<(p_AVCodecContext->height >> nShift); j++)
            {
                memcpy(pDstData+nDataLen, pYUVData,(p_AVCodecContext->width >> nShift));
                pYUVData += p_AVFrame->linesize[i];
                nDataLen += (p_AVCodecContext->width >> nShift);
            }
        }
 
        bNoOutputDecData = false;
    }else{
        bNoOutputDecData = true;
    }
 
    got_frame = 0;
 
    return returnValue;
}
 
bool h264_decoder::close_h264_decoder()
{
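    // Free the decoded frame, close and free the codec context, and reset the decoder state.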
    if (p_AVFrame)
    {
        h264_decoder::p_H264DecoderInterface->p_avcodec_free_frame(&p_AVFrame);
        p_AVFrame = NULL;
    }
 
    if (p_AVCodecContext)
    {
        h264_decoder::p_H264DecoderInterface->p_avcodec_close(p_AVCodecContext);
        h264_decoder::p_H264DecoderInterface->p_av_free(p_AVCodecContext);
        p_AVCodecContext = NULL;
    }
 
    p_AVCodec = NULL;
 
    isOpened = false;
 
    video_frameRate = 25;
    video_width = 0;
    video_height = 0;
 
    got_frame = 0;
 
    h264_decoder::p_H264DecoderInterface->p_av_init_packet(&avPacket);
 
    return true;
}


vim ~/workspace/ffmpeg_android/jni/ffmpeg_main.cpp

Below is the code of ffmpeg_main.cpp:

#include "h264_decoder.h"
 
int main(int argc, char* argv[])
{
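    // Load the ffmpeg shared libraries from /data/ffmpeg/ffmpeg_libs on the device.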
    if(!h264_decoder::loadLibrary("/data/ffmpeg/ffmpeg_libs"))
    {
      return -1;
    }
    
    h264_decoder *p_decoder = NULL;
    p_decoder = new h264_decoder;
    if(!p_decoder)
    {
      return -1;
    }
 
    FILE *inputFile = NULL;
    FILE *outputFile = NULL;
 
    inputFile = fopen("h264_640x480.h264","rb");
    if(!inputFile){
      printf("open h264_640x480.h264 fail!\n");
      return -1;
    }
    outputFile = fopen("yuv_640x480.yuv","wb");
    if(!outputFile){
      printf("open yuv_640x480.yuv fail!\n");
      return -1;
    }

 
    unsigned char *inputData;
    unsigned char *outputData;
 
    inputData = (unsigned char *)malloc(640*480*3/2*sizeof(unsigned char));
    outputData = (unsigned char *)malloc(640*480*3/2*sizeof(unsigned char));
 
    // Hard-coded byte sizes of the 10 encoded frames read from h264_640x480.h264 in this demo.
    unsigned long inputDataSizeArray[10] = {2896,1406,1952,2218,1190,3228,706,2418,2038,1871};
 
    bool bNoOutputDecData = true;
 
    p_decoder->setFrameRate(25);
    p_decoder->setResolution(640,480);

 
    if(!p_decoder->open_h264_decoder())
    {
      printf("open_h264_decoder fail!\n");
      return -1;
    }
 
        printf("start decode ...\n");
 
    for (int i=0; i < 10; i++)
    {
        fread(inputData,1,inputDataSizeArray[i],inputFile);
 
        bNoOutputDecData = true;
        unsigned long res = p_decoder->h264_decoder_process(inputData,inputDataSizeArray[i],outputData,bNoOutputDecData);
 
        if (res>0 && !bNoOutputDecData)
        {
            fwrite(outputData,640*480*3/2,1,outputFile);
        }
    }
 
        printf("end decode ...\n");
 
    p_decoder->close_h264_decoder();
    delete p_decoder;
    p_decoder = NULL;
 
    fclose(inputFile);
    fclose(outputFile);

 
    if (inputData)
    {
        free(inputData);
        inputData = NULL;
    }
 
    if (outputData)
    {
        free(outputData);
        outputData = NULL;
    }

    h264_decoder::unloadLibrary();
 
    return 0;
}


vim ~/workspace/ffmpeg_android/jni/Android.mk

Below is the Android.mk build script (note that ndk-build expects the file to be named Android.mk, with a capital A):

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_C_INCLUDES := $(LOCAL_PATH)/ffmpeg \
    $(LOCAL_PATH)

LOCAL_SRC_FILES := h264_decoder.cpp \
    ffmpeg_main.cpp

LOCAL_MODULE := ffmpeg_android

include $(BUILD_EXECUTABLE)


Build

cd ~/workspace/ffmpeg_android
ndk-build

Error encountered: /opt/android-ndk-r8e/build/gmsl/__gmsl:512: *** non-numeric second argument

Fix: add <uses-sdk android:minSdkVersion="9" /> to AndroidManifest.xml (inside the <manifest> element)

Error encountered: jni/ffmpeg/libavutil/common.h:175:47: error: 'UINT64_C' was not declared in this scope

Fix: add the following to jni/ffmpeg/libavutil/common.h:
// add by XXX
#ifndef UINT64_C
#define UINT64_C(value) __CONCAT(value, ULL)
#endif
//-//
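
As an alternative that avoids patching the ffmpeg sources, it should also be possible to make sure __STDC_CONSTANT_MACROS is defined before any ffmpeg header is included, for example via LOCAL_CFLAGS += -D__STDC_CONSTANT_MACROS in Android.mk, or with the lines below at the very top of h264_decoder.h. This is only a sketch I have not verified against this exact build:

// Define before including the ffmpeg headers so that stdint.h also exposes
// UINT64_C (and the other *_C macros) to C++ code.
#ifndef __STDC_CONSTANT_MACROS
#define __STDC_CONSTANT_MACROS
#endif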

The build then succeeds, and the Android executable ffmpeg_android (the H264 decoding demo) is generated under ~/workspace/ffmpeg_android/libs/armeabi-v7a/.


Running the executable ffmpeg_android on an Android device

Create two directories on the device:

adb shell
su
mkdir /data/ffmpeg

mkdir /data/ffmpeg/ffmpeg_libs
exit
exit

Push the shared libraries and the executable into the corresponding directories on the device:

adb push ~/workspace/ffmpeg_android/libs/armeabi-v7a/ffmpeg_android /data/ffmpeg

adb push ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/libavcodec-54-arm.so /data/ffmpeg/ffmpeg_libs

adb push ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/libavfilter-3-arm.so /data/ffmpeg/ffmpeg_libs

adb push ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/libavutil-52-arm.so /data/ffmpeg/ffmpeg_libs

adb push ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/libavformat-54-arm.so /data/ffmpeg/ffmpeg_libs

adb push ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/libswresample-0-arm.so /data/ffmpeg/ffmpeg_libs

adb push ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/libpostproc-52-arm.so /data/ffmpeg/ffmpeg_libs

adb push ~/xbmc-android/tools/android/packaging/xbmc/lib/armeabi-v7a/libswscale-2-arm.so /data/ffmpeg/ffmpeg_libs


Push the test bitstream h264_640x480.h264 to the device:

adb push ~/workspace/ffmpeg_android/jni/h264_640x480.h264 /data/ffmpeg


Run the executable on the device:

adb shell
su
cd /data/ffmpeg
./ffmpeg_android


Finally, the decoded YUV data file yuv_640x480.yuv is generated under /data/ffmpeg on the device. Pull yuv_640x480.yuv back to the PC:

adb pull /data/ffmpeg/yuv_640x480.yuv ~/Desktop/


Testing the decoded YUV data

sudo apt-get install mplayer
mplayer -demuxer rawvideo -rawvideo w=640:h=480:format=i420 ~/Desktop/yuv_640x480.yuv -loop 0

Note: since this is a raw-data file that contains only image data, parameters such as width, height and format must be supplied on the command line.
Typical format options are: 4:2:0: yv12, i420; 4:2:2: yuy2, uyvy.
The formats supported by mplayer can be listed with:
mplayer -rawvideo format=help
The option -loop 0 makes playback loop forever.


The above is offered as a reference; if you run into any problems, feel free to leave me a comment. Thanks!
