4. Wrapping the JNI Decoder Library Interface

This article shows how to wrap the decoder library behind a JNI interface and then build an .so file that can be used directly.

For the build environment, refer to Part 2, Compiling FFmpeg.

1. In android-ndk-r9d/samples, create a new folder demo2. Inside demo2 create a jni folder, and inside jni create an arm folder. Copy the FFmpeg shared libraries and header files built earlier into the arm folder, giving a layout like the one sketched below.
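
The library and header names below are taken from the Android.mk in step 4; the resulting layout should look roughly like this (adjust the names to whatever your own build from Part 2 actually produced):

demo2/
└── jni/
    ├── Android.mk
    ├── Application.mk
    ├── ALog.h
    ├── H264Decoder.c
    └── arm/
        ├── include/   (libavcodec, libavformat, libavutil, libavfilter, libswresample, libswscale headers)
        └── lib/       (libavcodec.so, libavformat.so, libavutil.so, libavfilter.so, libswresample.so, libswscale.so)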


2. Write ALog.h, which is used to print log messages while debugging.

#include <android/log.h>

#define ALOGI(fmt,args...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, fmt, ##args)
#define ALOGD(fmt,args...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, fmt, ##args)
#define ALOGE(fmt,args...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, fmt, ##args)
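
ALog.h does not define LOG_TAG itself; each source file that includes it is expected to define its own tag before using the macros (H264Decoder.c below defines LOG_TAG as "H264Decoder.c"). A minimal usage sketch, with placeholder names:

#define LOG_TAG "MyFile.c"   // placeholder tag shown in logcat
#include "ALog.h"

void some_function(void)
{
    ALOGD("decoder initialised");
    ALOGE("avcodec_open2 failed");
}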

3. Write H264Decoder.c. On the Java side, the package must be declared as com.decoder.util and the class must be named DecH264, and the native method names must match those in the code below; this is what the JNI function names of the form Java_com_decoder_util_DecH264_<method> require.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <jni.h>

#include "libavcodec/avcodec.h"
#include "ALog.h"

#define LOG_TAG "H264Decoder.c"

// Decoder context and codec
struct AVCodecContext   *pAVCodecCtx;
struct AVCodec          *pAVCodec;

// Input packet
struct AVPacket mAVPacket;

// Decoded output frame
struct AVFrame *pAVFrame;

// Current video width and height
int iWidth = 0;
int iHeight = 0;

// Initialize the decoder
jint Java_com_decoder_util_DecH264_InitDecoder(JNIEnv* env, jobject thiz)
{
    av_register_all();

    pAVCodec = avcodec_find_decoder(CODEC_ID_H264);
    if (pAVCodec == NULL)
    {
        return -1;
    }

    pAVCodecCtx = avcodec_alloc_context3(pAVCodec);
    if (pAVCodecCtx == NULL)
    {
        return -1;
    }

    // Optionally tell the decoder that input packets may contain incomplete frames,
    // if the codec supports it:
    // if (pAVCodec->capabilities & CODEC_CAP_TRUNCATED)
    // {
    //     pAVCodecCtx->flags |= CODEC_FLAG_TRUNCATED;
    // }

    if (avcodec_open2(pAVCodecCtx, pAVCodec, NULL) < 0)
    {
        return -1;
    }

    av_init_packet(&mAVPacket);
    pAVFrame = av_frame_alloc();
    if (pAVFrame == NULL)
    {
        return -1;
    }

    return 0;
}

// Release decoder resources
jint Java_com_decoder_util_DecH264_UnInitDecoder(JNIEnv* env, jobject thiz)
{
    iWidth = 0;
    iHeight = 0;

    if (pAVCodecCtx != NULL)
    {
        avcodec_close(pAVCodecCtx);
        av_free(pAVCodecCtx);
        pAVCodecCtx = NULL;
    }

    if (pAVFrame != NULL)
    {
        av_frame_free(&pAVFrame);
        pAVFrame = NULL;
    }

    return 0;
}

// Decode one H.264 NAL unit
jint Java_com_decoder_util_DecH264_DecoderNal(JNIEnv* env, jobject thiz,
    jbyteArray in, jint nalLen, jintArray prama, jbyteArray out)
{
    int len = -1, got_picture = 0;

    jbyte* inbuf = (*env)->GetByteArrayElements(env, in, 0);
    jint* outPrama = (jint*)(*env)->GetIntArrayElements(env, prama, 0);
    jbyte* picture = (*env)->GetByteArrayElements(env, out, 0);

    mAVPacket.data = (uint8_t *)inbuf;
    mAVPacket.size = nalLen;

    len = avcodec_decode_video2(pAVCodecCtx, pAVFrame, &got_picture, &mAVPacket);
    if (got_picture)
    {   
        // If the resolution changed during transmission, record the new width and height of the current frame
        if ((pAVCodecCtx->width != iWidth) || (pAVCodecCtx->height != iHeight))
        {
            iWidth = pAVCodecCtx->width;
            iHeight = pAVCodecCtx->height;
        }

        // Copy the decoded YUV planes into the output buffer
        int i, j, nshift, nBufPtr = 0;
        uint8_t* yuv_factor;
        for (i = 0; i < 3; i++)
        {
            nshift = (i == 0 ? 0 : 1);
            yuv_factor = pAVFrame->data[i];
            for (j = 0; j < pAVCodecCtx->height>>nshift; j++)
            {
                memcpy((char*)picture + nBufPtr, yuv_factor, pAVCodecCtx->width>>nshift);
                yuv_factor += pAVFrame->linesize[i];
                nBufPtr += pAVCodecCtx->width>>nshift;
            }
        }

        // Report the frame width and height back to the caller
        outPrama[2] = pAVCodecCtx->width;
        outPrama[3] = pAVCodecCtx->height;
    }
    else
    {
        len = 1;
    }

    (*env)->ReleaseByteArrayElements(env, in, inbuf,0);
    (*env)->ReleaseIntArrayElements(env, prama, outPrama,0);
    (*env)->ReleaseByteArrayElements(env, out, picture,0);
    return 0;
}
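
The copy loop in DecoderNal packs the frame into out as planar YUV 4:2:0: the full Y plane first, then the quarter-size U and V planes. The byte array passed in from Java therefore needs to hold at least width * height * 3 / 2 bytes for the current resolution. A small illustrative helper (not part of the code above) that computes this:

// Minimum number of bytes the "out" buffer must hold for one decoded
// frame packed as planar YUV 4:2:0 (Y plane, then U, then V).
static int yuv420p_buffer_size(int width, int height)
{
    // Y: width * height bytes; U and V: (width/2) * (height/2) bytes each.
    return width * height * 3 / 2;
}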

4. Write the Android.mk file. If your file layout differs from mine, change the paths in LOCAL_SRC_FILES and LOCAL_C_INCLUDES to your own, otherwise the build will fail.

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := arm/lib/libavcodec.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := arm/lib/libavformat.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := arm/lib/libavutil.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := arm/lib/libavfilter.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := arm/lib/libswresample.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := arm/lib/libswscale.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_SRC_FILES := H264Decoder.c    # source file to compile
LOCAL_MODULE := H264Decoder-yuv     # name of the shared library to build

LOCAL_C_INCLUDES += $(LOCAL_PATH)/arm/include \
        $(LOCAL_PATH)/arm/include/libavcodec \
        $(LOCAL_PATH)/arm/include/libavformat \
        $(LOCAL_PATH)/arm/include/libavutil \
        $(LOCAL_PATH)/arm/include/libavfilter \
        $(LOCAL_PATH)/arm/include/libswresample \
        $(LOCAL_PATH)/arm/include/libswscale

LOCAL_LDLIBS := -L$(NDK_PLATFORMS_ROOT)/$(TARGET_PLATFORM)/arch-arm/usr \
                -L$(LOCAL_PATH)/arm/lib -lavformat -lavcodec -lavfilter -lavutil -lswscale -lswresample  \
                -llog -lz -ldl

LOCAL_LDLIBS += -lGLESv2
LOCAL_LDLIBS += -lm 

LOCAL_SHARED_LIBRARIES += \
libcutils libutils
include $(BUILD_SHARED_LIBRARY)

5. Write the Application.mk file. APP_ABI must match the ABI of the FFmpeg libraries built earlier (armeabi here).

# APP_ABI := all
# APP_ABI := armeabi-v7a x86
# APP_ABI := armeabi x86
# APP_ABI := armeabi armeabi-v7a x86
APP_ABI := armeabi

APP_PLATFORM := android-15

6. Finally, run ndk-build in the jni directory. If there are no errors, congratulations, libH264Decoder-yuv.so has been built successfully; the generated libraries are placed in demo2/libs/armeabi.

The complete code and library files are available at the link below:
https://download.csdn.net/download/adolph_lu/10467443
