FFmpeg 3's new video decoding API: avcodec_send_packet and avcodec_receive_frame
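
Starting with FFmpeg 3.x, the old avcodec_decode_video2 call is replaced by a send/receive pair: compressed packets go in through avcodec_send_packet and decoded pictures come back out through avcodec_receive_frame. A single packet may produce zero, one or several frames, so the receive side should normally be drained in a loop. Below is a minimal sketch of that pattern (an illustrative example rather than the code of this post; it assumes an already opened AVCodecContext *ctx and an AVFrame *frame from av_frame_alloc(), with error handling trimmed):

#include <libavcodec/avcodec.h>

//Generic decode step: feed one packet, then drain every frame it produced.
int decode_packet(AVCodecContext *ctx, AVPacket *pkt, AVFrame *frame) {
    int ret = avcodec_send_packet(ctx, pkt);      //pkt == NULL switches the decoder to draining mode
    if (ret < 0)
        return ret;
    while ((ret = avcodec_receive_frame(ctx, frame)) == 0) {
        //...use frame here, e.g. convert with sws_scale and render...
        av_frame_unref(frame);
    }
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        return 0;                                 //normal: more input needed / decoder fully flushed
    return ret;                                   //a real decoding error
}

The full Android JNI implementation used in this post follows.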

#include <jni.h>
#include <string.h>
#include <android/log.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#define LOGI(FORMAT, ...) __android_log_print(ANDROID_LOG_INFO,"lost520",FORMAT,##__VA_ARGS__);

AVFrame *pFrameRGBA;
AVFrame *pFrame;
AVPacket packet;
ANativeWindow *nativeWindow;
ANativeWindow_Buffer windowBuffer;

AVCodec *codec = NULL;
AVFormatContext *pFormatCtx;
AVCodecContext *codec_ctx = NULL;
jboolean isReceived = JNI_FALSE;

uint8_t *v_out_buffer;
int width;
int height;
struct SwsContext *pImgConvertCtx;

//Register: initialize the H.264 decoder and bind the output Surface
JNIEXPORT jint JNICALL
Java_com_sharetronic_ffmpegvideo_FFmpeg_register(JNIEnv *env, jclass clazz, jobject surface) {
    av_register_all();

    pFormatCtx = avformat_alloc_context();
    if (pFormatCtx == NULL) {
        LOGI("avformat_alloc_context failed!");
        return -1;
    }
    codec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (!codec) {
        LOGI("Codec not found");
        return -1;
    }
    codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        LOGI("Could not allocate video codec context");
        return -1;
    }
    if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
        LOGI("Could not open codec");
        return -1;
    }
    pFrame = av_frame_alloc();
    pFrameRGBA = av_frame_alloc();
    if (pFrame == NULL || pFrameRGBA == NULL) {
        LOGI("av_frame_alloc fail");
        return -1;
    }
    nativeWindow = ANativeWindow_fromSurface(env, surface);

    av_init_packet(&packet);
    LOGI("____________________________succ1_________________________");
    return 0;

}

//Decode one H.264 packet and render the resulting frame to the surface
JNIEXPORT jint JNICALL
Java_com_sharetronic_ffmpegvideo_FFmpeg_decoder(JNIEnv *env, jclass clazz, jbyteArray data,
                                                jint size) {
    jbyte *framedata = (*env)->GetByteArrayElements(env, data, NULL);
    packet.data = (uint8_t *) framedata;
    packet.size = size;

    int ret = avcodec_send_packet(codec_ctx, &packet);
    //avcodec_send_packet copies (or references) the packet data, so the Java array can be released now
    (*env)->ReleaseByteArrayElements(env, data, framedata, JNI_ABORT);
    if (ret != 0) {
        LOGI("avcodec_send_packet failed: %d", ret);
        return -1;
    }

    //One-time setup: width/height/pix_fmt are only known once the decoder has parsed the SPS
    if (!isReceived && codec_ctx->width > 0) {
        width = codec_ctx->width;
        height = codec_ctx->height;
        ANativeWindow_setBuffersGeometry(nativeWindow, width, height, WINDOW_FORMAT_RGBA_8888);
        int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
        v_out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
        av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, v_out_buffer, AV_PIX_FMT_RGBA,
                             width, height, 1);
        LOGI("pix_fmt: %d", codec_ctx->pix_fmt);

        pImgConvertCtx = sws_getContext(codec_ctx->width,    //source width
                                        codec_ctx->height,   //source height
                                        codec_ctx->pix_fmt,  //source pixel format
                                        codec_ctx->width,    //destination width
                                        codec_ctx->height,   //destination height
                                        AV_PIX_FMT_RGBA,     //destination pixel format
                                        SWS_BILINEAR,        //scaling algorithm; for the available flags see: http://www.cnblogs.com/mmix2009/p/3585524.html
                                        NULL,
                                        NULL,
                                        NULL);
        isReceived = JNI_TRUE;
    }

    ret = avcodec_receive_frame(codec_ctx, pFrame);
    switch (ret) {
        case 0: {
            LOGI("got a frame!");
            ANativeWindow_lock(nativeWindow, &windowBuffer, NULL);

            //Convert the decoded frame to RGBA
            sws_scale(pImgConvertCtx, (uint8_t const *const *) pFrame->data,
                      pFrame->linesize, 0, codec_ctx->height,
                      pFrameRGBA->data, pFrameRGBA->linesize);

            //Get the strides
            uint8_t *dst = windowBuffer.bits;
            int dstStride = windowBuffer.stride * 4;
            uint8_t *src = pFrameRGBA->data[0];
            int srcStride = pFrameRGBA->linesize[0];

            //The window stride and the frame stride differ, so copy the image line by line
            int h;
            LOGI("height: %d", height);
            for (h = 0; h < height; h++) {
                memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
            }
            ANativeWindow_unlockAndPost(nativeWindow);
            break;
        }
//        case AVERROR_EOF:
//            LOGI("the decoder has been fully flushed, and there will be no more output frames.");
//            break;
//        case AVERROR(EAGAIN):
//            LOGI("Resource temporarily unavailable");
//            break;
//        case AVERROR(EINVAL):
//            LOGI("Invalid argument");
//            break;
//        default:
//            break;
    }
//    av_packet_unref(&packet);
    return 0;
}
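
One limitation of decoder() above is that it calls avcodec_receive_frame only once per packet and never drains the codec when the stream ends. A hedged sketch of a flush step that could be called at end of input is shown here; it reuses the globals above, and renderFrame() is a hypothetical stand-in for the sws_scale/ANativeWindow code in decoder():

//Flush sketch: sending NULL enters draining mode; then receive until AVERROR_EOF so the
//frames still buffered inside the decoder are not lost.
static void flush_decoder(void) {
    avcodec_send_packet(codec_ctx, NULL);
    while (avcodec_receive_frame(codec_ctx, pFrame) == 0) {
        //renderFrame(pFrame);   //hypothetical helper: scale + copy into the ANativeWindow as above
    }
    //avcodec_receive_frame now returns AVERROR_EOF: the decoder is fully flushed.
}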


//Release resources
JNIEXPORT jint JNICALL Java_com_sharetronic_ffmpegvideo_FFmpeg_free(JNIEnv *env, jclass clazz) {
    sws_freeContext(pImgConvertCtx);
    av_frame_free(&pFrame);
    av_frame_free(&pFrameRGBA);
    av_free(v_out_buffer);
    avcodec_free_context(&codec_ctx);
    avformat_free_context(pFormatCtx);
    ANativeWindow_release(nativeWindow);
    return 0;
}

 

References:

https://blog.csdn.net/ywl5320/article/details/75136986

https://blog.csdn.net/boonya/article/details/79474754
