Playing an RTSP Video Stream with FFmpeg

FFmpeg

FFmpeg is a mature open-source audio/video library. Since my company has audio/video development requirements, this post is a learning note on my (admittedly shallow) understanding of FFmpeg.

Playing an RTSP video stream (video only, no audio) with FFmpeg takes the following steps: open the input, find the video stream, find a decoder, open the decoder, read packets in a loop, decode each packet into a picture, and render it.

The overall flow is:

Open input avformat_open_input(...) → find stream info avformat_find_stream_info(...) → find decoder avcodec_find_decoder(...) → open decoder avcodec_open2(...) → read frame av_read_frame(...) → decode frame avcodec_decode_video2(...) → pixel conversion sws_scale(...) → display ANativeWindow_lock(...) / ANativeWindow_unlockAndPost(...) → End
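
avformat_open_input also accepts an AVDictionary of demuxer options as its last argument, which is useful for RTSP sources, for example to force TCP transport instead of UDP. A minimal sketch (the timeout option key has changed between FFmpeg versions, so treat "stimeout" as an assumption to verify against your build):

AVDictionary *opts = NULL;
av_dict_set(&opts, "rtsp_transport", "tcp", 0);   //carry RTP over TCP instead of UDP
av_dict_set(&opts, "stimeout", "5000000", 0);     //socket timeout in microseconds (older builds; newer ones use "timeout")
ret = avformat_open_input(&m_iformatCtx, inputUrl, NULL, &opts);
av_dict_free(&opts);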

Error codes returned by these calls can be converted into readable strings like this:

char temp[30] = {0};
av_strerror(ret,temp,30);
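
In plain C, libavutil also provides the av_err2str(ret) convenience macro, which wraps av_make_error_string around a stack buffer (it relies on a C99 compound literal, so it does not compile as-is in C++):

LOGE("open input fail! %d - %s", ret, av_err2str(ret));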

Video playback code:

    //------------------------- variable initialization -------------------//
    int ret = 0,i = 0;
    int videoStream = -1;
    AVPacket *pPacket = NULL;
    AVFrame *pFrame = NULL;
    int got_picture;
    mFlagStop = false;
    mFlayPause = false;
    //----------------------------- required FFmpeg initialization -------------//
    av_register_all();
    avformat_network_init();

    LOGD("inputUrl = %s",inputUrl);
    m_iformatCtx = avformat_alloc_context();
    ret = avformat_open_input(&m_iformatCtx,inputUrl,NULL,NULL);
    if(ret <0){
        char temp[30] = {0};
        av_strerror(ret,temp,30);
        LOGE("open inpput fail! %d - %s",ret,temp);
        return;
    }
    ret = avformat_find_stream_info(m_iformatCtx,NULL);
    if(ret<0){
        LOGE("Couldn't find stream info !");
        goto error;
    }
    videoStream = -1;
    for(i = 0;i < m_iformatCtx->nb_streams;i++){
        if(m_iformatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
            videoStream = i;
            break;
        }
    }
    if(videoStream == -1){
        LOGE("Didn't find video stream !");
        goto error;
    }
    m_icondecCtx = m_iformatCtx->streams[videoStream]->codec;
    //------------------------------- find the decoder --------------------------//
    m_icodec = avcodec_find_decoder(m_icondecCtx->codec_id);
    if(!m_icodec){
        LOGE("Codec not find !");
        goto error;
    }
    //------------------------------- open the decoder --------------------------//
    ret = avcodec_open2(m_icondecCtx,m_icodec,NULL);
    if(ret <0){
        LOGE("codec open fail!");
        goto error;
    }

    if ((m_icondecCtx->width > 0) && (m_icondecCtx->height > 0)) {
        set_BuffersGeometry(m_icondecCtx->width,
                            m_icondecCtx->height);
    }
    pPacket = av_packet_alloc();
    av_init_packet(pPacket);
    pFrame = av_frame_alloc();
    while(!mFlagStop){
        ret = av_read_frame(m_iformatCtx,pPacket);
        if(ret <0){
            LOGE("read error!");
            break;
        }
        //skip packets that do not belong to the video stream
        if(pPacket->stream_index != videoStream){
            av_packet_unref(pPacket);
            continue;
        }
        ret = avcodec_decode_video2(m_icondecCtx,pFrame,&got_picture,pPacket);
        if(got_picture == 1){
            uint8_t *dst_data[8];
            int dst_linesize[8];
            //align = 1 keeps the RGB565 buffer tightly packed (render_surface assumes a row stride of width * 2)
            av_image_alloc(dst_data,dst_linesize,m_icondecCtx->width,m_icondecCtx->height,
                           AV_PIX_FMT_RGB565LE,1);
            img_convert(dst_data,dst_linesize, AV_PIX_FMT_RGB565LE, (AVPicture *) pFrame,
                        m_icondecCtx->pix_fmt,
                        m_icondecCtx->width,
                        m_icondecCtx->height);
            render_surface(dst_data[0]);
            av_freep(&dst_data[0]);
        }
        //release the buffer filled by av_read_frame, otherwise every packet leaks
        av_packet_unref(pPacket);
    }
    error:
    if(pFrame){
        av_frame_free(&pFrame);
    }
    if(pPacket){
        av_packet_free(&pPacket);
    }
    if(m_icondecCtx){
        avcodec_close(m_icondecCtx);
        m_icondecCtx = NULL;
    }
    if(m_iformatCtx){
        avformat_close_input(&m_iformatCtx);
        m_iformatCtx = NULL;
    }
    avformat_network_deinit();
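
A note on API versions: avcodec_decode_video2 and the AVStream->codec field used above are deprecated since FFmpeg 3.1. On newer versions the decoder context is built from stream->codecpar via avcodec_parameters_to_context, and decoding uses the send/receive API. A minimal sketch of the equivalent decode step, reusing the same m_icondecCtx, pPacket and pFrame (a sketch of the replacement API, not the code this article was tested with):

        //feed one compressed packet to the decoder
        ret = avcodec_send_packet(m_icondecCtx, pPacket);
        if (ret < 0) {
            LOGE("send packet fail! %d", ret);
        } else {
            //a single packet may produce zero or more decoded frames
            while (avcodec_receive_frame(m_icondecCtx, pFrame) == 0) {
                //pFrame holds a decoded picture: convert with img_convert and render with render_surface as above
            }
        }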

The render_surface method does the actual rendering of a frame to the native window:

/**
 * Render one frame to the surface
 * @param pixel pointer to the RGB565 pixel data of the frame
 */
void render_surface(uint8_t *pixel) {
    LOGV("MonkeyDemo:  renderSurface");
    if (mFlayPause) {
        return;
    }

    ANativeWindow_acquire(mANativeWindow);
//    LOGV("开始渲染");
    if (0 != ANativeWindow_lock(mANativeWindow, &nwBuffer, NULL)) {
        LOGV("ANativeWindow_lock() error");
        return;
    }
    //LOGV("renderSurface, %d, %d, %d", nwBuffer.width ,nwBuffer.height, nwBuffer.stride);
    if (nwBuffer.width >= nwBuffer.stride) {
        memcpy(nwBuffer.bits, pixel, nwBuffer.width * nwBuffer.height * 2);
    } else {
//        LOGV("new buffer width is %d,height is %d ,stride is %d",
//             nwBuffer.width, nwBuffer.height, nwBuffer.stride);
        int i;
        for (i = 0; i < nwBuffer.height; ++i) {
            memcpy((void*) ((uintptr_t) nwBuffer.bits + nwBuffer.stride * i * 2),
                   (void*) ((uintptr_t) pixel + nwBuffer.width * i * 2),
                   nwBuffer.width * 2);
        }
    }

    if (0 != ANativeWindow_unlockAndPost(mANativeWindow)) {
        LOGV("ANativeWindow_unlockAndPost error");
        ANativeWindow_release(mANativeWindow);
        return;
    }

    ANativeWindow_release(mANativeWindow);
}
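
set_BuffersGeometry, called before the decode loop above, is not shown in this article. A minimal sketch, assuming mANativeWindow was obtained from the Java Surface with ANativeWindow_fromSurface and that the window should use RGB565 buffers to match AV_PIX_FMT_RGB565LE:

/**
 * Configure the native window buffers (sketch, assumed implementation)
 */
void set_BuffersGeometry(int width, int height) {
    //request RGB565 buffers of the video's size; the compositor scales them to the view
    ANativeWindow_setBuffersGeometry(mANativeWindow, width, height,
                                     WINDOW_FORMAT_RGB_565);
}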

img_convert is the pixel format conversion routine: it converts the image data held in an AVFrame into the requested destination format.

/**
 * Pixel format conversion
 * @param dst_data      destination plane pointers (filled by av_image_alloc)
 * @param dst_linesize  destination plane strides
 * @param dst_pix_fmt   destination pixel format
 * @param src           source picture (the decoded frame)
 * @param src_pix_fmt   source pixel format
 * @param src_width     source width
 * @param src_height    source height
 * @return 0 on success, negative on failure
 */
int img_convert(uint8_t *dst_data[8],int dst_linesize[8], int dst_pix_fmt, const AVPicture *src,
                int src_pix_fmt, int src_width, int src_height) {
    int w;
    int h;
    struct SwsContext *pSwsCtx;

    w = src_width;
    h = src_height;

    pSwsCtx = sws_getContext(w, h, (AVPixelFormat) src_pix_fmt, w, h,
                             (AVPixelFormat) dst_pix_fmt, SWS_BICUBIC, NULL, NULL, NULL);
    if (!pSwsCtx) {
        LOGE("sws_getContext fail!");
        return -1;
    }
    sws_scale(pSwsCtx, (const uint8_t *const *) src->data, src->linesize, 0, h,
              dst_data, dst_linesize);
    sws_freeContext(pSwsCtx);
    return 0;
}
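
Creating and freeing an SwsContext for every single frame works but is wasteful, because the conversion parameters never change during playback. One way to avoid it (a sketch for a single stream, not taken from the original source) is sws_getCachedContext, which reuses the previous context whenever the parameters are unchanged:

    static struct SwsContext *sCachedSwsCtx = NULL;
    sCachedSwsCtx = sws_getCachedContext(sCachedSwsCtx, w, h, (AVPixelFormat) src_pix_fmt,
                                         w, h, (AVPixelFormat) dst_pix_fmt,
                                         SWS_BICUBIC, NULL, NULL, NULL);
    sws_scale(sCachedSwsCtx, (const uint8_t *const *) src->data, src->linesize, 0, h,
              dst_data, dst_linesize);
    //do not call sws_freeContext per frame; free sCachedSwsCtx once when playback stops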

Source code
