IjkPlayer Initialization Process

Background

I have recently been looking into making videos start playing instantly, using Bilibili's open-source ijkplayer as the player. ijkplayer is a player built on top of FFmpeg.

Using ijkplayer

  1. Create an IjkMediaPlayer object
  2. Set the playback URL via setDataSource
  3. Call prepareAsync to start the player

JNI_OnLoad

When libijkplayer.so is loaded, JNI_OnLoad in ijkplayer_jni.c is invoked; its main job is to call ijkmp_global_init for global initialization.
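
For orientation, below is a condensed sketch of JNI_OnLoad based on the ijkplayer source (details vary between versions; the inject callback and FFmpeg API bridge setup are omitted). It registers the native methods of the Java IjkMediaPlayer class and then performs the global initialization:
// Condensed sketch of JNI_OnLoad (ijkplayer_jni.c); not the complete function.
JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved)
{
    JNIEnv *env = NULL;

    g_jvm = vm;  // cache the JavaVM so native code can call back into Java later
    if ((*vm)->GetEnv(vm, (void **)&env, JNI_VERSION_1_4) != JNI_OK)
        return -1;

    pthread_mutex_init(&g_clazz.mutex, NULL);

    // Bind the Java class and register the native_* methods declared in g_methods
    IJK_FIND_JAVA_CLASS(env, g_clazz.clazz, JNI_CLASS_IJKPLAYER);
    (*env)->RegisterNatives(env, g_clazz.clazz, g_methods, NELEM(g_methods));

    ijkmp_global_init();  // global player/FFmpeg initialization

    return JNI_VERSION_1_4;
}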

ijkplayer Playback Flow

  1. In the IjkMediaPlayer constructor:
    • loadLibrariesOnce loads the three native libraries libijkffmpeg.so, libijksdl.so and libijkplayer.so
    • native_init is called, which merely prints a log line
    • An EventHandler is created, bound to a Looper, for handling messages posted from the native layer
      • if the constructor runs on a worker thread that has a Looper, messages are looped on that thread
      • if the current thread has no Looper, the main thread's Looper is used instead
    • A reference to the native-layer IjkPlayer is set up via native_setup, as shown in initPlayer below
    private void initPlayer(IjkLibLoader libLoader) {
        loadLibrariesOnce(libLoader);
        initNativeOnce();
        Looper looper;
        if ((looper = Looper.myLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else if ((looper = Looper.getMainLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else {
            mEventHandler = null;
        }
        /*
         * Native setup requires a weak reference to our object. It's easier to
         * create it here than in C++.
         */
        native_setup(new WeakReference(this));
    }
  2. __setDataSource passes the video URL down to the native layer
  3. __prepareAsync tells the native layer to start loading and decoding
  4. ijkplayer_jni.c is the C counterpart of IjkMediaPlayer; its native_setup does the following:
    • creates the native IjkMediaPlayer object; inside ijkmp_create, ffp_create initializes the FFPlayer object, and message_loop is assigned to it (see the ijkmp_create sketch after the native_setup code below)
static void IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    // Create the native IjkMediaPlayer object and assign message_loop to mp->msg_loop
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);
    // Store the pointer of the IjkMediaPlayer created above into the Java-side
    // mNativeMediaPlayer field via JNI, releasing any previously stored player
    jni_set_media_player(env, thiz, mp);
    // Create a new global weak reference to the Java IjkMediaPlayer and store it in mp->weak_thiz
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    // Use the weak reference above as ffp's inject_opaque
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));
LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
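
The ijkmp_android_create call above delegates to ijkmp_create, sketched below (condensed from the ijkplayer source, with the goto-based error handling trimmed). This is where ffp_create builds the FFPlayer object and the message_loop function pointer handed down from native_setup is stored on the player:
// Condensed sketch of ijkmp_create (ijkplayer.c); error handling trimmed.
IjkMediaPlayer *ijkmp_create(int (*msg_loop)(void *))
{
    IjkMediaPlayer *mp = (IjkMediaPlayer *) mallocz(sizeof(IjkMediaPlayer));
    if (!mp)
        return NULL;

    mp->ffplayer = ffp_create();            // allocate and initialize the FFPlayer
    if (!mp->ffplayer) {
        freep((void **)&mp);
        return NULL;
    }

    mp->msg_loop = msg_loop;                // message_loop handed in by native_setup

    ijkmp_inc_ref(mp);                      // ref_count starts at 1
    pthread_mutex_init(&mp->mutex, NULL);   // protects state transitions

    return mp;
}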
  5. setDataSource eventually reaches ijkmp_set_data_source_l, which:
    • frees the old mp->data_source pointer
    • duplicates the url into a new char * and assigns it to mp->data_source
    • changes the IjkMediaPlayer state to MP_STATE_INITIALIZED
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
    ...
    freep((void**)&mp->data_source);
    mp->data_source = strdup(url);
    if (!mp->data_source)
        return EIJK_OUT_OF_MEMORY;
    ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
    return 0;
}
  6. prepareAsync eventually reaches ijkmp_prepare_async_l, which:
    • changes the IjkMediaPlayer state to MP_STATE_ASYNC_PREPARING
    • starts the message queue &mp->ffplayer->msg_queue
    • creates the message-handling thread, whose entry function is ijkmp_msg_loop
    • calls ffp_prepare_async_l, which calls into FFmpeg to start preparing
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
    ...
    ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);
    // Push an FFP_MSG_FLUSH message into &mp->ffplayer->msg_queue;
    // its only real purpose is to start the native message queue
    msg_queue_start(&mp->ffplayer->msg_queue);

    // released in msg_loop
    ijkmp_inc_ref(mp);
    // Create the thread that drains the message queue and forwards each message
    // to postEventFromNative on the Java-side IjkMediaPlayer, which dispatches it to the
    // EventHandler; at this point the queue only holds FFP_MSG_FLUSH, a no-op test message
    mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
    // msg_thread is detached inside msg_loop
    // TODO: 9 release weak_thiz if pthread_create() failed;

    int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
    if (retval < 0) {
        ijkmp_change_state_l(mp, MP_STATE_ERROR);
        return retval;
    }

    return 0;
}
  7. ffp_prepare_async_l is where FFmpeg is actually invoked to start preparing playback:
    • if the URL protocol is rtmp or rtsp, the timeout option is removed, since it has a different meaning for those protocols
    • if the URL exceeds avformat's 1024-byte limit, the real URL is moved into the ijklongurl-url option and the ijklongurl: protocol is opened instead
    • stream_open is called to open the media stream for FFPlayer to play
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
    ...
    if (av_stristart(file_name, "rtmp", NULL) ||
        av_stristart(file_name, "rtsp", NULL)) {
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }

    /* there is a length limit in avformat */
    if (strlen(file_name) + 1 > 1024) {
        av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
        if (avio_find_protocol_name("ijklongurl:")) {
            av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
            file_name = "ijklongurl:";
        }
    }
    ...
    av_opt_set_dict(ffp, &ffp->player_opts);
    if (!ffp->aout) {
        ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
        if (!ffp->aout)
            return -1;
    }

#if CONFIG_AVFILTER
    if (ffp->vfilter0) {
        GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
        ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
    }
#endif
    //  Open the media stream
    VideoState *is = stream_open(ffp, file_name, NULL);
    if (!is) {
        av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
        return EIJK_OUT_OF_MEMORY;
    }

    ffp->is = is;
    ffp->input_filename = av_strdup(file_name);
    return 0;
}
  8. stream_open opens the media stream. The VideoState it creates represents the current playback state: frames, packets, decoders and so on. It:
    • initializes the frame queues &is->pictq, &is->subpq and &is->sampq (each backed by a fixed array of up to 16 entries)
    • initializes the packet queues &is->videoq, &is->audioq and &is->subtitleq
    • initializes the clocks &is->vidclk, &is->audclk and &is->extclk, which track the current video/audio playback position
    • creates the video refresh thread is->video_refresh_tid
    • creates the read thread is->read_tid
    • calls decoder_init to initialize the video decoder (only when asynchronous MediaCodec decoder initialization is enabled)
static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputFormat *iformat)
{
    VideoState *is;  // holds the media content and playback state
    ...
    /* start video display */
    if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
        goto fail;
    if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
        goto fail;
    if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
        goto fail;

    if (packet_queue_init(&is->videoq) < 0 ||
        packet_queue_init(&is->audioq) < 0 ||
        packet_queue_init(&is->subtitleq) < 0)
        goto fail;

    if (!(is->continue_read_thread = SDL_CreateCond())) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
        goto fail;
    }

    if (!(is->video_accurate_seek_cond = SDL_CreateCond())) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
        ffp->enable_accurate_seek = 0;
    }

    if (!(is->audio_accurate_seek_cond = SDL_CreateCond())) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
        ffp->enable_accurate_seek = 0;
    }

    init_clock(&is->vidclk, &is->videoq.serial);
    init_clock(&is->audclk, &is->audioq.serial);
    init_clock(&is->extclk, &is->extclk.serial);
    is->audio_clock_serial = -1;
    ...
    is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout");
    if (!is->video_refresh_tid) {
        av_freep(&ffp->is);
        return NULL;
    }

    is->initialized_decoder = 0;
    is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read");
    if (!is->read_tid) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateThread(): %s\n", SDL_GetError());
        goto fail;
    }

    if (ffp->async_init_decoder && !ffp->video_disable && ffp->video_mime_type && strlen(ffp->video_mime_type) > 0
                    && ffp->mediacodec_default_name && strlen(ffp->mediacodec_default_name) > 0) {
        if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2) {
            decoder_init(&is->viddec, NULL, &is->videoq, is->continue_read_thread);
            ffp->node_vdec = ffpipeline_init_video_decoder(ffp->pipeline, ffp);
        }
    }
    is->initialized_decoder = 1;

    return is;
fail:
    is->initialized_decoder = 1;
    is->abort_request = true;
    if (is->video_refresh_tid)
        SDL_WaitThread(is->video_refresh_tid, NULL);
    stream_close(ffp);
    return NULL;
}

FFmpeg Module Layout

encode/decode module

  • Audio and video encoding and decoding; lives in the libavcodec subdirectory

muxer/demuxer module

  • Muxing and demuxing (combining and splitting) of audio and video streams; lives in the libavformat directory

Memory and other common utilities

  • Live in the libavutil directory
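
The split becomes clearer with a minimal sketch that touches each library once. This is plain FFmpeg API usage rather than ijkplayer code, and the function name open_and_probe is made up for illustration:
// Minimal sketch of the FFmpeg module split; not taken from ijkplayer.
#include <libavformat/avformat.h>   // muxer/demuxer: AVFormatContext, avformat_open_input
#include <libavcodec/avcodec.h>     // encode/decode: AVCodec, avcodec_find_decoder
#include <libavutil/log.h>          // common utilities: av_log

static int open_and_probe(const char *url)
{
    AVFormatContext *fmt = NULL;

    // libavformat: open the input and probe the streams (demuxing)
    if (avformat_open_input(&fmt, url, NULL, NULL) < 0)
        return -1;
    if (avformat_find_stream_info(fmt, NULL) < 0) {
        avformat_close_input(&fmt);
        return -1;
    }

    // libavcodec: look up a decoder for the first stream
    const AVCodec *codec = NULL;
    if (fmt->nb_streams > 0)
        codec = avcodec_find_decoder(fmt->streams[0]->codecpar->codec_id);

    // libavutil: logging helper
    av_log(NULL, AV_LOG_INFO, "streams=%u decoder=%s\n",
           fmt->nb_streams, codec ? codec->name : "none");

    avformat_close_input(&fmt);
    return 0;
}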

Summary

  1. On the Java side, IjkPlayer creates an EventHandler on the current thread's (or the main thread's) Looper to handle messages posted back from the native layer
  2. On the native side an IjkMediaPlayer object is created (see the ijkmp_android_create sketch below):
    • the message_loop function pointer is assigned, which determines how native-layer messages are dispatched
    • ffp_create creates the FFPlayer object
    • the IjkMediaPlayer mutex is initialized and ref_count is incremented
    • an SDL_Vout is created for video rendering
    • a platform-specific IJKFF_Pipeline is created; the pipeline covers video decoding, audio output, and so on
  3. The pointer of the native IjkMediaPlayer is stored into the Java-side mNativeMediaPlayer field
  4. When prepareAsync is called, the native layer reads the previously saved mNativeMediaPlayer pointer from the Java side to recover the native IjkMediaPlayer
  5. &mp->ffplayer->msg_queue is initialized
  6. The message thread &mp->_msg_thread is started, with ijkmp_msg_loop as its message loop function
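
To make point 2 concrete, ijkmp_android_create is roughly the following (condensed sketch based on the ijkplayer source; details may differ between versions). It builds the generic player with ijkmp_create and then attaches the Android-specific video output and pipeline:
// Condensed sketch of ijkmp_android_create (ijkplayer_android.c).
IjkMediaPlayer *ijkmp_android_create(int (*msg_loop)(void *))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);   // FFPlayer + msg_loop + mutex + ref_count
    if (!mp)
        return NULL;

    // SDL_Vout: video output that renders to an Android Surface
    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;

    // Platform pipeline: video decoding (MediaCodec or ffmpeg) and audio output
    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;

    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);
    return mp;

fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}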

