Integrating GPUImage with ffmpeg

bool VideoPlayer::DecodeVideoPacket(AVPacket* packet, JNIEnv* env, bool updateTime)
{
	int frameFinished = 0;
	AVCodecContext* codec_ctx = m_stream_ctx[m_video_stream].codec_ctx;
	AVStream *stream = m_fmt_ctx->streams[m_stream_ctx[m_video_stream].stream_number];

	// Decode one packet; frameFinished is set once a complete frame is available.
	int decodeByte = avcodec_decode_video2(codec_ctx, m_video_frame, &frameFinished, packet);
	if (decodeByte <= 0) return false;

	if (!frameFinished) return true; // no complete frame yet, wait for the next packet.

	// Wait until it is time to present this frame (A/V sync).
	bool ret = WaitForFrame(stream, m_video_frame, m_video_stream);
	if (!ret) return ret;

	if (updateTime) UpdateTime(env, false);

	// Convert the decoded frame into the shared bitmap's pixel buffer.
	m_buffer_manager.RenderFrame(m_video_frame, env);

	// Notify the Java side that a new frame is ready, so GPUImage can redraw.
	env->CallVoidMethod(m_obj, m_notify_bitmap_ready_method);
	return ret;
}
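The m_notify_bitmap_ready_method used above is a cached jmethodID. A minimal sketch of how it could be obtained, assuming a hypothetical InitCallback step and a Java-side method named notifyBitmapReady() (the method name, signature, and function name are assumptions, not taken from the original code):

// Hypothetical initialization: cache the callback method ID once, so that
// DecodeVideoPacket can invoke it for every decoded frame without a lookup.
bool VideoPlayer::InitCallback(JNIEnv* env, jobject player_obj)
{
	// Keep a global reference to the Java player object; a local reference
	// would become invalid once the calling JNI frame returns.
	m_obj = env->NewGlobalRef(player_obj);

	jclass clazz = env->GetObjectClass(m_obj);
	// Assumed Java method: void notifyBitmapReady()
	m_notify_bitmap_ready_method = env->GetMethodID(clazz, "notifyBitmapReady", "()V");
	env->DeleteLocalRef(clazz);

	return m_notify_bitmap_ready_method != NULL;
}

If decoding runs on a thread that was not created by the JVM, the JNIEnv passed into DecodeVideoPacket has to come from AttachCurrentThread on that thread.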

bool BufferManager::SetBitmap(JNIEnv* env, jobject bitmap)
{
    if (env == NULL || bitmap == NULL) return false;

    // Same bitmap as before: its pixels are already locked, nothing to do.
    if (m_bitmap == bitmap) return true;

    m_bitmap = bitmap;
    m_window_changed = true; // forces the scaler setup to be redone on the next frame.

    int ret;
    if ((ret = AndroidBitmap_getInfo(env, bitmap, &m_bitmap_info)) < 0)
    {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return false;
    }

    // Lock the pixel buffer once; it stays locked while frames are rendered into it.
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &m_bitmap_pixels)) < 0)
    {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        return false;
    }

    return true;
}
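SetBitmap stores the jobject it receives, so the caller has to hand it a reference that stays valid beyond the current JNI call. A minimal sketch of a JNI entry point under that assumption (the JNI function name, Java class path, and g_buffer_manager are hypothetical):

#include <jni.h>

// Hypothetical JNI bridge: promote the local Bitmap reference to a global one
// before handing it to BufferManager, so the pointer stored in m_bitmap and the
// pixels locked by AndroidBitmap_lockPixels remain valid across JNI calls.
extern "C" JNIEXPORT jboolean JNICALL
Java_com_example_player_VideoPlayer_nativeSetBitmap(JNIEnv* env, jobject thiz, jobject bitmap)
{
	jobject global_bitmap = env->NewGlobalRef(bitmap);
	if (global_bitmap == NULL) return JNI_FALSE;

	// g_buffer_manager is assumed to be the BufferManager owned by the player.
	return g_buffer_manager.SetBitmap(env, global_bitmap) ? JNI_TRUE : JNI_FALSE;
}

Keeping the pixels locked for the bitmap's whole lifetime is what lets RenderFrame below write into m_bitmap_pixels without locking per frame; the matching AndroidBitmap_unlockPixels and DeleteGlobalRef belong in the teardown path.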

bool BufferManager::RenderFrame(AVFrame *frame, JNIEnv* env)
{
	// Make sure the sws scaler matches the current bitmap before converting.
	if (!SetupEnvironment()) return false;

	if (frame == NULL || m_bitmap_pixels == NULL) return false;

	// Convert/scale the decoded frame directly into the locked bitmap pixels.
	// The bitmap is a single packed plane, so only the first destination
	// pointer and stride are used; this assumes the bitmap height matches
	// the decoded frame's height.
	sws_scale(m_video_sws,
	          (uint8_t const * const *)frame->data,
	          frame->linesize,
	          0,
	          m_bitmap_info.height,
	          (uint8_t* const*)&m_bitmap_pixels,
	          (const int *)&(m_bitmap_info.stride));

	return true;
}
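SetupEnvironment is not shown above; the scaler it prepares has to map the decoder's pixel format onto the RGBA_8888 bitmap. A minimal sketch of what it could look like, assuming members m_src_width, m_src_height, and m_src_format describe the decoded frames (these names are assumptions):

bool BufferManager::SetupEnvironment()
{
	// Rebuild the scaler only when the target bitmap changed; sws_getCachedContext
	// returns the existing context unchanged if the parameters still match.
	if (!m_window_changed && m_video_sws != NULL) return true;

	m_video_sws = sws_getCachedContext(m_video_sws,
	                                   m_src_width, m_src_height, m_src_format,    // decoded frame (assumed members)
	                                   m_bitmap_info.width, m_bitmap_info.height,  // target bitmap
	                                   AV_PIX_FMT_RGBA,    // matches ANDROID_BITMAP_FORMAT_RGBA_8888
	                                   SWS_FAST_BILINEAR, NULL, NULL, NULL);
	m_window_changed = false;
	return m_video_sws != NULL;
}

Because sws_getCachedContext reuses the previous context when nothing changed, calling SetupEnvironment on every frame stays cheap.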
Both modules lock the same Bitmap object: once ffmpeg has written a decoded frame into the bitmap, the native code calls back into GPUImage through JNI, which drives GPUImage's EGL render pass.
