Playing RTSP/RTMP video on Android in real time with FFmpeg and JNI (both main and sub streams play smoothly)

Preface: A recent company project needed to play IP-camera video on a TV, with the ability to switch streams at any time and latency under 500 ms. I tried the various open-source players available online and none of them met the requirement, so I turned to one of our C++ developers for advice and finally got it working. I am writing it up here as a reference for anyone with the same need.

Step one: add the FFmpeg .so libraries. There are many prebuilt versions online, and I will include them in the demo later. This is my CMakeLists.txt:

# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.

cmake_minimum_required(VERSION 3.4.1)

set(pathToProject F:/androidProject/DuokuWelcome)

# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.

add_library( # Sets the name of the library.
             native-lib

             # Sets the library as a shared library.
             SHARED

             # Provides a relative path to your source file(s).
             src/main/cpp/native-lib.cpp )



add_library(avcodec-57 SHARED IMPORTED)
set_target_properties(avcodec-57 PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavcodec-57.so)

add_library(avdevice-57 SHARED IMPORTED)
set_target_properties(avdevice-57 PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavdevice-57.so)

add_library(avfilter-6 SHARED IMPORTED)
set_target_properties(avfilter-6 PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavfilter-6.so)

add_library(avformat-57 SHARED IMPORTED)
set_target_properties(avformat-57 PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavformat-57.so)

add_library(avutil-55 SHARED IMPORTED)
set_target_properties(avutil-55 PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libavutil-55.so)

add_library(gperf SHARED IMPORTED)
set_target_properties(gperf PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libgperf.so)

add_library(swresample-2 SHARED IMPORTED)
set_target_properties(swresample-2 PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libswresample-2.so)

add_library(swscale-4 SHARED IMPORTED)
set_target_properties(swscale-4 PROPERTIES
                    IMPORTED_LOCATION ${CMAKE_SOURCE_DIR}/src/main/jniLibs/${ANDROID_ABI}/libswscale-4.so)

include_directories(${pathToProject}/app/src/main/cpp/include/)

# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.

find_library( # Sets the name of the path variable.
              log-lib

              # Specifies the name of the NDK library that
              # you want CMake to locate.
              log )

# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.

target_link_libraries( # Specifies the target library.
                       native-lib

                       android
                       OpenSLES
                       jnigraphics
                       avformat-57
                       avdevice-57
                       avcodec-57
                       avutil-55
                       avfilter-6
                       gperf
                       swresample-2
                       swscale-4
                       ${log-lib})

You can put the FFmpeg headers wherever you like. ANativeWindow is needed here to render the decoded frames onto the Surface. The native-lib.cpp file is also fairly simple:


#include <jni.h>
#include <string>
#include <cstdio>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <opencv2/opencv.hpp>   // only needed for the commented-out cv::Mat conversion below

using namespace cv;

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavdevice/avdevice.h>
#include <libswscale/swscale.h>
#include <libavutil/avutil.h>
#include <libavutil/mem.h>
#include <libavutil/error.h>

#include "libswresample/swresample.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"

}


extern "C"
JNIEXPORT jstring

JNICALL
Java_com_nazhi_testlive555_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    std::string hello = "Hello from C++";

    return env->NewStringUTF(hello.c_str());
}

static AVPacket *pPacket;
static AVFrame *pAvFrame, *pFrameBGR;
static AVCodecContext *pCodecCtx;
struct SwsContext *pImgConvertCtx;
static AVFormatContext *pFormatCtx;
ANativeWindow* nativeWindow;
ANativeWindow_Buffer windowBuffer;
uint8_t *v_out_buffer;
static volatile bool stop;   // written by the Java thread in stop(), read by the decode loop

extern "C"
JNIEXPORT jint JNICALL
Java_com_nazhi_testlive555_FfmpegUtils_openVideo(JNIEnv *env, jclass type, jstring url,
                                                 jobject surface) {
    stop = false;
    // initialization
    #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55, 28, 1)
    #define av_frame_alloc  avcodec_alloc_frame
    #endif

    pAvFrame = av_frame_alloc();
    pFrameBGR = av_frame_alloc();

    char input_str[500] = {0};
    const char *url_chars = env->GetStringUTFChars(url, NULL);
    snprintf(input_str, sizeof(input_str), "%s", url_chars);
    env->ReleaseStringUTFChars(url, url_chars);   // release the JNI string once we have our own copy
    nativeWindow = ANativeWindow_fromSurface(env, surface);
    if (0 == nativeWindow){
        return -1;
    }

    // initialization: register formats, codecs, devices, and networking
    avcodec_register_all();
    av_register_all();         // registers all available container formats and codecs
    avformat_network_init();
    avdevice_register_all();

    pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, input_str, NULL, NULL) < 0)
        return 1;
    avformat_find_stream_info(pFormatCtx, NULL);

    int videoIndex = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) // walk the streams, find the first video stream and note its codec info
    {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoIndex = i;                                     // on my camera this turns out to be 1
            break;
        }
    }
    if (videoIndex == -1) {
        return 1;   // no video stream found
    }
    pCodecCtx = pFormatCtx->streams[videoIndex]->codec;
    AVCodec *pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    avcodec_open2(pCodecCtx, pCodec, NULL);

    int width = pCodecCtx->width;
    int height = pCodecCtx->height;

    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, width, height, 1);
    v_out_buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
    av_image_fill_arrays(pFrameBGR->data, pFrameBGR->linesize, v_out_buffer, AV_PIX_FMT_RGBA, width, height, 1);
    pImgConvertCtx = sws_getContext(
            pCodecCtx->width,             // source width
            pCodecCtx->height,            // source height
            pCodecCtx->pix_fmt,           // source pixel format
            pCodecCtx->width,             // destination width
            pCodecCtx->height,            // destination height
            AV_PIX_FMT_RGBA,              // destination pixel format
            SWS_BICUBIC,                  // scaling algorithm; for the choices see http://www.cnblogs.com/mmix2009/p/3585524.html
            NULL,
            NULL,
            NULL);
    if (0 > ANativeWindow_setBuffersGeometry(nativeWindow,width,height,WINDOW_FORMAT_RGBA_8888)){
        ANativeWindow_release(nativeWindow);
        return -1;
    }

//    int avPicSize = avpicture_get_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
//    uint8_t *pPicBuffer = (uint8_t *) av_malloc(avPicSize);
//    avpicture_fill((AVPicture *) pFrameBGR, pPicBuffer, AV_PIX_FMT_BGR24, pCodecCtx->width,
//                   pCodecCtx->height);

    pPacket = (AVPacket*)av_malloc(sizeof(AVPacket));
    // keep reading packets until the Java side asks us to stop
    int count = 0;
    while (!stop) {
        if (av_read_frame(pFormatCtx, pPacket) >= 0) {
            if ((pPacket)->stream_index != videoIndex) {
                // not the video stream: drop this packet and keep reading
                av_packet_unref(pPacket);
                continue;
            }
            // decode
            int gotPicCount = 0;

            int code = avcodec_decode_video2(pCodecCtx, pAvFrame, &gotPicCount, pPacket);
            if (gotPicCount != 0) {
                count++;
                sws_scale(
                        pImgConvertCtx,
                        (const uint8_t *const *) pAvFrame->data,
                        pAvFrame->linesize,
                        0,
                        pCodecCtx->height,
                        pFrameBGR->data,
                        pFrameBGR->linesize);
                //cv::Mat tmpFrame(cv::Size(pCodecCtx->width, pCodecCtx->height), CV_8UC3); // convert the frame to an OpenCV Mat here if needed

                if (ANativeWindow_lock(nativeWindow, &windowBuffer, NULL) < 0) {
                    // lock failed: skip rendering this frame
                } else {
                    uint8_t *dst = (uint8_t *) windowBuffer.bits;
                    // copy row by row: the window stride (pixels) may not match the frame linesize (bytes)
                    for (int h = 0; h < height; h++) {
                        memcpy(dst + h * windowBuffer.stride * 4,
                               v_out_buffer + h * pFrameBGR->linesize[0],
                               pFrameBGR->linesize[0]);
                    }
                    ANativeWindow_unlockAndPost(nativeWindow);
                }
            }
        }
        av_packet_unref(pPacket);
    }
    sws_freeContext(pImgConvertCtx);
    av_free(pPacket);
    av_frame_free(&pAvFrame);
    av_frame_free(&pFrameBGR);
    av_free(v_out_buffer);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    ANativeWindow_release(nativeWindow);
    return 1;
}

extern "C"
JNIEXPORT void JNICALL
Java_com_nazhi_testlive555_FfmpegUtils_stop(JNIEnv *env, jclass type) {
    stop = true;
}


One thing to note: the FFmpeg headers must be wrapped in the extern "C" block, since they are C headers being compiled as C++. The flow is spelled out in the comments above. After initialization, the key call is

avformat_open_input()

which opens the video address; this can be an RTSP URL, an RTMP URL, or a local file. It returns 0 on success and a negative error code on failure.

av_read_frame()

is a blocking call that keeps reading frame data. Note that the commented-out cv::Mat line is where a decoded frame can be converted into an OpenCV Mat. After that, the frame is rendered with ANativeWindow. The global stop flag is what lets the loop exit so a different camera stream can be opened.
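
One addition worth considering, given the 500 ms latency target: avformat_open_input() accepts an AVDictionary of demuxer options. The snippet below is a minimal sketch of my own, not part of the original code; rtsp_transport, stimeout and max_delay are standard FFmpeg 3.x option names, but treat the values as starting points to tune:

    AVDictionary *opts = NULL;                        // needs <libavutil/dict.h> and <android/log.h>
    av_dict_set(&opts, "rtsp_transport", "tcp", 0);   // TCP avoids smearing artifacts from UDP packet loss
    av_dict_set(&opts, "stimeout", "5000000", 0);     // RTSP socket timeout, in microseconds
    av_dict_set(&opts, "max_delay", "500000", 0);     // cap demuxer-side buffering at 500 ms

    int ret = avformat_open_input(&pFormatCtx, input_str, NULL, &opts);
    av_dict_free(&opts);
    if (ret < 0) {
        char errbuf[128];
        av_strerror(ret, errbuf, sizeof(errbuf));     // readable text for the negative error code
        __android_log_print(ANDROID_LOG_ERROR, "native-lib", "avformat_open_input: %s", errbuf);
        return 1;
    }

Whether TCP actually lowers latency depends on the network; on a clean wired LAN the UDP default can be faster, so it is worth measuring both.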

Finally, the Java-side code (Kotlin here). The layout contains just a Button and a FrameLayout:

package com.nazhi.testlive555

import android.annotation.SuppressLint
import android.app.Activity
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.os.Handler
import android.os.Message
import android.util.Log
import kotlinx.android.synthetic.main.activity_main.*
import android.view.*
import android.widget.FrameLayout
import java.lang.ref.WeakReference


class MainActivity : AppCompatActivity() {

    private val mhand = MyHandler(this@MainActivity)

    private var canSelectCamera = false // controls whether the user may switch cameras

    @SuppressLint("HandlerLeak")
    class MyHandler constructor(activity: AppCompatActivity) : Handler() {
        private var mActivity: WeakReference<AppCompatActivity> = WeakReference(activity)

        override fun handleMessage(msg: Message) {
            super.handleMessage(msg)
            when (msg.what) {
//            showCamera -> {
//            }
                else -> {
                }
            }
        }
    }

    var i = 0

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON)
        window.addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN)
        setContentView(R.layout.activity_main)
        frame.removeAllViews()
        val surfaceView = SurfaceView(this)
        val lp = FrameLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT)
        lp.gravity = Gravity.LEFT or Gravity.TOP
        surfaceView.layoutParams = lp
        frame.addView(surfaceView)
        val holder = surfaceView.holder
        val inputurl = "rtsp://admin:password@192.168.1.xx:554/h265/ch1/main/av_stream"
        holder.addCallback(object : SurfaceHolder.Callback {
            override fun surfaceCreated(holder: SurfaceHolder) {
                Thread(Runnable {
                    i = FfmpegUtils.openVideo(inputurl, holder.surface)
                }).start()
                Log.e("i", "i" + i)
                canSelectCamera = true
                Log.e("MainActivity", "canSelectCamera = true")
            }

            override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {}
            override fun surfaceDestroyed(holder: SurfaceHolder) {}
        })

        btn.setOnClickListener {
            mhand.postDelayed(runnable, 50)
        }


    }

    var inputurl: String = ""
    var isone = false

    var runnable: Runnable = object : Runnable {
        override fun run() {
            if (i == 1) {
                Log.e("MainActivity", "i == 1")
                frame.removeAllViews()
                val surfaceView = SurfaceView(this@MainActivity)

                val lp = FrameLayout.LayoutParams(
                        ViewGroup.LayoutParams.MATCH_PARENT,
                        ViewGroup.LayoutParams.MATCH_PARENT)
                lp.gravity = Gravity.LEFT or Gravity.TOP
                surfaceView.layoutParams = lp
                frame.addView(surfaceView)
                val holder = surfaceView.holder
                holder.addCallback(object : SurfaceHolder.Callback {
                    override fun surfaceCreated(holder: SurfaceHolder) {
                        Thread(Runnable {
                            Log.e("MainActivity", inputurl)
                            if (isone) {
                                inputurl = "rtsp1"     
                            } else {
                                inputurl = "rtsp2"
                            }
                            i = FfmpegUtils.openVideo(inputurl, holder.surface)
                        }).start()
                        Log.e("i", "i" + i)
                        i = 0
                    }

                    override fun surfaceChanged(holder: SurfaceHolder, format: Int, width: Int, height: Int) {}
                    override fun surfaceDestroyed(holder: SurfaceHolder) {}
                })
                canSelectCamera = true
            } else {
                canSelectCamera = false
                Log.e("MainActivity", "i != 1")
                mhand.postDelayed(this, 50)
            }
        }
    }

}

After stop() is called, the runnable keeps polling until openVideo() returns 1, which means the native loop has exited and its memory has been freed; only then is a new stream opened.
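
One caveat about this handshake: stop is a plain bool written by the Java thread and read by the native decode loop, which gives no formal cross-thread visibility guarantee. Marking it volatile usually works in practice; a stricter sketch of my own would use std::atomic:

    #include <atomic>

    static std::atomic<bool> stop(false);   // safe to write from stop() and poll from the decode loop

    extern "C"
    JNIEXPORT void JNICALL
    Java_com_nazhi_testlive555_FfmpegUtils_stop(JNIEnv *env, jclass type) {
        // openVideo() observes the flag, drains its loop, frees everything, then returns 1
        stop.store(true);
    }

The loop condition then becomes while (!stop.load()), and openVideo() resets the flag with stop.store(false) on entry.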

With that, the whole app is complete. To take it further, you could convert frames to an OpenCV Mat and run face detection or other image processing on them.
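
As a hedged sketch of that idea (note that OpenCV is not linked in the CMakeLists.txt above, so the build would need it added): right after the sws_scale() call, the RGBA output buffer can be wrapped in a cv::Mat header without copying any pixels:

    // minimal sketch, assuming OpenCV is available in the build
    cv::Mat rgba(height, width, CV_8UC4,              // CV_8UC4 matches AV_PIX_FMT_RGBA
                 v_out_buffer, (size_t) pFrameBGR->linesize[0]);
    cv::Mat gray;
    cv::cvtColor(rgba, gray, cv::COLOR_RGBA2GRAY);    // most detectors want a grayscale input
    // a cv::CascadeClassifier loaded once at startup could now run
    // detectMultiScale(gray, faces), and the boxes can be drawn back into rgba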

For a hardware-decoding, OpenGL ES rendering version, see Android Ffmpeg硬解码、openGL渲染 (Android FFmpeg hardware decoding and OpenGL rendering).
