Getting Started with FFmpeg on Android

Compiling FFmpeg for the Android platform and using it to play audio and video.

I. Compiling FFmpeg on macOS

1. Building FFmpeg 3.4.7 on macOS 10.15.0

FFmpeg 3.4.7 can be downloaded from http://www.ffmpeg.org/download.html#build-mac

NDK r16b download link:

Link: https://pan.baidu.com/s/1ZQhyeOWf6U_UUx6usYdKcg  Password: d0xa

2. Preparing the FFmpeg build environment

2.1 Install Homebrew

Open a terminal and run: ruby -e "$(curl -fsSkL raw.github.com/mxcl/homebrew/go)" (this is the legacy installer; on a current system you may need the official install command from https://brew.sh instead).

2.2 Install the following packages that FFmpeg depends on at build time:

brew install automake
brew install celt
brew install faac
brew install fdk-aac
brew install lame
brew install libass
brew install libvorbis
brew install libvpx
brew install libvo-aacenc
brew install opencore-amr
brew install openjpeg
brew install opus
brew install sdl2
brew install schroedinger
brew install shtool
brew install speex
brew install texi2html
brew install theora
brew install wget
brew install x264
brew install x265
brew install xvid
brew install yasm

2.3 Install libaacplus

wget http://tipok.org.ua/downloads/media/aacplus/libaacplus/libaacplus-2.0.2.tar.gz
tar xzf libaacplus-2.0.2.tar.gz
cd libaacplus-2.0.2
# libtool on macOS is quite different from GNU libtool, which is installed as glibtool on macOS
sed -i '.bck' -e 's/libtool/glibtool/' autogen.sh
./autogen.sh
make && make install

 

2.4 Install libtool

curl -OL http://ftpmirror.gnu.org/libtool/libtool-2.4.2.tar.gz
tar -xzf libtool-2.4.2.tar.gz
cd libtool-2.4.2
./configure && make && sudo make install

 

With all the preparation done, the next step is to build FFmpeg.

3. Building FFmpeg

Build guides found online usually come with a long string of configure flags, and those extra flags only raise the risk of a failed build. In my own run I did not use any of them; I simply changed into the ffmpeg-3.4.7 directory.

Commands:

cd ffmpeg-3.4.7
./configure
make && make install

If the build fails, install whatever the error messages say is missing; this step can be tedious and takes some patience. If it succeeds, the directory will contain several new command-line binaries.

(Screenshot 1: the generated FFmpeg command-line binaries)

 

At this point you can use the FFmpeg commands to work on video: convert formats, add watermarks, print a file's metadata, and so on.

There are three tools: ffmpeg, ffplay and ffprobe. They are not covered in depth here (a few typical invocations are shown below); interested readers can dig further on their own.
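For reference, a few standard invocations of these tools (the input and output file names here are only placeholders):

# Convert a file to another container/format
ffmpeg -i input.mp4 output.avi
# Burn a watermark image into the top-left corner of a video
ffmpeg -i input.mp4 -i logo.png -filter_complex "overlay=10:10" watermarked.mp4
# Print the container and stream metadata of a file
ffprobe -show_format -show_streams input.mp4
# Play a file in a preview window
ffplay input.mp4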

 

II. Compiling FFmpeg for Android (the environment in which my build succeeded)

1. Download NDK r16b.

2. Modify the configure file in the FFmpeg source directory.

Open it in an editor and search for SLIBNAME_WITH_MAJOR to jump to the following block. (The point of this change is to make the install step produce library names that end in .so instead of carrying the version number after the suffix, since Android cannot load shared libraries named like libavcodec.so.57.)

SLIBNAME_WITH_MAJOR='$(SLIBNAME).$(LIBMAJOR)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_VERSION)'
SLIB_INSTALL_LINKS='$(SLIBNAME_WITH_MAJOR) $(SLIBNAME)'

and change it to:

SLIBNAME_WITH_MAJOR='$(SLIBPREF)$(FULLNAME)$(LIBMAJOR)$(SLIBSUF)'
LIB_INSTALL_EXTRA_CMD='$$(RANLIB) "$(LIBDIR)/$(LIBNAME)"'
SLIB_INSTALL_NAME='$(SLIBNAME_WITH_MAJOR)'
SLIB_INSTALL_LINKS='$(SLIBNAME)'

3. Configure build_android.sh

Create a build_android.sh file in the FFmpeg source directory, paste in the configuration below, and change the NDK and output paths to match your own environment.

#!/bin/bash
ADDI_CFLAGS="-marm"
API=27
PLATFORM=arm-linux-androideabi
CPU=armv7-a
NDK=/Users/zouguibao/android-ndk-r16b
SYSROOT=$NDK/platforms/android-$API/arch-arm/
ISYSROOT=$NDK/sysroot
ASM=$ISYSROOT/usr/include/$PLATFORM
TOOLCHAIN=$NDK/toolchains/$PLATFORM-4.9/prebuilt/darwin-x86_64
OUTPUT=/Users/zouguibao/ffmpeg-3.4.7/android

function build_one
{
./configure \
--prefix=$OUTPUT \
--enable-shared \
--disable-static \
--disable-doc \
--disable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--disable-avdevice \
--disable-symver \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--target-os=android \
--arch=arm \
--enable-cross-compile \
--sysroot=$SYSROOT \
--extra-cflags="-I$ASM -isysroot $ISYSROOT -D__ANDROID_API__=27 -Os -fPIC -marm" \
--extra-ldflags="-marm" \
$ADDITIONAL_CONFIGURE_FLAG

  make clean
  #make -j4
  make
  make install
}

build_one

 

If various errors appear during this build, work through them patiently; most of them are environment problems, for example the NDK missing files the build needs (such as clang).

 

After a successful build, an android folder is generated inside the ffmpeg directory.

(Screenshot 2: the generated android output folder)

 

Copy the include folder and the .so files under lib into your Android project and they are ready to use; a sketch of the copy commands follows.
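A minimal sketch of that copy step, assuming the OUTPUT path from build_android.sh above and a project named FFmpegProject with an app/libs/armeabi-v7a directory (adjust the paths to your own layout):

cd /Users/zouguibao/ffmpeg-3.4.7/android
mkdir -p ~/FFmpegProject/app/libs/armeabi-v7a
cp -R include ~/FFmpegProject/app/libs/
cp lib/*.so ~/FFmpegProject/app/libs/armeabi-v7a/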

 

4. Playing a video file with FFmpeg (NDK development)

4.1 Playing a video file with FFmpeg is essentially a decoding job: load the video, decode the video and audio frame by frame, and render the video frames to the phone screen. The playback flow is as follows (a condensed sketch of the corresponding API calls appears right after this list):

   4.1.1 Obtain the path of the video.
   4.1.2 Register all of FFmpeg's codecs and related protocols.
   4.1.3 Allocate the AVFormatContext structure.
   4.1.4 Open the video data source (read permission is required here).
   4.1.5 Find the video stream.
   4.1.6 Create the decoder context (AVCodecContext).
   4.1.7 Create an ANativeWindow and render the decoded video onto the screen.
   4.1.8 Read AVPacket objects in a loop.
   4.1.9 Decode each AVPacket and render it to the screen, sleeping 16 ms after each rendered frame.
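A condensed sketch of that call sequence (most error handling and the actual sws_scale/ANativeWindow rendering are omitted here; the full implementation, including the JNI callbacks, is in ffplay.c further below). The function name play_sketch is only for illustration:

#include <jni.h>
#include <unistd.h>
#include <android/native_window_jni.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>

static void play_sketch(JNIEnv *env, const char *path, jobject surface) {
    av_register_all();                                              // 4.1.2 register codecs and protocols
    AVFormatContext *fmt = avformat_alloc_context();                // 4.1.3 allocate the format context
    if (avformat_open_input(&fmt, path, NULL, NULL) < 0) return;    // 4.1.4 open the data source
    if (avformat_find_stream_info(fmt, NULL) < 0) return;           // read stream information
    int video = -1;                                                 // 4.1.5 locate the video stream
    for (unsigned i = 0; i < fmt->nb_streams; i++)
        if (fmt->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { video = (int) i; break; }
    if (video < 0) return;
    AVCodec *dec = avcodec_find_decoder(fmt->streams[video]->codecpar->codec_id);
    AVCodecContext *ctx = avcodec_alloc_context3(dec);              // 4.1.6 build and open the decoder
    avcodec_parameters_to_context(ctx, fmt->streams[video]->codecpar);
    avcodec_open2(ctx, dec, NULL);
    ANativeWindow *win = ANativeWindow_fromSurface(env, surface);   // 4.1.7 native render target
    AVPacket *pkt = av_packet_alloc();
    AVFrame *frm = av_frame_alloc();
    while (av_read_frame(fmt, pkt) == 0) {                          // 4.1.8 pull packets in a loop
        if (pkt->stream_index == video &&
            avcodec_send_packet(ctx, pkt) == 0 &&
            avcodec_receive_frame(ctx, frm) == 0) {
            // 4.1.9 sws_scale to RGBA, copy into the locked ANativeWindow buffer, then pace playback
            usleep(16 * 1000);                                      // roughly one frame at 60 fps
        }
        av_packet_unref(pkt);
    }
    ANativeWindow_release(win);
    av_frame_free(&frm);
    av_packet_free(&pkt);
    avcodec_free_context(&ctx);
    avformat_close_input(&fmt);
}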

4.2 Set the project's NDK to r16b.

4.3 First put the include folder and the .so files from lib into the libs folder of the Android project, like this:

(Screenshot 3: the project's libs directory containing include and armeabi-v7a)

 

4.4 Configuration in build.gradle:

(Screenshot 4: the build.gradle configuration)

 

4.5 CMakeLists.txt sits in the root directory of the FFmpegProject module. It mainly wires up the FFmpeg .so files, the custom C/C++ source files, and the Android log library. Its content is as follows:

# Sets the minimum version of CMake required to build the native
# library. You should either keep the default value or only pass a
# value of 3.4.0 or lower.
cmake_minimum_required(VERSION 3.4.1)

# Both the JNI wrapper and the player implementation are compiled into native-lib
add_library(native-lib
        SHARED
        src/main/cpp/native-lib.cpp
        src/main/cpp/ffplay.c
        )

find_library( # Sets the name of the path variable.
        log-lib
        # Specifies the name of the NDK library that
        # you want CMake to locate.
        log)

find_library(
        android-lib
        android)

set(distribution_DIR ${CMAKE_SOURCE_DIR}/../../../../libs)

add_library(avutil-55
        SHARED
        IMPORTED)
set_target_properties(avutil-55
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/armeabi-v7a/libavutil.so)

add_library(swresample-2
        SHARED
        IMPORTED)
set_target_properties(swresample-2
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/armeabi-v7a/libswresample.so)

add_library(avcodec-57
        SHARED
        IMPORTED)
set_target_properties(avcodec-57
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/armeabi-v7a/libavcodec.so)

add_library(avfilter-6
        SHARED
        IMPORTED)
set_target_properties(avfilter-6
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/armeabi-v7a/libavfilter.so)

add_library(swscale-4
        SHARED
        IMPORTED)
set_target_properties(swscale-4
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/armeabi-v7a/libswscale.so)

add_library(avformat-57
        SHARED
        IMPORTED)
set_target_properties(avformat-57
        PROPERTIES IMPORTED_LOCATION
        ../../../../libs/armeabi-v7a/libavformat.so)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")

include_directories(libs/include)
#target_include_directories(native-lib PRIVATE libs/include)

target_link_libraries(native-lib avformat-57 avcodec-57 avfilter-6 avutil-55 swresample-2 swscale-4 ${log-lib} -landroid)

 

4.6 Create a cpp folder under src/main, and inside it create native-lib.cpp, ffplay.h and ffplay.c.

The content of native-lib.cpp is as follows:

//

// Created by zouguibao on 2020-04-15.

//

 

 

#include <jni.h>

#include <string>

#include <android/log.h>

 

 

extern "C" {

#include <libavcodec/avcodec.h>

#include <libavformat/avformat.h>

#include <libavfilter/avfilter.h>

#include <libswscale/swscale.h>

#include <libswresample/swresample.h>

#include <libavutil/avutil.h>

#include <libavutil/imgutils.h>

#include <android/native_window.h>

#include <android/native_window_jni.h>

#include <unistd.h>

#include "ffplay.h"

 

#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "zouguibao", __VA_ARGS__)

//int64_t duration = 0;

//double currentTime;

//int pausePlay = 0;

//int stopPlay = 0;

extern "C"

jstring

Java_com_example_ffmpegproject_MainActivity_avcodecinfo(JNIEnv *env, jobject instance) {

 

    return avcodecinfo(env, instance);

}

 

extern "C"

jstring

Java_com_example_ffmpegproject_MainActivity_stringFromJNI(

        JNIEnv *env,

        jobject /* this */) {

    std::string hello = "Hello from C++";

    return env->NewStringUTF(hello.c_str());

}

 

 

extern "C"

void

Java_com_example_ffmpegproject_MainActivity_ffPause(JNIEnv *env, jobject instance) {

    pauseVideo();

}

extern "C"

void

Java_com_example_ffmpegproject_MainActivity_ffStop(JNIEnv *env, jobject instance) {

    stopVideo();

}

extern "C"

jlong

Java_com_example_ffmpegproject_MainActivity_getDuration(JNIEnv *env, jobject instance) {

    return getDuration();

}

 

extern "C"

jint

Java_com_example_ffmpegproject_MainActivity_getCurrentTime(JNIEnv *env, jobject instance) {

    return getCurrentTime();

}

 

extern "C"

jlong

Java_com_example_ffmpegproject_MainActivity_getVideoDuration(JNIEnv *env, jobject instance,

                                                             jstring videoPath) {

    return getVideoDuration(env, instance, videoPath);

}

 

extern "C"

void

Java_com_example_ffmpegproject_MainActivity_ffplay(JNIEnv *env, jobject instance,

                                                   jstring videoPath, jobject surface) {

//    playVideoInThread(env,instance,videoPath,surface);

    playVideo(env, instance, videoPath, surface);

}

 

}

 

 

The content of ffplay.h:

//

// Created by zouguibao on 2020-04-19.

//

 

#ifndef FFMPEGPROJECT_FFPLAY_H

#define FFMPEGPROJECT_FFPLAY_H

 

//#include "../../../../../../android-ndk-r16b/sysroot/usr/include/jni.h"

#include <jni.h>

// Start playback

void playVideo(JNIEnv *env,jobject instance,jstring videoPath, jobject surface);

// Get the total duration of a video file

jlong getVideoDuration(JNIEnv *env,jobject instance,jstring videoPath);

// Pause playback

void pauseVideo();

// Stop playback

void stopVideo();

// Get the current playback position

jint getCurrentTime();

// Get the duration of the video being played

jlong getDuration();

// Dump the list of codecs compiled into FFmpeg

jstring avcodecinfo(JNIEnv *env,jobject instance);

// Start playback on a worker thread

void playVideoInThread(JNIEnv *env,jobject instance,jstring videoPath, jobject surface);

#endif //FFMPEGPROJECT_FFPLAY_H

 

 

The content of ffplay.c:

//

// Created by zouguibao on 2020-04-19.

//

#include <jni.h>

#include <stdio.h>

#include <string.h>

#include <unistd.h>

#include "malloc.h"

#include "stdlib.h"

#include <android/log.h>

#include <android/native_window.h>

#include <android/native_window_jni.h>

#include <libavcodec/avcodec.h>

#include <libavformat/avformat.h>

#include <libswscale/swscale.h>

#include <libavutil/avutil.h>

#include <libavutil/imgutils.h>

#include <libavutil/rational.h>

#include "pthread.h"

 

#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "zouguibao", __VA_ARGS__)

int64_t duration = 0;

double currentTime;

int pausePlay = 0;

int stopPlay = 0;

pthread_t ntid;

int res;

int currentFrame;

struct PlayParam {

    JNIEnv *env;

    jobject instance;

    jstring videoPath;

    jobject surface;

};

 

void stopVideo() {

    stopPlay = 1;

}

 

void pauseVideo() {

    pausePlay = 1;

}

 

jint getCurrentTime() {

    return currentTime * 1000;

}

 

jlong getDuration() {

    return duration / 1000;

}

 

void playVideo(JNIEnv *env, jobject instance,

               jstring videoPath, jobject surface) {

    LOGE("playVideo......111111111111111");

    const char *input = (*env)->GetStringUTFChars(env, videoPath, NULL);

    LOGE("playVideo......2222222222222222222");

    if (input == NULL) {

        LOGE("Failed to convert the video path string");

        return;

    }

    // Find the Java class used for the playback callbacks

    jclass jcls = (*env)->FindClass(env, "com/example/ffmpegproject/OnVideoPlayListener");

    // Register all of FFmpeg's codecs and related protocols

    av_register_all();

    avformat_network_init();

    // Allocate the AVFormatContext

    AVFormatContext *formatContext = avformat_alloc_context();

    // Open the video data source (requires read permission for local files)

    int open_state = avformat_open_input(&formatContext, input, NULL, NULL);

    if (open_state < 0) {

        char errbuf[128];

        if (av_strerror(open_state, errbuf, sizeof(errbuf)) == 0) {

            LOGE("Failed to open the video input stream, reason: %s", errbuf);

        }

        return;

    }

    // Fill the allocated AVFormatContext with stream information

    if (avformat_find_stream_info(formatContext, NULL) < 0) {

        LOGE("Failed to read stream information from the input");

        return;

    }

 

    if (formatContext->duration != AV_NOPTS_VALUE) {

        duration = formatContext->duration;

    }

 

    if (jcls != NULL) {

        jmethodID jmethodId = (*env)->GetMethodID(env, jcls, "onPrepared", "()V");

        (*env)->CallVoidMethod(env, instance, jmethodId, NULL);

    }

 

    // Index of the video stream in the streams array

    int video_stream_index = -1;

    LOGE("Number of streams in this file: %d", formatContext->nb_streams);

    // The nb_streams field of AVFormatContext holds the total number of streams in the file

    // (video, audio, subtitle, ...); find the video stream among them

    for (int i = 0; i < formatContext->nb_streams; i++) {

        // A stream whose codec_type is AVMEDIA_TYPE_VIDEO is the video stream

        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {

            video_stream_index = i; // remember the video stream index

            break;

        }

    }

    LOGE("playVideo......3333333333333");

    if (video_stream_index == -1) {

        LOGE("No video stream found");

        return;

    }

 

    // Use the stream's codec_id to find the matching decoder

    AVCodecParameters *codecParameters = formatContext->streams[video_stream_index]->codecpar;

    AVCodec *videoDecoder = avcodec_find_decoder(codecParameters->codec_id);

    LOGE("playVideo......44444444444444444");

    if (videoDecoder == NULL) {

        LOGE("No decoder found for the video stream");

        return;

    }

 

    // Allocate a decoder context for the decoder (initialized with default values)

    AVCodecContext *codecContext = avcodec_alloc_context3(videoDecoder);

    if (codecContext == NULL) {

        LOGE("Failed to allocate the decoder context");

        return;

    }

 

    // Fill the decoder context from the stream's codec parameters

    if (avcodec_parameters_to_context(codecContext, codecParameters) < 0) {

        LOGE("Failed to fill the decoder context");

        return;

    }

    // Open the decoder context with the given decoder

    if (avcodec_open2(codecContext, videoDecoder, NULL)) {

        LOGE("Failed to open the decoder context");

        return;

    }

 

    // Allocate the AVPacket that stores compressed data

    // For a video stream a packet holds one compressed frame; for an audio stream it may hold several

    enum AVPixelFormat dstFormat = AV_PIX_FMT_RGBA;

    AVPacket *avPacket = av_packet_alloc();

    // Allocate the AVFrame that holds each decoded frame

    AVFrame *avFrame = av_frame_alloc();

    // Allocate the AVFrame that holds the final frame to be displayed

    AVFrame *outAvFrame = av_frame_alloc();

    // Determine required buffer size and allocate buffer

    // The buffer holds the data that is actually rendered, in RGBA format

    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGBA, codecContext->width,

                                            codecContext->height, 1);

    uint8_t *out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));

 

    av_image_fill_arrays(

            outAvFrame->data,

            outAvFrame->linesize,

            out_buffer,

            dstFormat,

            codecContext->width,

            codecContext->height,

            1

    );

 

    // Initialize the SwsContext used for pixel-format conversion and scaling

    struct SwsContext *swsContext = sws_getContext(

            codecContext->width, // source width

            codecContext->height, // source height

            codecContext->pix_fmt, // source pixel format

            codecContext->width, // destination width

            codecContext->height, // destination height

            dstFormat,

            SWS_BICUBIC,

            NULL,

            NULL,

            NULL

    );

 

    if (swsContext == NULL) {

        LOGE("Failed to initialize the SwsContext");

        return;

    }

    // The Android native drawing surface

    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);

    // The drawing buffer

    ANativeWindow_Buffer outBuffer;

    // The width/height set here limit the number of pixels in the buffer, not the physical size of the screen;

    // if the buffer does not match the physical display size, the image may be stretched or squeezed

    ANativeWindow_setBuffersGeometry(nativeWindow, codecContext->width, codecContext->height,

                                     WINDOW_FORMAT_RGBA_8888);

    pausePlay = 0;

    stopPlay = 0;

    int nextIndex = 0;

    LOGE("maxStreams =  %d",formatContext->max_streams);

    // Read the next packet from the stream in a loop

    while (av_read_frame(formatContext, avPacket) == 0) {

//        LOGE("avPacket->stream_index = %d", avPacket->stream_index);

//        LOGE("video_stream_index = %d", video_stream_index);

        // Stop or pause playback

        if (stopPlay == 1 || pausePlay == 1) {

            av_packet_unref(avPacket);

            break;

        }

        LOGE("playVideo......555555555555555");

        if (avPacket->stream_index == video_stream_index) {

//            if(pausePlay = 0 && currentFrame > 0 && currentFrame != nextIndex){

//                nextIndex++;

//                av_packet_unref(avPacket);

//                continue;

//            }

//            currentFrame++;

            // Send the raw packet to the decoder

            int sendPacketState = avcodec_send_packet(codecContext, avPacket);

            if (sendPacketState == 0) {

                int receiveFrameState = avcodec_receive_frame(codecContext, avFrame);

                if (receiveFrameState == 0) {

                    // Lock the window's drawing surface

                    ANativeWindow_lock(nativeWindow, &outBuffer, NULL);

                    // Convert / scale the decoded frame into the RGBA output frame

                    sws_scale(swsContext, (const uint8_t *const *) avFrame->data, avFrame->linesize,

                              0, avFrame->height, outAvFrame->data, outAvFrame->linesize);

                    uint8_t *dst = (uint8_t *) outBuffer.bits;

                    // First address of the converted pixel data;

                    // with RGBA the image lives entirely in data[0], with YUV it would be split across data[0], data[1] and data[2]

                    uint8_t *src = outAvFrame->data[0];

                    // Bytes per line of the window buffer

                    int oneLineByte = outBuffer.stride * 4;

                    // Actual number of bytes to copy per line from the decoded frame

                    int srcStride = outAvFrame->linesize[0];

 

                    for (int i = 0; i < codecContext->height; i++) {

                        memcpy(dst + i * oneLineByte, src + i * srcStride, srcStride);

                    }

 

                    // Unlock and post the buffer to the screen

                    ANativeWindow_unlockAndPost(nativeWindow);

                    // Current playback time

                    double playCurrentTime = avPacket->pts *

                                             av_q2d(formatContext->streams[video_stream_index]->time_base);

                    currentTime = (int) (playCurrentTime * 10) / 10.0; // keep one decimal place

                    if (jcls != NULL) {

                        jmethodID jmethodId = (*env)->GetMethodID(env, jcls,

                                                                  "onUpdateCurrentPosition",

                                                                  "()V");

                        (*env)->CallVoidMethod(env, instance, jmethodId);

                    }

                    LOGE("Current playback time = %lf", currentTime);

                    // Sleep briefly: sleeping too long makes every frame feel delayed, too short makes playback look fast-forwarded

                    // At roughly 60 frames per second, about 16 ms per frame is a reasonable sleep

                    usleep(1000 * 16);

                } else if (receiveFrameState == AVERROR(EAGAIN)) {

                    LOGE("Failed to receive a frame from the decoder: AVERROR(EAGAIN)");

                } else if (receiveFrameState == AVERROR_EOF) {

                    LOGE("Failed to receive a frame from the decoder: AVERROR_EOF");

                } else if (receiveFrameState == AVERROR(EINVAL)) {

                    LOGE("Failed to receive a frame from the decoder: AVERROR(EINVAL)");

                } else {

                    LOGE("Failed to receive a frame from the decoder: unknown error");

                }

            } else if (sendPacketState == AVERROR(EAGAIN)) {

                LOGE("Failed to send data to the decoder: AVERROR(EAGAIN)");

            } else if (sendPacketState == AVERROR_EOF) {

                LOGE("Failed to send data to the decoder: AVERROR_EOF");

            } else if (sendPacketState == AVERROR(EINVAL)) {

                LOGE("Failed to send data to the decoder: AVERROR(EINVAL)");

            } else if (sendPacketState == AVERROR(ENOMEM)) {

                LOGE("Failed to send data to the decoder: AVERROR(ENOMEM)");

            } else {

                LOGE("Failed to send data to the decoder: unknown error");

            }

        }

 

        av_packet_unref(avPacket);

    }

    if(pausePlay == 0){

        currentFrame = 0;

    }

    stopPlay = 1;

    if (jcls != NULL) {

        jmethodID jmethodId = (*env)->GetMethodID(env, jcls, "onCompleted", "()V");

        (*env)->CallVoidMethod(env, instance, jmethodId, NULL);

    }

    // Release resources

    ANativeWindow_release(nativeWindow);

    av_frame_free(&outAvFrame);

    av_frame_free(&avFrame);

    av_packet_free(&avPacket);

    avcodec_free_context(&codecContext);

    avformat_close_input(&formatContext);

    avformat_free_context(formatContext);

    (*env)->ReleaseStringUTFChars(env, videoPath, input);

}

 

static void * playFFmpegVideo(void *arg) {

    struct PlayParam *pstru;

    pstru = (struct PlayParam *) arg;

    LOGE("playFFmpegVideo...............");

    playVideo(pstru->env, pstru->instance, pstru->videoPath, pstru->surface);

    return NULL;

}

 

void playVideoInThread(JNIEnv *env, jobject instance, jstring videoPath, jobject surface) {
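    // Note: a JNIEnv* is only valid on the thread it was obtained on, so handing env to the worker
    // thread like this is not safe; the worker should attach itself via AttachCurrentThread on the
    // JavaVM instead. The pthread_join below also makes this call block until playback finishes,
    // so this helper is not really asynchronous (the call to it in native-lib.cpp is commented out).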

    struct PlayParam *pstru = (struct PlayParam *) malloc(sizeof(struct PlayParam));

    pstru->env = env;

    pstru->instance = instance;

    pstru->videoPath = videoPath;

    pstru->surface = surface;

    LOGE("playVideoInThread111111111111111111");

    res = pthread_create(&ntid, NULL, playFFmpegVideo, (void *)pstru);

    if (res != 0) {

        LOGE("Failed to create the playback thread, error = %d", res);

        exit(res);

    }

    LOGE("playVideoInThread2222222222222222222");

    pthread_join(ntid, NULL);

    LOGE("playVideoInThread333333333333333333");

    free(pstru);

    LOGE("playVideoInThread4444444444444444");

    // Do not call pthread_exit() here: the worker was already joined, and pthread_exit would terminate the calling JNI thread

}

 

jlong getVideoDuration(JNIEnv *env, jobject instance, jstring videoPath) {

    const char *input = (*env)->GetStringUTFChars(env, videoPath, NULL);

    if (input == NULL) {

        LOGE("Failed to convert the video path string");

        return 0;

    }

 

    // Register all of FFmpeg's codecs and related protocols

    av_register_all();

    // Allocate the AVFormatContext

    AVFormatContext *formatContext = avformat_alloc_context();

    // Open the video data source (requires read permission for local files)

    int open_state = avformat_open_input(&formatContext, input, NULL, NULL);

    if (open_state < 0) {

        char errbuf[128];

        if (av_strerror(open_state, errbuf, sizeof(errbuf)) == 0) {

            LOGE("Failed to open the video input stream, reason: %s", errbuf);

        }

        return 0;

    }

    // Fill the allocated AVFormatContext with stream information

    if (avformat_find_stream_info(formatContext, NULL) < 0) {

        LOGE("Failed to read stream information from the input");

        return 0;

    }

 

    if (formatContext->duration != AV_NOPTS_VALUE) {

        int64_t total = formatContext->duration;

        LOGE("Total video duration: %lld", (long long) total);

        avformat_close_input(&formatContext);

        return (jlong) (total / 1000);

    }

    avformat_close_input(&formatContext);

    return 0;

}

 

jstring avcodecinfo(JNIEnv *env, jobject instance) {

    char info[40000] = {0};

    av_register_all();

    AVCodec *c_temp = av_codec_next(NULL);

    while (c_temp != NULL) {

        // Note: passing info as both destination and source of sprintf is undefined behaviour,
        // so append with strcat / an explicit offset instead
        if (c_temp->decode != NULL) {

            strcat(info, "decode:");

        } else {

            strcat(info, "encode:");

        }

        switch (c_temp->type) {

            case AVMEDIA_TYPE_VIDEO:

                strcat(info, "(video):");

                break;

            case AVMEDIA_TYPE_AUDIO:

                strcat(info, "(audio):");

                break;

            default:

                strcat(info, "(other):");

                break;

        }

        sprintf(info + strlen(info), "[%10s]\n", c_temp->name);

        c_temp = c_temp->next;

    }

    return (*env)->NewStringUTF(env, info);

}

 

That completes the development flow. native-lib.cpp is the JNI entry-point file; load the library in the Java layer with System.loadLibrary("native-lib") (loadLibrary takes the bare library name, whereas System.load would require a full path) and the native code becomes callable.
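The Java layer itself is not shown in this article; the following is a minimal sketch inferred from the JNI function names and the FindClass/GetMethodID calls above (the package, class layout and OnVideoPlayListener interface are assumptions and may differ from the actual project):

package com.example.ffmpegproject;

import android.app.Activity;
import android.view.Surface;

// Callback interface looked up from native code (FindClass + GetMethodID)
interface OnVideoPlayListener {
    void onPrepared();
    void onUpdateCurrentPosition();
    void onCompleted();
}

public class MainActivity extends Activity implements OnVideoPlayListener {

    static {
        // The name matches add_library(native-lib ...) in CMakeLists.txt
        System.loadLibrary("native-lib");
    }

    // Native methods implemented in native-lib.cpp
    public native String stringFromJNI();
    public native String avcodecinfo();
    public native void ffplay(String videoPath, Surface surface);
    public native void ffPause();
    public native void ffStop();
    public native long getDuration();
    public native int getCurrentTime();
    public native long getVideoDuration(String videoPath);

    // Callbacks invoked from native code during playback
    @Override public void onPrepared() { }
    @Override public void onUpdateCurrentPosition() { }
    @Override public void onCompleted() { }
}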

 

 

The source code for this FFmpeg project is on GitHub: https://github.com/zouguibao/FFmpegProject
