Streaming a local MP4 video to an RTMP server with FFmpeg on all Android platforms

Audio/video hands-on series

  • Building FFmpeg with x264 and fdk-aac for all Android platforms
  • Setting up a live-streaming server on Ubuntu with nginx and nginx-rtmp-module
  • Building FFmpeg as a single merged library for all Android platforms
  • Building FFmpeg with CMake in Android Studio
  • Decoding an MP4 video file to a YUV file with FFmpeg on all Android platforms
  • Building FFmpeg with command-line support for all Android platforms
  • Streaming a local MP4 video to an RTMP server with FFmpeg on all Android platforms
  • Audio encoding on Android: building the LAME library to transcode PCM to MP3
  • Building the vlc-android video player on Ubuntu
  • An illustrated guide to the differences between YU12, I420, YV12, NV12, NV21, YUV420P, YUV420SP, YUV422P and YUV444P
  • An illustrated guide to the differences between RGB565, RGB555, RGB16, RGB24, RGB32, ARGB32 and related formats
  • Converting between YUV420P, YUV420SP, NV12, NV21 and RGB, and saving as JPEG and PNG images
  • Building libyuv for all Android platforms to convert between YUV and RGB
  • Encoding camera NV21 data into an MP4 video file with FFmpeg on Android
  • Capturing camera data, encoding H.264 and streaming to an RTMP server with FFmpeg on Android
  • A simple video player on Android based on FFmpeg and ANativeWindow
  • Taking camera-preview snapshots with FFmpeg on Android (encoding NV21 data to JPEG)
  • Converting YUV and RGB data with FFmpeg's swscale module on Android
  • Recording AAC and PCM audio on Android with MediaRecorder and AudioRecord

Contents

  • Overview
  • Environment setup
  • Creating the streamer project
  • Playing the stream

Overview

Let's start with the simplest case: basic video streaming. On a desktop this is trivial, a single FFmpeg push command does the job. Today the goal is to do the same on Android; leixiaohua1020's blog (linked at the end) covers the underlying approach in detail.

ffmpeg -re -i input.mp4 -vcodec copy -f flv rtmp://192.168.1.102:1935/onzhou/live
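In this command, -re reads the input at its native frame rate (without it, ffmpeg pushes packets as fast as it can), -vcodec copy remuxes the compressed video without re-encoding, and -f flv selects the FLV container that RTMP transports. The native code below reproduces exactly this remux-and-pace behavior.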

Environment setup

Operating system: Ubuntu 16.04

Note: the FFmpeg library itself was built with android-ndk-r10e; building it with newer NDK versions fails with errors.

The Android Studio project, however, uses android-ndk-r16b together with CMake.


Creating the ffmpeg-single-streamer project

  • The build.gradle configuration is straightforward; see the earlier articles in this series.

  • Create a CMakeLists.txt file with the following configuration:


cmake_minimum_required(VERSION 3.4.1)


add_library(ffmpeg-streamer
           SHARED
           src/main/cpp/ffmpeg_streamer.c)

find_library(log-lib
            log)

#Get the parent directory (which contains the ffmpeg-single module)
get_filename_component(PARENT_DIR ${CMAKE_SOURCE_DIR} PATH)
set(LIBRARY_DIR ${PARENT_DIR}/ffmpeg-single)

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=gnu++11")
set(CMAKE_VERBOSE_MAKEFILE on)

add_library(ffmpeg-single
           SHARED
           IMPORTED)

set_target_properties(ffmpeg-single
                    PROPERTIES IMPORTED_LOCATION
                    ${LIBRARY_DIR}/libs/${ANDROID_ABI}/libffmpeg.so
                    )

#FFmpeg header files
include_directories(${LIBRARY_DIR}/libs/${ANDROID_ABI}/include)

target_link_libraries(ffmpeg-streamer ffmpeg-single ${log-lib})
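Note that ${ANDROID_ABI} is set automatically by the NDK's CMake toolchain for whichever ABI is currently being built, so each architecture picks up its matching prebuilt libffmpeg.so and headers.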
  • Create a NowStreamer.java file:
package com.onzhou.ffmpeg.streamer;

public class NowStreamer {

    static {
        //libffmpeg.so (the merged single FFmpeg library) must be loaded
        //before the wrapper library that links against it
        System.loadLibrary("ffmpeg");
        System.loadLibrary("ffmpeg-streamer");
    }

    public native int startPublish(String input, String output);

}
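By the JNI naming convention, the startPublish method above maps to the symbol Java_com_onzhou_ffmpeg_streamer_NowStreamer_startPublish, which is exactly the function that ffmpeg_streamer.c defines below.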
  • Create the source file ffmpeg_streamer.c under src/main/cpp:

#include <stdio.h>
#include <time.h>
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libavfilter/avfilter.h"
#include "libavutil/log.h"
#include "libavutil/time.h"

#define LOG_TAG    "NowStreamer"

#ifdef ANDROID

#include <jni.h>
#include <android/log.h>

#define LOGE(format, ...)  __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, format, ##__VA_ARGS__)
#define LOGI(format, ...)  __android_log_print(ANDROID_LOG_INFO,  LOG_TAG, format, ##__VA_ARGS__)
#else
#define LOGE(format, ...)  printf(LOG_TAG ": " format "\n", ##__VA_ARGS__)
#define LOGI(format, ...)  printf(LOG_TAG ": " format "\n", ##__VA_ARGS__)
#endif

//FFmpeg log callback
void log_callback(void *ptr, int level, const char *fmt, va_list vl) {
    //Append every log line to a file on external storage
    //(writing here requires the WRITE_EXTERNAL_STORAGE permission)
    FILE *fp = fopen("/storage/emulated/0/av_log.txt", "a+");
    if (fp) {
        vfprintf(fp, fmt, vl);
        fflush(fp);
        fclose(fp);
    }
    //LOGE(fmt, vl);
}
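As an alternative to a file on disk, the logs can go straight to logcat. A minimal sketch, assuming the LOG_TAG defined above (log_callback_logcat is a name introduced here, not part of the original project):

void log_callback_logcat(void *ptr, int level, const char *fmt, va_list vl) {
    //Smaller level values are more severe; skip anything quieter than warnings
    if (level > AV_LOG_WARNING)
        return;
    //__android_log_vprint takes a va_list directly, so no format buffer is needed
    __android_log_vprint(ANDROID_LOG_ERROR, LOG_TAG, fmt, vl);
}

Pass it to av_log_set_callback instead of log_callback to switch outputs.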

JNIEXPORT jint JNICALL Java_com_onzhou_ffmpeg_streamer_NowStreamer_startPublish
        (JNIEnv *env, jobject obj, jstring input_jstr, jstring output_jstr) {

    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;

    int ret, i;
    char input_str[500] = {0};
    char output_str[500] = {0};

    const char *input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    const char *output_cstr = (*env)->GetStringUTFChars(env, output_jstr, NULL);
    sprintf(input_str, "%s", input_cstr);
    sprintf(output_str, "%s", output_cstr);
    //Release the JVM-owned strings once copied into local buffers
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
    (*env)->ReleaseStringUTFChars(env, output_jstr, output_cstr);

    //Route FFmpeg's logs to the file-writing callback above
    av_log_set_callback(log_callback);

    av_register_all();
    //Initialize networking (required for RTMP output)
    avformat_network_init();

    //Input
    if ((ret = avformat_open_input(&ifmt_ctx, input_str, 0, 0)) < 0) {
        LOGE("Could not open input file.");
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        LOGE("Failed to retrieve input stream information");
        goto end;
    }

    int videoindex = -1;
    for (i = 0; i < ifmt_ctx->nb_streams; i++)
        if (ifmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    //Output
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", output_str); //RTMP
    //avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", output_str);//UDP

    if (!ofmt_ctx) {
        LOGE("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    ofmt = ofmt_ctx->oformat;
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        //Create output AVStream according to input AVStream
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
        if (!out_stream) {
            LOGE("Failed allocating output stream\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }
        //Copy the settings of AVCodecContext
        ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
        if (ret < 0) {
            LOGE("Failed to copy context from input to output stream codec context\n");
            goto end;
        }
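        //Zeroing codec_tag lets the FLV muxer pick a tag it supports;
        //carrying the MP4 tag over can make avformat_write_header() fail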
        out_stream->codec->codec_tag = 0;
        if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
            out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }

    //Open output URL
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, output_str, AVIO_FLAG_WRITE);
        if (ret < 0) {
            LOGE("Could not open output URL '%s'", output_str);
            goto end;
        }
    }
    //Write file header
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        LOGE("Error occurred when opening output URL\n");
        goto end;
    }

    int frame_index = 0;

    int64_t start_time = av_gettime();
    while (1) {
        AVStream *in_stream, *out_stream;
        //Get an AVPacket
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;
        //FIX:No PTS (Example: Raw H.264)
        //Simple Write PTS
        if (pkt.pts == AV_NOPTS_VALUE) {
            //Write PTS
            AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
            //Duration between 2 frames (us)
            int64_t calc_duration =
                    (double) AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
            //Parameters
            pkt.pts = (double) (frame_index * calc_duration) /
                      (double) (av_q2d(time_base1) * AV_TIME_BASE);
            pkt.dts = pkt.pts;
            pkt.duration = (double) calc_duration / (double) (av_q2d(time_base1) * AV_TIME_BASE);
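            //Example: a 25 fps stream with time_base 1/90000 gives
            //calc_duration = 1000000 / 25 = 40000 us, so each frame
            //advances pts by 40000 / ((1.0 / 90000) * 1000000) = 3600 ticks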
        }
        //Important: pace sending to the stream's timestamps (emulates ffmpeg's -re)
        if (pkt.stream_index == videoindex) {
            AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
            AVRational time_base_q = {1, AV_TIME_BASE};
            int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
            int64_t now_time = av_gettime() - start_time;
            if (pts_time > now_time)
                av_usleep(pts_time - now_time);

        }

        in_stream = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        /* copy packet */
        //Convert PTS/DTS
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
                                   AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
                                   AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
        //Print to Screen
        if (pkt.stream_index == videoindex) {
            LOGE("Send %8d video frames to output URL\n", frame_index);
            frame_index++;
        }
        //ret = av_write_frame(ofmt_ctx, &pkt);
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);

        if (ret < 0) {
            LOGE("Error muxing packet\n");
            break;
        }
        av_free_packet(&pkt);

    }
    //Write file trailer
    av_write_trailer(ofmt_ctx);
end:
    avformat_close_input(&ifmt_ctx);
    /* close output */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF) {
        LOGE("Error occurred.\n");
        return -1;
    }
    return 0;
}
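The error paths above log only fixed messages. FFmpeg can report the concrete cause through av_err2str; a small hypothetical helper (not part of the original source) shows the idea:

//Hypothetical helper: render an FFmpeg error code as readable text.
//av_err2str is a macro from libavutil/error.h, already pulled in
//by the FFmpeg headers included above.
static void log_av_error(const char *what, int errnum) {
    LOGE("%s failed: %s", what, av_err2str(errnum));
}

For example, log_av_error("avformat_open_input", ret) would print the reason an input could not be opened.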

For setting up the RTMP streaming server itself, see the earlier nginx article in this series. On the Java side, publishing is kicked off from a click handler on a background thread:

public void onStartClick(View view) {
    if (nowStreamer == null) {
        nowStreamer = new NowStreamer();
    }
    if (publishDisposable == null) {
        publishDisposable = Schedulers.newThread().scheduleDirect(new Runnable() {
            @Override
            public void run() {
                //Publish a local MP4 file from the app's external files directory
                final File inputVideo = new File(getExternalFilesDir(null), "input.mp4");
                nowStreamer.startPublish(inputVideo.getAbsolutePath(), PUBLISH_ADDRESS);
            }
        });
    }
}
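Note that startPublish blocks until the input ends or an error occurs, which is why it runs on a background thread here; the publishDisposable check only guards against starting a second publish, it does not stop one already in flight.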

Playing the stream

Build, package and run the app to start pushing the stream. On another machine in the LAN, open VLC and play the network stream:

rtmp://192.168.1.102:1935/onzhou/live


Project address:
https://github.com/byhook/ffmpeg4android

Original article:
https://blog.csdn.net/byhook/article/details/83592093

Reference (leixiaohua1020's blog):
https://blog.csdn.net/leixiaohua1020/article/details/47056051
