FFmpeg + OpenSL ES + Producer-Consumer Pattern: Playing Audio and Video (Part 2)

Video Playback

Preparation

1. First, define a player view, PlayerView
public class PlayerView extends TextureView implements TextureView.SurfaceTextureListener {

    public PlayerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setSurfaceTextureListener(this);
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        LogUtils.e("onSurfaceTextureAvailable:width=" + width + ",height=" + height);
        setSurface(new Surface(surfaceTexture), width, height);
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {

    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {

    }

    /**
     * Called from native code to report the decoded video's width and height.
     *
     * @param videoWidth  video width in pixels
     * @param videoHeight video height in pixels
     */
    public void onNativeGetVideoSize(int videoWidth, int videoHeight) {
        LogUtils.e("onNativeGetVideoSize:videoWidth=" + videoWidth + ",videoHeight=" + videoHeight);
        int width = getWidth();
        int height = getHeight();
        float scaleX = videoWidth * 1.0f / width;
        float scaleY = videoHeight * 1.0f / height;
        float maxScale = Math.max(scaleX, scaleY);//make sure either the width or the height fills the view
        scaleX /= maxScale;
        scaleY /= maxScale;
        Matrix matrix = new Matrix();
        matrix.setScale(scaleX, scaleY, width / 2, height / 2);
        setTransform(matrix);
    }

    private native void setSurface(Surface surface, int width, int height);

    public native void play(String path);
}
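One caveat worth noting: onNativeGetVideoSize is called from the native decoding thread, while view methods such as setTransform() should run on the UI thread. A minimal sketch of posting the work (assuming the body of the method stays exactly as above):

public void onNativeGetVideoSize(final int videoWidth, final int videoHeight) {
    // View.post() hands the work over to the UI thread
    post(new Runnable() {
        @Override
        public void run() {
            // compute scaleX/scaleY and call setTransform(matrix) exactly as shown above
        }
    });
}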
2. Write the layout, activity_main

MainActivity below only needs two views from this layout: the PlayerView (id playerView) and a play button (id play_btn). A minimal version (the container type and attributes here are an assumption) looks like:

<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <com.levylin.ffmpegdemo.PlayerView
        android:id="@+id/playerView"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

    <Button
        android:id="@+id/play_btn"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="bottom|center_horizontal"
        android:text="Play" />

</FrameLayout>
3. Write the main screen, MainActivity (playerView and play_btn are referenced directly, presumably via Kotlin synthetic view accessors)
class MainActivity : AppCompatActivity() {

    val URL = "rtmp://live.hkstv.hk.lxdns.com/live/hks"

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        play_btn.setOnClickListener { playVideo() }
    }

    fun playVideo() {
        playerView.play(URL)
    }

    companion object {

        init {
            System.loadLibrary("native-lib")
        }
    }
}
4. Declare permissions

Playing a network stream requires at least the INTERNET permission in AndroidManifest.xml:

<uses-permission android:name="android.permission.INTERNET" />
5. Modify CMakeLists.txt

add_library( native-lib
             SHARED
             src/main/cpp/native-lib.cpp )

Change it to:

file(GLOB my_source src/main/cpp/*.cpp)
add_library( native-lib
             SHARED
             ${my_source} )

This is mainly for convenience: any .cpp file added later under src/main/cpp is picked up automatically, with no need to edit CMakeLists.txt by hand. Keep in mind that file(GLOB) is only evaluated when CMake re-runs, so a project sync/rebuild is still needed after adding a new file.
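If the project's CMake is 3.12 or newer, CONFIGURE_DEPENDS can be added so the glob is re-checked on every build. A sketch (everything else in CMakeLists.txt, including linking the FFmpeg libraries, stays as before):

file(GLOB my_source CONFIGURE_DEPENDS src/main/cpp/*.cpp)
add_library( native-lib
             SHARED
             ${my_source} )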

Core C++ code

1. Define my-log.h
#ifndef FFMPEGDEMO_MY_LOG_H
#define FFMPEGDEMO_MY_LOG_H

#include <android/log.h>

#define TAG "LEVY"
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,TAG,__VA_ARGS__)

#endif //FFMPEGDEMO_MY_LOG_H
2. Define the FFmpegVideo header and implementation. The demuxing thread acts as the producer and put()s packets into the queue; the playback thread created in play() is the consumer and get()s them.

FFmpegVideo.h

#ifndef FFMPEGDEMO_FFMPEGVIDEO_H
#define FFMPEGDEMO_FFMPEGVIDEO_H

#include "my-log.h"
#include <pthread.h>
#include <queue>
#include <unistd.h>

extern "C" {
#include "libswscale/swscale.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
};

using namespace std;

class FFmpegVideo {
public:
    FFmpegVideo();

    ~FFmpegVideo();

    /**
     * Take one packet from the queue (blocks while the queue is empty)
     * @param packet destination packet
     * @return 1 on success, 0 when stopped or on failure
     */
    int get(AVPacket *packet);

    /**
     * Put one packet into the queue
     * @param packet source packet (still owned by the caller)
     * @return 1 on success, 0 on failure
     */
    int put(AVPacket *packet);

    /**
     * Start the playback (consumer) thread
     */
    void play();

    /**
     * Stop playback
     */
    void stop();

    /**
     * Set the decoder context
     * @param avCodecContext
     */
    void setAVCodecPacket(AVCodecContext *avCodecContext);

    /**
     * Set the render callback invoked for every decoded frame
     * @param call
     */
    void setPlayCall(void(*call)(AVFrame *frame));

public:
    int isPlay;//playing flag
    int index;//index of the video stream
    queue<AVPacket *> video_queue;//packet queue
    pthread_t tid;//playback thread id
    AVCodecContext *avCodecContext;//decoder context
    pthread_mutex_t mutex;//mutex protecting the queue
    pthread_cond_t cond;//signalled when a packet is queued
};

#endif //FFMPEGDEMO_FFMPEGVIDEO_H
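A quick sketch of how this queue is meant to be wired up (the real calls appear in proccess() and playVideo() further below; render_callback here stands in for the ANativeWindow drawing function):

FFmpegVideo *video = new FFmpegVideo;
video->setAVCodecPacket(codecContext);   //decoder context found by the demuxer
video->setPlayCall(render_callback);     //invoked with every decoded RGBA frame
video->play();                           //starts the consumer thread
// ... the demuxing thread (producer) calls video->put(packet) for each video packet ...
video->stop();                           //lets the consumer thread exit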

FFmpegVideo.cpp

#include "FFmpegVideo.h"
#include <cstdlib>//malloc/free for the queued AVPacket structs

/**
 * Callback that draws a decoded RGBA frame onto the ANativeWindow
 * @param frame
 */
static void (*video_call)(AVFrame *frame);

/**
 * Video playback (consumer) thread
 * @param data pointer to the FFmpegVideo instance
 * @return
 */
void *playVideo(void *data) {
    LOGE("video playback thread started");
    FFmpegVideo *video = (FFmpegVideo *) data;
    AVCodecContext *pContext = video->avCodecContext;
    //target pixel format for rendering
    AVPixelFormat pixelFormat = AV_PIX_FMT_RGBA;
    SwsContext *swsContext = sws_getContext(pContext->width,
                                            pContext->height,
                                            pContext->pix_fmt,
                                            pContext->width,
                                            pContext->height,
                                            pixelFormat,
                                            SWS_BICUBIC,
                                            NULL,
                                            NULL,
                                            NULL);
    LOGE("sws context ready");
    //frame that will actually be drawn onto the window (RGBA)
    AVFrame *rgb_frame = av_frame_alloc();
    uint8_t *out_buffer = (uint8_t *) av_malloc(
            (size_t) avpicture_get_size(pixelFormat, pContext->width, pContext->height));
    avpicture_fill((AVPicture *) rgb_frame, out_buffer, pixelFormat, pContext->width,
                   pContext->height);
    LOGE("rgb_frame configured");
    int got_frame;
    AVFrame *frame = av_frame_alloc();
    AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    av_init_packet(packet);
    while (video->isPlay) {
        if (video->get(packet) != 1) {
            continue;
        }
        avcodec_decode_video2(pContext, frame, &got_frame, packet);
        av_packet_unref(packet);//drop the reference taken in get(), otherwise every packet leaks
        if (!got_frame) {
            continue;
        }
        sws_scale(swsContext, (const uint8_t *const *) frame->data, frame->linesize, 0,
                  frame->height, rgb_frame->data, rgb_frame->linesize);
        video_call(rgb_frame);
        usleep(16 * 1000);//fixed at 16 ms for now; see the frame-rate based sketch below
    }
    av_frame_free(&frame);
    av_frame_free(&rgb_frame);
    av_free(out_buffer);
    av_free(packet);
    sws_freeContext(swsContext);
    return 0;
}
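The 16 ms sleep is only a placeholder. A sketch of deriving the delay from the stream's average frame rate instead; 'stream' is hypothetical here, since the video AVStream would have to be handed to the player, which the code above does not do yet:

    //hypothetical: 'stream' is the video AVStream passed in from the demuxer
    AVRational rate = stream->avg_frame_rate;                   //e.g. {25, 1}
    int64_t delay_us = 16 * 1000;                               //fallback: 16 ms
    if (rate.num > 0 && rate.den > 0) {
        delay_us = (int64_t) (1000000.0 * rate.den / rate.num); //1e6 / fps
    }
    usleep((useconds_t) delay_us);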

FFmpegVideo::FFmpegVideo() {
    isPlay = 0;//not playing yet (otherwise the field would be uninitialized)
    index = -1;//no video stream selected yet
    pthread_mutex_init(&mutex, NULL);//init the mutex
    pthread_cond_init(&cond, NULL);//init the condition variable
}

FFmpegVideo::~FFmpegVideo() {
    pthread_mutex_destroy(&mutex);
    pthread_cond_destroy(&cond);
}

int FFmpegVideo::get(AVPacket *packet) {
    LOGE("get video packet");
    pthread_mutex_lock(&mutex);
    while (isPlay && video_queue.empty()) {
        LOGE("queue is empty, waiting");
        pthread_cond_wait(&cond, &mutex);
    }
    if (!isPlay || video_queue.empty()) {
        pthread_mutex_unlock(&mutex);//don't leave the mutex locked on the early return
        return 0;
    }
    AVPacket *packet1 = video_queue.front();
    video_queue.pop();
    int ret = av_packet_ref(packet, packet1);//take our own reference to the queued packet
    av_free_packet(packet1);
    free(packet1);//the struct itself was malloc'ed in put()
    pthread_mutex_unlock(&mutex);
    if (ret < 0) {
        LOGE("get packet: ref failed");
        return 0;
    }
    return 1;
}

int FFmpegVideo::put(AVPacket *packet) {
    LOGE("queue video packet");
    AVPacket *packet1 = (AVPacket *) malloc(sizeof(AVPacket));
    if (av_copy_packet(packet1, packet) < 0) {
        LOGE("packet copy failed");
        free(packet1);
        return 0;
    }
    //the caller still owns 'packet' and unrefs it after put() returns
    pthread_mutex_lock(&mutex);
    video_queue.push(packet1);
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
    return 1;
}

void FFmpegVideo::play() {
    isPlay = 1;
    pthread_create(&tid, NULL, playVideo, this);
}

void FFmpegVideo::stop() {
    isPlay = 0;
    pthread_cond_signal(&cond);//wake the playback thread if it is blocked in get()
}

void FFmpegVideo::setAVCodecPacket(AVCodecContext *avCodecContext) {
    this->avCodecContext = avCodecContext;
}

void FFmpegVideo::setPlayCall(void (*call)(AVFrame *)) {
    video_call = call;
}

3. Write the JNI implementation (native-lib.cpp)

#include <jni.h>
#include <string>
#include "FFmpegVideo.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include <cstring>//memcpy, strdup

pthread_t main_tid;
int isPlaying;
ANativeWindow *window;
const char *path;
FFmpegVideo *video;

jobject jobj;
JavaVM *jvm;

void call_video_play(AVFrame *frame) {
    if (!window) {
        LOGE("window is null");
        return;
    }
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, NULL) < 0) {
        LOGE("failed to lock window");
        return;
    }
    uint8_t *dst = (uint8_t *) buffer.bits;
    int dstStride = buffer.stride * 4;//stride is in pixels, RGBA is 4 bytes per pixel
    uint8_t *src = frame->data[0];
    int srcStride = frame->linesize[0];
    //copy line by line, because the window stride may be wider than the frame stride
    for (int i = 0; i < video->avCodecContext->height; ++i) {
        memcpy(dst + i * dstStride, src + i * srcStride, (size_t) srcStride);
    }
    ANativeWindow_unlockAndPost(window);
}

void *proccess(void *data) {
    av_register_all();//required before using FFmpeg (pre-4.0 API)
    avformat_network_init();//required for network sources such as RTMP

    AVFormatContext *formatContext = avformat_alloc_context();
    if (avformat_open_input(&formatContext, path, NULL, NULL) < 0) {
        LOGE("failed to open input");
        pthread_exit(0);//don't log success and carry on with a broken context
    }
    LOGE("input opened");
    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        LOGE("failed to find stream info");
        avformat_close_input(&formatContext);
        pthread_exit(0);
    }
    LOGE("stream info found");
    for (int i = 0; i < formatContext->nb_streams; ++i) {
        AVStream *stream = formatContext->streams[i];
        AVCodecContext *codecContext = stream->codec;
        //find a decoder for this stream
        AVCodec *codec = avcodec_find_decoder(codecContext->codec_id);
        if (avcodec_open2(codecContext, codec, NULL) < 0) {
            LOGE("failed to open decoder");
            continue;
        }
        if (codecContext->codec_type == AVMEDIA_TYPE_VIDEO) {
            video->index = i;
            video->setAVCodecPacket(codecContext);
            int width = codecContext->width;
            int height = codecContext->height;
            LOGE("video: width=%d, height=%d", width, height);
            JNIEnv *env;
            jvm->AttachCurrentThread(&env, 0);
            LOGE("env attached");
            jclass clazz = env->GetObjectClass(jobj);
            LOGE("Native found Java jobj class: %d", clazz ? 1 : 0);
            jmethodID mid = env->GetMethodID(clazz, "onNativeGetVideoSize", "(II)V");
            if (env && jobj && mid) {
                LOGE("passing the video size to Java");
                env->CallVoidMethod(jobj, mid, width, height);
            }
            jvm->DetachCurrentThread();//pair with AttachCurrentThread above
            ANativeWindow_setBuffersGeometry(window, width, height,
                                             WINDOW_FORMAT_RGBA_8888);
        }
    }
    LOGE("start playing");
    video->play();
    AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
    av_init_packet(packet);
    while (isPlaying) {
        if (av_read_frame(formatContext, packet) < 0) {
            LOGE("failed to read frame (end of stream?)");
            av_packet_unref(packet);
            break;//stop demuxing instead of spinning on a dead stream
        }
        if (video && video->isPlay && video->index == packet->stream_index) {
            video->put(packet);
        }
        av_packet_unref(packet);
    }
    isPlaying = 0;
    if (video && video->isPlay) {
        video->stop();
    }
    av_free(packet);//the packet was already unreffed above; free the struct itself
    avformat_close_input(&formatContext);//also closes the IO context, unlike avformat_free_context alone
    pthread_exit(0);
}

JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
    jvm = vm;
    JNIEnv *env = NULL;
    jint result = -1;
    if (jvm) {
        LOGE("jvm init success");
    }
    if (vm->GetEnv((void **) &env, JNI_VERSION_1_4) != JNI_OK) {
        return result;
    }
    return JNI_VERSION_1_4;
}

extern "C"
JNIEXPORT jstring JNICALL
Java_com_levylin_ffmpegdemo_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    std::string hello = "Hello from C++";
    return env->NewStringUTF(hello.c_str());
}

extern "C"
JNIEXPORT void JNICALL
Java_com_levylin_ffmpegdemo_PlayerView_setSurface(JNIEnv *env, jobject instance, jobject surface,
                                                  jint width, jint height) {
    if (!window) {
        window = ANativeWindow_fromSurface(env, surface);
    }
    if (!jobj) {
        jobj = env->NewGlobalRef(instance);
    }
}
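If the surface can be recreated (setSurface called more than once), a variant that releases the previous window first might look like this sketch:

    if (window) {
        ANativeWindow_release(window);//drop the reference to the old window
        window = NULL;
    }
    window = ANativeWindow_fromSurface(env, surface);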
extern "C"
JNIEXPORT void JNICALL
Java_com_levylin_ffmpegdemo_PlayerView_play(JNIEnv *env, jobject instance, jstring path_) {
    const char *tmp = env->GetStringUTFChars(path_, 0);
    path = strdup(tmp);//keep our own copy: the worker thread reads 'path' after this call returns
    env->ReleaseStringUTFChars(path_, tmp);
    video = new FFmpegVideo;
    video->setPlayCall(call_video_play);
    isPlaying = 1;
    pthread_create(&main_tid, NULL, proccess, NULL);
}
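Nothing in the demo ever stops playback. A minimal sketch of a hypothetical stop entry point (the matching native method declaration would also have to be added to PlayerView); it just flips the flags so both the demuxing loop and the playback thread exit:

extern "C"
JNIEXPORT void JNICALL
Java_com_levylin_ffmpegdemo_PlayerView_stop(JNIEnv *env, jobject instance) {
    isPlaying = 0;//ends the av_read_frame loop in proccess()
    if (video) {
        video->stop();//wakes and stops the playback thread
    }
}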
