Decoding an RTSP video stream with FFmpeg and rendering it with OpenGL
1. I originally used VLC for decoding, but it could not meet the requirements, so I switched to decoding with FFmpeg directly and rendering with OpenGL.
2. With FFmpeg decoding in place, this also solves the problem of not being able to display the decoded video stream in several places at once.
3. Solved the slow time-to-first-frame problem; the stream now opens within 1-2 seconds (see the sketch after this list).
4. Fixed the warning about the PPS not being found.
5. Multithreading optimisation: decoding no longer blocks UI interaction and updates.
The decoded frames can be displayed in two ways: converted to images (UIImage textures) or rendered directly with OpenGL.
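Item 3 above (faster first frame) mostly comes down to limiting how much data FFmpeg probes and analyses before playback starts, and forcing RTSP over TCP. A minimal sketch of the relevant settings, using the same values as the full code below (note that depending on the FFmpeg version the analysis field is named max_analyze_duration or max_analyze_duration2):
//assuming pFormatCtx has already been allocated with avformat_alloc_context()
AVDictionary *options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0); //RTSP over TCP is more tolerant of lossy networks than UDP
pFormatCtx->probesize = 1000; //probe only a small amount of data before opening
pFormatCtx->max_analyze_duration = 0; //spend no extra time analysing the streams
avformat_open_input(&pFormatCtx, path, NULL, &options);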
Preparation
1. First compile the FFmpeg source for iOS, add the resulting static libraries to the project, and import the corresponding headers and system libraries (a sketch of the typical imports follows below).
I have wrapped the FFmpeg decoding code in a single file to make it easy to use; see the decoding source download link.
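As a rough sketch of the integration step (the exact set of libraries depends on how the FFmpeg build was configured), the project links libavcodec.a, libavformat.a, libavutil.a and libswscale.a plus the system libraries libz, libbz2 and libiconv, and the decoding file imports the headers like this:
#import <libavformat/avformat.h>
#import <libavcodec/avcodec.h>
#import <libswscale/swscale.h>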
First, the version that converts frames to images for display:
#import "CQMovieView.h"
#import "OpenglView.h"
#define LERP(A,B,C) ((A)*(1.0-C)+(B)*C)
@interface CQMovieView ()
@property (nonatomic, copy) NSString *cruutenPath;
@property (nonatomic ,strong)NSMutableArray *mutArray;
@property (nonatomic,strong)OpenglView *openglView;
@end
@implementation CQMovieView
{
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVFrame *pFream;
AVStream *stream;
AVPacket *packet;
int i,videoIndex;
AVPicture picture;
int videoStram;
double fps;
BOOL isReleaseResources;
dispatch_queue_t queue;
}
- (instancetype)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
self.frame = frame;
_mutArray = [NSMutableArray array];
_isdecoder = YES;
}
return self;
}
- (void)replaceTheResources:(NSString *)moviePath{
dispatch_async(queue, ^{
[self initializeResources:[moviePath UTF8String]];
});
}
-(void)Video:(NSString *)moviePath
{
NSLog(@"%@",moviePath);
// queue = dispatch_queue_create("label", DISPATCH_QUEUE_SERIAL);
queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
// queue = dispatch_queue_create("label", DISPATCH_QUEUE_CONCURRENT);
self.cruutenPath = [moviePath copy];
self.ImageView = [[UIImageView alloc]initWithFrame:self.bounds];
[self addSubview:self.ImageView];
// self.openglView = [[OpenglView alloc]initWithFrame:self.bounds];
// [self addSubview:self.openglView];
// [self.openglView setVideoSize:self.bounds.size.width height:self.bounds.size.height];
NSLog(@"===%@",moviePath);
assert(moviePath);
dispatch_async(queue, ^{
NSString *path = [moviePath stringByAppendingString:@"0200012000c8000f"];
[self initializeResources:[path UTF8String]];
});
}
- (void)initializeResources:(const char *)pathUrl{
// dispatch_async(queue, ^{
//register all components
av_register_all();
avcodec_register_all();
//declare the format context
// AVFormatContext *pFormatCtx;
//allocate the format context
pFormatCtx = avformat_alloc_context();
avformat_network_init();
//dispatch_async(queue, ^{
//get the stream path
const char * path = pathUrl;
if (path == NULL) {
printf("无法找到文件路径/n");
return ;
}
//limit probing here; this is what fixes the slow stream-open problem
AVDictionary *options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
pFormatCtx->probesize = 1000;
pFormatCtx->max_analyze_duration2 = 0;//3 * AV_TIME_BASE;
//declare the decoder
AVCodec *pCodec;
//find the H.264 decoder
pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
// int i,videoIndex;
videoIndex = -1;
pCodecCtx = avcodec_alloc_context3(pCodec);
pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
uint8_t *ut = av_mallocz(32 + AV_INPUT_BUFFER_PADDING_SIZE); //allocated with av_mallocz (plus padding) because avcodec_close() frees extradata with av_freep()
pCodecCtx->extradata = ut;
pCodecCtx->extradata_size = 32;
//fill extradata with a hard-coded Annex-B SPS/PPS (specific to this camera's stream) to fix the "PPS not found" warning
//00 00 00 01
pCodecCtx->extradata[0] = 0x00;
pCodecCtx->extradata[1] = 0x00;
pCodecCtx->extradata[2] = 0x00;
pCodecCtx->extradata[3] = 0x01;
//67 42 80 1e
pCodecCtx->extradata[4] = 0x67;
pCodecCtx->extradata[5] = 0x42;
pCodecCtx->extradata[6] = 0x80;
pCodecCtx->extradata[7] = 0x1e;
//88 8b 40 50
pCodecCtx->extradata[8] = 0x88;
pCodecCtx->extradata[9] = 0x8b;
pCodecCtx->extradata[10] = 0x40;
pCodecCtx->extradata[11] = 0x50;
//1e d0 80 00
pCodecCtx->extradata[12] = 0x1e;
pCodecCtx->extradata[13] = 0xd0;
pCodecCtx->extradata[14] = 0x80;
pCodecCtx->extradata[15] = 0x00;
//03 84 00 00
pCodecCtx->extradata[16] = 0x03;
pCodecCtx->extradata[17] = 0x84;
pCodecCtx->extradata[18] = 0x00;
pCodecCtx->extradata[19] = 0x00;
//af c8 02 00
pCodecCtx->extradata[20] = 0xaf;
pCodecCtx->extradata[21] = 0xc8;
pCodecCtx->extradata[22] = 0x02;
pCodecCtx->extradata[23] = 0x00;
//00 00 00 01
pCodecCtx->extradata[24] = 0x00;
pCodecCtx->extradata[25] = 0x00;
pCodecCtx->extradata[26] = 0x00;
pCodecCtx->extradata[27] = 0x01;
//68 ce 38 80
pCodecCtx->extradata[28] = 0x68;
pCodecCtx->extradata[29] = 0xce;
pCodecCtx->extradata[30] = 0x38;
pCodecCtx->extradata[31] = 0x80;
//open the stream
if(avformat_open_input(&pFormatCtx,path,NULL,&options)!=0){
NSLog(@"Could not open the stream");
return ;
}
//read the stream info
if(avformat_find_stream_info(pFormatCtx,&options)<0){
NSLog(@"Could not read the stream info");
return ;
}
// });
//iterate over the streams in the format context
for (i = 0; i < pFormatCtx->nb_streams; i++) {
//break once the video stream is found (on newer FFmpeg use pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO)
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO ){
videoIndex=i;
break;
}
}
//if videoIndex still has its initial value, no video stream was found
if(videoIndex==-1){
NSLog(@"No video stream found");
return ;
}
//decoder context
//newer FFmpeg versions no longer take the AVCodecContext from pFormatCtx->streams; I am not entirely sure of the exact reason, apparently streams[i]->codec had become too bloated (codecpar replaces it)
// AVCodecContext * pCodecCtx;
if (pCodec == NULL) {
NSLog(@"解码器没找到");
return;
}
//open the decoder
if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
NSLog(@"Failed to open the decoder");
return;
}
//frame that will hold the decoded data
// AVFrame *pFream,*pFreamYUV;
pFream = av_frame_alloc();
// pFreamYUV = av_frame_alloc();
uint8_t *out_buffer;
//allocate the packet
packet = (AVPacket *)av_malloc(sizeof(AVPacket));
/*******************************Stream information*********************************************/
NSLog(@"--------------- File Information ----------------");
//av_dump_format() is a debugging helper; it prints what pFormatCtx->streams contains
av_dump_format(pFormatCtx, 0, path, 0);
NSLog(@"-------------------------------------------------");
//used to transform the image; here it would be for scaling and cropping away the black borders
struct SwsContext * img_convert_ctx;
/**
sws_getContext() takes the following parameters:
srcW: source image width
srcH: source image height
srcFormat: source pixel format
dstW: destination image width
dstH: destination image height
dstFormat: destination pixel format
flags: the scaling algorithm to use
On success it returns the created SwsContext, otherwise NULL.
*/
// img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
// pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
// free(audioBuffer);
//note: do not free(ut) here; extradata now belongs to pCodecCtx and is freed by avcodec_close()
//set to non-zero by the decoder once a complete picture has been decoded
int got_picture_ptr = 0;
//read and decode frames in a loop
while (_isdecoder) {
if ( av_read_frame(pFormatCtx, packet)>=0) {
//only handle packets that belong to the video stream
if (packet->stream_index == videoIndex) {
//decode one frame: input a compressed AVPacket, output a decoded AVFrame
int ret = avcodec_decode_video2(pCodecCtx, pFream, &got_picture_ptr, packet);
// ret1 = avcodec_decode_audio4(pCodecCtx, frames, &got_frame, packet);
//on decode failure
// if (ret < 0) {
// NSLog(@"Decode failed");
// return;
// }
// if (got_frame) {
// swr_convert(swrCtx, &audioBuffer, 2 * 44100, frames->data, frames->nb_samples);
// NSData *data = [NSData dataWithBytes:fp_pcm length:pFream->height*pFream->width*3/2];
// [player playWithData:data];
// }
/*
char *yuvdata = malloc(pFream->height*pFream->width*3/2);
int i;
for (i = 0; i < pFream->height; i++) {
memcpy(yuvdata + i * pFream->width,
pFream->data[0] + i * pFream->linesize[0], pFream->width);
}
for (i = 0; i < pFream->height / 2; i++) {
memcpy(yuvdata + pFream->height*pFream->width + i * pFream->width / 2,
pFream->data[1] + i * pFream->linesize[1], pFream->width / 2);
}
for (i = 0; i < pFream->height / 2; i++) {
memcpy(yuvdata + pFream->height*pFream->width*5/4 + i * pFream->width / 2,
pFream->data[2] + i * pFream->linesize[2], pFream->width / 2);
}
[self.openglView displayYUV420pData:yuvdata width:pFream->width height:pFream->height];
free(yuvdata);
*/
avpicture_free(&picture);
avpicture_alloc(&picture, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
struct SwsContext *imgConverCtx = sws_getContext(pFream->width,
pFream->height,
AV_PIX_FMT_YUV420P,
pCodecCtx->width,
pCodecCtx->height,
AV_PIX_FMT_RGB24,
SWS_FAST_BILINEAR,
NULL,
NULL,
NULL);
if(imgConverCtx == nil){
return ;
}
sws_scale(imgConverCtx,
pFream->data,
pFream->linesize,
0,
pFream->height,
picture.data,
picture.linesize);
sws_freeContext(imgConverCtx);
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CFDataRef data = CFDataCreate(kCFAllocatorDefault, picture.data[0], picture.linesize[0] * pCodecCtx->height);
CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGImageRef cgImage = CGImageCreate(pCodecCtx->width, pCodecCtx->height, 8, 24, picture.linesize[0], colorSpace, bitmapInfo, provider, NULL, NO, kCGRenderingIntentDefault);
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
CGColorSpaceRelease(colorSpace);
CGDataProviderRelease(provider);
CFRelease(data);
dispatch_async(dispatch_get_main_queue(),^{
// self.ImageView.image = image; //display in this view
// [[NSNotificationCenter defaultCenter]postNotificationName:self.cruutenPath object:image]; //post the image so the same picture can be shown in several places without decoding it more than once
});
}
//free the packet
av_free_packet(packet);
avpicture_free(&picture);
}
//cleanup
// sws_freeContext(img_convert_ctx);
//// av_frame_free(&pFreamYUV);
//
// av_frame_free(&pFream);
//
// avcodec_close(pCodecCtx);
//
// avformat_close_input(&pFormatCtx);
// });
}
}
//pause
- (void)pause{
_isdecoder = NO;
}
//play
-(void)play{
_isdecoder = YES;
}
- (void)releaseResources {
NSLog(@"释放资源");
// SJLogFunc
isReleaseResources = YES;
// free the packet
if (packet) {
av_packet_unref(packet);
av_free(packet);
}
// free the decoded frame
if (pFream) {
av_frame_free(&pFream);
}
// close the decoder
if (pCodecCtx != nil){
avcodec_close(pCodecCtx);
}
// close the input
if (pFormatCtx) avformat_close_input(&pFormatCtx);
avformat_network_deinit();
}
- (void)dealloc
{
[[NSNotificationCenter defaultCenter]removeObserver:self];
[self releaseResources];
}
@end
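Before the OpenGL version, here is a minimal, hypothetical usage sketch of the view above, called from a view controller (the RTSP address is a placeholder; to actually see the frames, uncomment either the ImageView assignment or the notification post inside the main-queue block):
CQMovieView *movieView = [[CQMovieView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:movieView];
[movieView Video:@"rtsp://192.168.1.64:554/stream"]; //placeholder RTSP address
//...later, to stop decoding and clean up:
[movieView pause];
[movieView releaseResources];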
Next, the version rendered with OpenGL:
#import "CQMovieView.h"
#import "OpenglView.h"
#define LERP(A,B,C) ((A)*(1.0-C)+(B)*C)
@interface CQMovieView ()
@property (nonatomic, copy) NSString *cruutenPath;
@property (nonatomic ,strong)NSMutableArray *mutArray;
@property (nonatomic,strong)OpenglView *openglView;
@end
@implementation CQMovieView
{
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVFrame *pFream;
AVStream *stream;
AVPacket *packet;
int i,videoIndex;
AVPicture picture;
int videoStram;
double fps;
BOOL isReleaseResources;
dispatch_queue_t queue;
}
- (instancetype)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
self.frame = frame;
_mutArray = [NSMutableArray array];
_isdecoder = YES;
}
return self;
}
- (void)replaceTheResources:(NSString *)moviePath{
dispatch_async(queue, ^{
[self initializeResources:[moviePath UTF8String]];
});
}
-(void)Video:(NSString *)moviePath
{
NSLog(@"%@",moviePath);
// queue = dispatch_queue_create("label", DISPATCH_QUEUE_SERIAL);
queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
// queue = dispatch_queue_create("label", DISPATCH_QUEUE_CONCURRENT);
self.cruutenPath = [moviePath copy];
self.ImageView = [[UIImageView alloc]initWithFrame:self.bounds];
[self addSubview:self.ImageView];
self.openglView = [[OpenglView alloc]initWithFrame:self.bounds];
[self addSubview:self.openglView];
[self.openglView setVideoSize:self.bounds.size.width height:self.bounds.size.height];
NSLog(@"===%@",moviePath);
assert(moviePath);
dispatch_async(queue, ^{
NSString *path = [moviePath stringByAppendingString:@"0200012000c8000f"];
[self initializeResources:[path UTF8String]];
});
}
- (void)initializeResources:(const char *)pathUrl{
// dispatch_async(queue, ^{
//register all components
av_register_all();
avcodec_register_all();
//declare the format context
// AVFormatContext *pFormatCtx;
//allocate the format context
pFormatCtx = avformat_alloc_context();
avformat_network_init();
//dispatch_async(queue, ^{
//get the stream path
const char * path = pathUrl;
if (path == NULL) {
printf("无法找到文件路径/n");
return ;
}
AVDictionary *options = NULL;
av_dict_set(&options, "rtsp_transport", "tcp", 0);
pFormatCtx->probesize = 1000;
pFormatCtx->max_analyze_duration2 = 0;//3 * AV_TIME_BASE;
//declare the decoder
AVCodec *pCodec;
//find the H.264 decoder
pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
// int i,videoIndex;
videoIndex = -1;
pCodecCtx = avcodec_alloc_context3(pCodec);
pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
uint8_t *ut = av_mallocz(32 + AV_INPUT_BUFFER_PADDING_SIZE); //allocated with av_mallocz (plus padding) because avcodec_close() frees extradata with av_freep()
pCodecCtx->extradata = ut;
pCodecCtx->extradata_size = 32;
//fill extradata with the hard-coded Annex-B SPS/PPS for this camera
//00 00 00 01
pCodecCtx->extradata[0] = 0x00;
pCodecCtx->extradata[1] = 0x00;
pCodecCtx->extradata[2] = 0x00;
pCodecCtx->extradata[3] = 0x01;
//67 42 80 1e
pCodecCtx->extradata[4] = 0x67;
pCodecCtx->extradata[5] = 0x42;
pCodecCtx->extradata[6] = 0x80;
pCodecCtx->extradata[7] = 0x1e;
//88 8b 40 50
pCodecCtx->extradata[8] = 0x88;
pCodecCtx->extradata[9] = 0x8b;
pCodecCtx->extradata[10] = 0x40;
pCodecCtx->extradata[11] = 0x50;
//1e d0 80 00
pCodecCtx->extradata[12] = 0x1e;
pCodecCtx->extradata[13] = 0xd0;
pCodecCtx->extradata[14] = 0x80;
pCodecCtx->extradata[15] = 0x00;
//03 84 00 00
pCodecCtx->extradata[16] = 0x03;
pCodecCtx->extradata[17] = 0x84;
pCodecCtx->extradata[18] = 0x00;
pCodecCtx->extradata[19] = 0x00;
//af c8 02 00
pCodecCtx->extradata[20] = 0xaf;
pCodecCtx->extradata[21] = 0xc8;
pCodecCtx->extradata[22] = 0x02;
pCodecCtx->extradata[23] = 0x00;
//00 00 00 01
pCodecCtx->extradata[24] = 0x00;
pCodecCtx->extradata[25] = 0x00;
pCodecCtx->extradata[26] = 0x00;
pCodecCtx->extradata[27] = 0x01;
//68 ce 38 80
pCodecCtx->extradata[28] = 0x68;
pCodecCtx->extradata[29] = 0xce;
pCodecCtx->extradata[30] = 0x38;
pCodecCtx->extradata[31] = 0x80;
//open the stream
if(avformat_open_input(&pFormatCtx,path,NULL,&options)!=0){
NSLog(@"Could not open the stream");
return ;
}
//read the stream info
if(avformat_find_stream_info(pFormatCtx,&options)<0){
NSLog(@"Could not read the stream info");
return ;
}
// });
//iterate over the streams in the format context
for (i = 0; i < pFormatCtx->nb_streams; i++) {
//break once the video stream is found (on newer FFmpeg use pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_VIDEO)
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO ){
videoIndex=i;
break;
}
}
//if videoIndex still has its initial value, no video stream was found
if(videoIndex==-1){
NSLog(@"No video stream found");
return ;
}
//decoder context
//newer FFmpeg versions no longer take the AVCodecContext from pFormatCtx->streams; I am not entirely sure of the exact reason, apparently streams[i]->codec had become too bloated (codecpar replaces it)
// AVCodecContext * pCodecCtx;
if (pCodec == NULL) {
NSLog(@"解码器没找到");
return;
}
//open the decoder
if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
NSLog(@"Failed to open the decoder");
return;
}
//frame that will hold the decoded data
// AVFrame *pFream,*pFreamYUV;
pFream = av_frame_alloc();
// pFreamYUV = av_frame_alloc();
uint8_t *out_buffer;
//allocate the packet
packet = (AVPacket *)av_malloc(sizeof(AVPacket));
/*******************************Stream information*********************************************/
NSLog(@"--------------- File Information ----------------");
//av_dump_format() is a debugging helper; it prints what pFormatCtx->streams contains
av_dump_format(pFormatCtx, 0, path, 0);
NSLog(@"-------------------------------------------------");
//used to transform the image; here it would be for scaling and cropping away the black borders
struct SwsContext * img_convert_ctx;
/**
sws_getContext() takes the following parameters:
srcW: source image width
srcH: source image height
srcFormat: source pixel format
dstW: destination image width
dstH: destination image height
dstFormat: destination pixel format
flags: the scaling algorithm to use
On success it returns the created SwsContext, otherwise NULL.
*/
// img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
// pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
// free(audioBuffer);
//note: do not free(ut) here; extradata now belongs to pCodecCtx and is freed by avcodec_close()
//set to non-zero by the decoder once a complete picture has been decoded
int got_picture_ptr = 0;
//read and decode frames in a loop
while (av_read_frame(pFormatCtx, packet)>=0) {
//only handle packets that belong to the video stream
if (packet->stream_index == videoIndex) {
//decode one frame: input a compressed AVPacket, output a decoded AVFrame
int ret = avcodec_decode_video2(pCodecCtx, pFream, &got_picture_ptr, packet);
// ret1 = avcodec_decode_audio4(pCodecCtx, frames, &got_frame, packet);
//on decode failure
if (ret < 0) {
NSLog(@"Decode failed");
return;
}
// if (got_frame) {
// swr_convert(swrCtx, &audioBuffer, 2 * 44100, frames->data, frames->nb_samples);
// NSData *data = [NSData dataWithBytes:fp_pcm length:pFream->height*pFream->width*3/2];
// [player playWithData:data];
// }
//allocate a contiguous buffer for the YUV420p data: width * height * 3/2 bytes (full-size Y plane plus quarter-size U and V planes)
char *yuvdata = malloc(pFream->height*pFream->width*3/2);
//the three loops below copy the Y, U and V planes row by row into the contiguous buffer (linesize may include padding, so copy per row)
int i;
for (i = 0; i < pFream->height; i++) {
memcpy(yuvdata + i * pFream->width,
pFream->data[0] + i * pFream->linesize[0], pFream->width);
}
for (i = 0; i < pFream->height / 2; i++) {
memcpy(yuvdata + pFream->height*pFream->width + i * pFream->width / 2,
pFream->data[1] + i * pFream->linesize[1], pFream->width / 2);
}
for (i = 0; i < pFream->height / 2; i++) {
memcpy(yuvdata + pFream->height*pFream->width*5/4 + i * pFream->width / 2,
pFream->data[2] + i * pFream->linesize[2], pFream->width / 2);
}
[self.openglView displayYUV420pData:yuvdata width:pFream->width height:pFream->height];
NSLog(@"====开始获取流33333333=====%@",[NSDate date]);
free(yuvdata);
}
//free the packet
av_free_packet(packet);
}
// sws_freeContext(img_convert_ctx);
//// av_frame_free(&pFreamYUV);
//
// av_frame_free(&pFream);
//
// avcodec_close(pCodecCtx);
//
// avformat_close_input(&pFormatCtx);
// });
}
//pause
- (void)pause{
_isdecoder = NO;
}
//play
-(void)play{
_isdecoder = YES;
}
- (void)releaseResources {
NSLog(@"释放资源");
// SJLogFunc
isReleaseResources = YES;
// free the packet
if (packet) {
av_packet_unref(packet);
av_free(packet);
}
// free the decoded frame
if (pFream) {
av_frame_free(&pFream);
}
// close the decoder
if (pCodecCtx != nil){
avcodec_close(pCodecCtx);
}
// close the input
if (pFormatCtx) avformat_close_input(&pFormatCtx);
avformat_network_deinit();
}
- (void)dealloc
{
[[NSNotificationCenter defaultCenter]removeObserver:self];
[self releaseResources];
}
@end
The decoding part is identical for the image-display and the OpenGL-rendering styles; the difference is in how the data read from the stream is handled, i.e. inside the while loop.
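For reference, the plane-by-plane copy that feeds the OpenGL view can be pulled out into a small helper. This is only a restatement of the logic inside the while loop above, written as a standalone C function, and the function name is my own. For a 1280x720 frame the buffer is 1280*720 + 2*(640*360) = 1,382,400 bytes:
//Hypothetical helper: copy a (possibly padded) YUV420p AVFrame into one
//contiguous buffer of width*height*3/2 bytes, plane by plane. The caller frees the result.
static char *CQCopyYUV420p(AVFrame *frame) {
    int w = frame->width, h = frame->height;
    char *buf = malloc(w * h * 3 / 2);
    if (buf == NULL) return NULL;
    //Y plane: h rows of w bytes each
    for (int row = 0; row < h; row++)
        memcpy(buf + row * w, frame->data[0] + row * frame->linesize[0], w);
    //U plane: h/2 rows of w/2 bytes, starting at offset w*h
    for (int row = 0; row < h / 2; row++)
        memcpy(buf + w * h + row * w / 2, frame->data[1] + row * frame->linesize[1], w / 2);
    //V plane: h/2 rows of w/2 bytes, starting at offset w*h*5/4
    for (int row = 0; row < h / 2; row++)
        memcpy(buf + w * h * 5 / 4 + row * w / 2, frame->data[2] + row * frame->linesize[2], w / 2);
    return buf;
}
With that in place, the OpenGL branch of the loop reduces to calling the helper, handing the buffer to displayYUV420pData:width:height:, and freeing it.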
Download addresses for the decoding source file and for the OpenGL rendering source.