Playing Video with AVPlayer and Grabbing Video Buffers with AVFoundation

This article shows how to play a local video with AVPlayer and how to grab the video's pixel buffers with AVFoundation. Because the frames come from a file rather than the camera, getting at the buffers took me down a few dead ends, so I'm writing the process down for anyone who needs it.
The actual requirement in my project was to grab the buffers for further processing, with no need to play the video; since playback is only one line of code, I simply commented it out.
Here's the result:

(demo GIF: 效果图.gif)

1.0 Load the local video, create the player, and play

NSURL *url = [[NSBundle mainBundle] URLForResource:@"Cat" withExtension:@"mp4"];
AVPlayerItem *item = [[AVPlayerItem alloc] initWithURL:url];
AVPlayer *player = [[AVPlayer alloc] initWithPlayerItem:item];
_player = player;
AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:player];
layer.frame = self.view.bounds;
// add the player layer to the view's layer tree
[self.view.layer addSublayer:layer];
[player play];
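
For a bundled file this usually just works, but strictly speaking play should wait until the item reports that it is ready. A minimal sketch using standard KVO on AVPlayerItem's status (the observer wiring here is illustrative, not part of the original project):

[item addObserver:self forKeyPath:@"status" options:NSKeyValueObservingOptionNew context:nil];

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if ([keyPath isEqualToString:@"status"]) {
        AVPlayerItem *observedItem = (AVPlayerItem *)object;
        if (observedItem.status == AVPlayerItemStatusReadyToPlay) {
            [_player play]; // now it is safe to start playback
        } else if (observedItem.status == AVPlayerItemStatusFailed) {
            NSLog(@"item failed to load: %@", observedItem.error);
        }
    }
}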

1.1 Create the video output

AVPlayerItemVideoOutput *videoOutput = [[AVPlayerItemVideoOutput alloc] init];
[item addOutput:videoOutput];
self.videoOutput = videoOutput;
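
A plain init works, but if the frames are headed for OpenGL or GPUImage, it is worth requesting a pixel format up front via initWithPixelBufferAttributes: (an AVPlayerItemVideoOutput initializer). A sketch asking for 32BGRA, the format those pipelines usually expect:

// request 32BGRA frames, the format OpenGL ES / GPUImage typically want
NSDictionary *attributes = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
AVPlayerItemVideoOutput *videoOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes];
[item addOutput:videoOutput];
self.videoOutput = videoOutput;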

1.2 Use a display link to pull out the CVPixelBufferRef for the current time (note that copyPixelBufferForItemTime: returns a CVPixelBufferRef, not a CMSampleBufferRef)
A CADisplayLink delivers a callback every time a frame needs to be drawn, synchronized with the display's refresh rate.

    CADisplayLink *link = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkDidrefresh:)];
    [link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
-(void)displayLinkDidrefresh:(CADisplayLink*)link{
    CMTime itemTime = _player.currentItem.currentTime;
    // copyPixelBufferForItemTime: can return NULL, so check for a new frame first
    if (![_videoOutput hasNewPixelBufferForItemTime:itemTime]) {
        return;
    }
    CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil];
// with the buffer in hand, we can pass it to GPUImage or OpenGL for further processing,
// or convert it to a UIImage:
//    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
//    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
//    CGImageRef videoImage = [temporaryContext
//                             createCGImage:ciImage
//                             fromRect:CGRectMake(0, 0,
//                                                 CVPixelBufferGetWidth(pixelBuffer),
//                                                 CVPixelBufferGetHeight(pixelBuffer))];
//
//    // the current frame as an image
//    UIImage *currentImage = [UIImage imageWithCGImage:videoImage];
//    CGImageRelease(videoImage);
//    // release the buffer as soon as you are done with it, or memory will balloon
    CVPixelBufferRelease(pixelBuffer);
}
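
Once you hold the CVPixelBufferRef, reading its raw bytes is just a matter of locking the base address. A minimal sketch, assuming the output was configured for 32BGRA as shown earlier:

    // lock the buffer (read-only) before touching its memory
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); // rows may be padded
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    // e.g. the BGRA pixel at (x, y):
    size_t x = 0, y = 0;
    uint8_t *pixel = base + y * bytesPerRow + x * 4;
    uint8_t blue = pixel[0], green = pixel[1], red = pixel[2], alpha = pixel[3];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);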

The complete code is below:

//
//  Created by Leon.li on 2017/4/12.
//  Copyright © 2017 Leon.li. All rights reserved.
//

#import "XBHomeViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface XBHomeViewController ()
@property (nonatomic,strong)AVPlayer *player;
@property (nonatomic,strong)AVPlayerItemVideoOutput *videoOutput;
//@property (nonatomic,assign)CVPixelBufferRef pixelBuffer;
@end

@implementation XBHomeViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    
    self.view.backgroundColor = [UIColor brownColor];
    
//    [self loadVideoFile];
    
    [self loadAVPlayer];
    
    // Do any additional setup after loading the view.
}

-(void)loadAVPlayer{
    NSURL *url = [[NSBundle mainBundle]URLForResource:@"Cat" withExtension:@"mp4"];
    AVPlayerItem *item = [[AVPlayerItem alloc]initWithURL:url];
    AVPlayer *player = [[AVPlayer alloc]initWithPlayerItem:item];
    _player = player;
    AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:player];
    layer.frame = self.view.bounds;
    // add the player layer to the view's layer tree
    [self.view.layer addSublayer:layer];
    [player play];
    AVPlayerItemVideoOutput *videoOutput = [[AVPlayerItemVideoOutput alloc]init];
    [item addOutput:videoOutput];
    self.videoOutput = videoOutput;
    CADisplayLink *link = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkDidrefresh:)];
    [link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
    // note: CADisplayLink retains its target, so invalidate the link when the view
    // goes away (e.g. in viewWillDisappear:), otherwise this controller never deallocs
    // loop playback: replay whenever this item finishes; pass the item as the
    // notification object so we only observe our own item
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(rePlay) name:AVPlayerItemDidPlayToEndTimeNotification object:item];
}
-(void)displayLinkDidrefresh:(CADisplayLink*)link{
    CMTime itemTime = _player.currentItem.currentTime;
    // copyPixelBufferForItemTime: can return NULL, so check for a new frame first
    if (![_videoOutput hasNewPixelBufferForItemTime:itemTime]) {
        return;
    }
    CVPixelBufferRef pixelBuffer = [_videoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil];

//  the buffer can be converted to a UIImage:
//    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
//    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
//    CGImageRef videoImage = [temporaryContext
//                             createCGImage:ciImage
//                             fromRect:CGRectMake(0, 0,
//                                                 CVPixelBufferGetWidth(pixelBuffer),
//                                                 CVPixelBufferGetHeight(pixelBuffer))];
//
//    // the current frame as an image
//    UIImage *currentImage = [UIImage imageWithCGImage:videoImage];
//    CGImageRelease(videoImage);
//    // release the buffer as soon as you are done with it, or memory will balloon
    CVPixelBufferRelease(pixelBuffer);
}
-(void)rePlay{
    __weak typeof(self) weakSelf = self; // must be __weak, or the block would retain self
    [self.player seekToTime:kCMTimeZero completionHandler:^(BOOL finished) {
        [weakSelf.player play];
    }];
}

-(void)dealloc{
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}


@end
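
As an aside on the loop-playback trick above: on iOS 10 and later, AVPlayerLooper paired with an AVQueuePlayer loops a video without any notification plumbing. A minimal sketch (self.looper is a hypothetical strong property; the looper must be kept alive for looping to work):

// iOS 10+: AVPlayerLooper loops a template item on an AVQueuePlayer
NSURL *url = [[NSBundle mainBundle] URLForResource:@"Cat" withExtension:@"mp4"];
AVPlayerItem *item = [[AVPlayerItem alloc] initWithURL:url];
AVQueuePlayer *queuePlayer = [[AVQueuePlayer alloc] init];
// self.looper is a hypothetical strong property holding the AVPlayerLooper
self.looper = [AVPlayerLooper playerLooperWithPlayer:queuePlayer templateItem:item];
[queuePlayer play];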
