SceneKit + AVFoundation: Building a VR Player (1)

Below is the player I built.

It supports VR and panoramic playback, video zooming, local and network sources, real-time frame capture, playback-time and buffered-time reporting, play, and pause.


(Demo GIF: 2017-06-22 17_47_06.gif)

To build a VR player, two pieces of functionality are needed:

1. A player that can extract video frames in real time.
2. A view that renders each frame as a panoramic image (a rough sketch of this piece follows this list).
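
As a rough sketch of the second piece (the class and method names here are hypothetical, not the code from this series): place the camera at the centre of a SceneKit sphere and paint each decoded frame onto the inside of that sphere through the material's diffuse contents.

// A minimal panoramic-view sketch (hypothetical names, not the final view in this series).
// The camera sits at the centre of a sphere; each video frame is applied to the
// inside of that sphere via the material's diffuse contents.
#import <SceneKit/SceneKit.h>
#import <UIKit/UIKit.h>

@interface PanoramaSketchView : SCNView
@property (nonatomic, strong) SCNNode *sphereNode;
- (void)updateWithFrameImage:(UIImage *)image;
@end

@implementation PanoramaSketchView

- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        self.scene = [SCNScene scene];

        // Camera at the origin, i.e. at the centre of the sphere.
        SCNNode *cameraNode = [SCNNode node];
        cameraNode.camera = [SCNCamera camera];
        cameraNode.position = SCNVector3Make(0, 0, 0);
        [self.scene.rootNode addChildNode:cameraNode];

        // Sphere that carries the video frames on its inside surface.
        SCNSphere *sphere = [SCNSphere sphereWithRadius:10.0];
        sphere.firstMaterial.cullMode = SCNCullModeFront; // render the inside faces
        sphere.firstMaterial.doubleSided = YES;
        self.sphereNode = [SCNNode nodeWithGeometry:sphere];
        [self.scene.rootNode addChildNode:self.sphereNode];

        self.allowsCameraControl = YES; // drag to look around
    }
    return self;
}

// Called with each frame delivered by the player (e.g. from the
// videoPlayer:displaylinkCallbackImage: delegate method shown later).
- (void)updateWithFrameImage:(UIImage *)image {
    self.sphereNode.geometry.firstMaterial.diffuse.contents = image;
}

@end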

SCN3DVideoAdatper — the video player

It decodes the video into individual frame images and is built on the AVFoundation framework.

Below are the relevant methods.

//
//  SCN3DVideoAdatper.h
//  SCN3DPlayer
//
//  Created by 俞涛涛 on 16/11/11.
//  Copyright © 2016年 俞涛涛. All rights reserved.
//

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>


/////////////////////////////////////////////////////////////////////////////////////////////////////////

@class SCN3DVideoAdatper;

@protocol SCN3DVideoAdatperDelegate <NSObject>

@optional

/**
 The video is ready to play.
 When the SCN3DVideoAdatperDelegate protocol is implemented, this is the first method to be called.

 @param videoAdatper the SCN3DVideoAdatper instance
 */
- (void)videoPlayerIsReadyToPlayVideo:(SCN3DVideoAdatper *)videoAdatper;

/**
 Playback finished.
 Called after the video has played to the end.

 @param videoAdatper the SCN3DVideoAdatper instance
 */
- (void)videoPlayerDidReachEnd:(SCN3DVideoAdatper *)videoAdatper;

/**
 Playback time observer.
 Called repeatedly while the video is playing, reporting the current playback time.

 @param videoAdatper the SCN3DVideoAdatper instance
 @param cmTime the current playback time as a CMTime
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper timeDidChange:(CMTime)cmTime;

/**
 Buffered time observer.
 Called while the video is playing, reporting how much of the video has been buffered so far.

 @param videoAdatper the SCN3DVideoAdatper instance
 @param duration the buffered duration, in seconds
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper loadedTimeRangeDidChange:(float)duration;

/**
 Error observer.
 Called when video playback fails.

 @param videoAdatper the SCN3DVideoAdatper instance
 @param error the NSError describing the failure
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper didFailWithError:(NSError *)error;

/**
 Per-frame callback.
 Called while the video is playing, delivering each decoded frame as a UIImage.

 @param videoAdatper the SCN3DVideoAdatper instance
 @param videoImage the current frame as a UIImage
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper displaylinkCallbackImage:(UIImage *)videoImage;

@end

/////////////////////////////////////////////////////////////////////////////////////////////////////////

@interface SCN3DVideoAdatper : NSObject

@property (nonatomic, weak) id<SCN3DVideoAdatperDelegate> delegate;
@property (nonatomic, strong, readonly) AVPlayer     *player;
@property (nonatomic, strong, readonly) AVPlayerItem *playerItem;
@property (nonatomic, strong, readonly) AVPlayerItemVideoOutput *output;
@property (nonatomic, assign, getter=isPlaying, readonly) BOOL playing;
@property (nonatomic, assign, getter=isLooping) BOOL looping;
@property (nonatomic, assign, getter=isMuted) BOOL muted;

// Setting

- (void)setURL:(NSURL *)URL;
- (void)setPlayerItem:(AVPlayerItem *)playerItem;
- (void)setAsset:(AVAsset *)asset;


// Start playback
- (void)play;
// Pause playback
- (void)pause;
// Reset the player
- (void)reset;

/**
 Seek to the given time and continue playback from there.

 @param time the target time, in seconds
 @param completion called when the seek finishes
 */
- (void)seekToTime:(float)time completion:(void (^)(void))completion;

// Set the volume
- (void)setVolume:(float)volume;
// Fade the volume in
- (void)fadeInVolume;
// Fade the volume out
- (void)fadeOutVolume;

// Add the CADisplayLink (starts the per-frame callbacks)
- (void)addDisplaylink;
// Remove the CADisplayLink (stops the per-frame callbacks)
- (void)removeDisplaylink;

@end
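
Before diving into the implementation, here is a minimal usage sketch of the adapter declared above. The surrounding view controller, the videoAdatper and panoramaView properties (the panoramic-view sketch from earlier), and the sample URL are assumptions for illustration; only the adapter API itself comes from the header.

// Minimal usage sketch of SCN3DVideoAdatper inside a hypothetical view controller.
- (void)setupPlayer {
    self.videoAdatper = [[SCN3DVideoAdatper alloc] init];
    self.videoAdatper.delegate = self;
    self.videoAdatper.looping = YES;
    [self.videoAdatper setURL:[NSURL URLWithString:@"https://example.com/panorama.mp4"]]; // placeholder URL
    [self.videoAdatper addDisplaylink]; // start delivering frames
}

#pragma mark - SCN3DVideoAdatperDelegate

- (void)videoPlayerIsReadyToPlayVideo:(SCN3DVideoAdatper *)videoAdatper {
    [videoAdatper play];
}

- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper displaylinkCallbackImage:(UIImage *)videoImage {
    // Hand each frame to the SceneKit view for panoramic rendering.
    [self.panoramaView updateWithFrameImage:videoImage];
}

- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper timeDidChange:(CMTime)cmTime {
    NSLog(@"current time: %.2fs", CMTimeGetSeconds(cmTime));
}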

Now let's walk through the main method implementations.

Adding and removing player observers

/////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark - Player Observers
/////////////////////////////////////////////////////////////////////////////////////////////////////////

- (void)addPlayerObservers {
    [self.player addObserver:self
                  forKeyPath:NSStringFromSelector(@selector(rate))
                     options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                     context:VideoPlayer_PlayerRateChangedContext];
}

- (void)removePlayerObservers {
    @try {
        [self.player removeObserver:self
                         forKeyPath:NSStringFromSelector(@selector(rate))
                            context:VideoPlayer_PlayerRateChangedContext];
    }
    @catch (NSException *exception) {
        NSLog(@"Exception removing observer: %@", exception);
    }
}

/////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark - PlayerItem Observers
/////////////////////////////////////////////////////////////////////////////////////////////////////////

- (void)addPlayerItemObservers:(AVPlayerItem *)playerItem {
    [playerItem addObserver:self
                 forKeyPath:NSStringFromSelector(@selector(status))
                    options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
                    context:VideoPlayer_PlayerItemStatusContext];
    
    [playerItem addObserver:self
                 forKeyPath:NSStringFromSelector(@selector(loadedTimeRanges))
                    options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                    context:VideoPlayer_PlayerItemLoadedTimeRangesContext];
    
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidPlayToEndTime:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:playerItem];
}

- (void)removePlayerItemObservers:(AVPlayerItem *)playerItem {
    [playerItem cancelPendingSeeks];
    @try {
        [playerItem removeObserver:self
                        forKeyPath:NSStringFromSelector(@selector(status))
                           context:VideoPlayer_PlayerItemStatusContext];
    }
    @catch (NSException *exception) {
        NSLog(@"Exception removing observer: %@", exception);
    }
    
    @try {
        [playerItem removeObserver:self
                        forKeyPath:NSStringFromSelector(@selector(loadedTimeRanges))
                           context:VideoPlayer_PlayerItemLoadedTimeRangesContext];
    }
    @catch (NSException *exception) {
        NSLog(@"Exception removing observer: %@", exception);
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:playerItem];
}

/////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark - Time Observer
/////////////////////////////////////////////////////////////////////////////////////////////////////////

- (void)addTimeObserver {
    if (self.timeObserverToken || self.player == nil) {
        return;
    }
    
    __weak typeof (self) weakSelf = self;
    self.timeObserverToken = [self.player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(TimeObserverInterval, NSEC_PER_SEC)
                                                                       queue:dispatch_get_main_queue()
                                                                  usingBlock:^(CMTime time)
    {
        __strong typeof (self) strongSelf = weakSelf;
        if (!strongSelf) {
            return;
        }
        if ([strongSelf.delegate respondsToSelector:@selector(videoPlayer:timeDidChange:)]) {
            [strongSelf.delegate videoPlayer:strongSelf timeDidChange:time];
        }
    }];
}
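
The KVO contexts and interval constants referenced above are not shown in the excerpt. A plausible set of definitions, with assumed values, would be:

// Assumed definitions for the constants used above (not shown in the excerpt).
static void *VideoPlayer_PlayerRateChangedContext          = &VideoPlayer_PlayerRateChangedContext;
static void *VideoPlayer_PlayerItemStatusContext           = &VideoPlayer_PlayerItemStatusContext;
static void *VideoPlayer_PlayerItemLoadedTimeRangesContext = &VideoPlayer_PlayerItemLoadedTimeRangesContext;

static const NSTimeInterval TimeObserverInterval      = 0.1; // assumed: periodic time-callback interval, seconds
static const NSTimeInterval DefaultVolumeFadeDuration = 1.0; // assumed: total fade in/out duration, seconds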

Checking whether playback has reached the end, and computing the buffered duration

- (BOOL)isAtEndTime { // TODO: this override is a bit awkward; something could be wrong here [AH]
    if (self.player && self.player.currentItem) {
        if (_isAtEndTime) {
            return _isAtEndTime;
        }
        
        float currentTime = 0.0f;
        if (CMTIME_IS_INVALID(self.player.currentTime) == NO) {
            currentTime = CMTimeGetSeconds(self.player.currentTime);
        }
        
        float videoDuration = 0.0f;
        if (CMTIME_IS_INVALID(self.player.currentItem.duration) == NO) {
            videoDuration = CMTimeGetSeconds(self.player.currentItem.duration);
        }
        
        if (currentTime > 0.0f && videoDuration > 0.0f) {
            if (fabs(currentTime - videoDuration) <= 0.01f) {
                return YES;
            }
        }
    }
    return NO;
}
// Buffered duration of the video, in seconds
- (float)calcLoadedDuration {
    float loadedDuration = 0.0f;
    if (self.player && self.player.currentItem) {
        NSArray *loadedTimeRanges = self.player.currentItem.loadedTimeRanges;
        
        if (loadedTimeRanges && [loadedTimeRanges count]) {
            CMTimeRange timeRange = [[loadedTimeRanges firstObject] CMTimeRangeValue];
            float startSeconds = CMTimeGetSeconds(timeRange.start);
            float durationSeconds = CMTimeGetSeconds(timeRange.duration);
            loadedDuration = startSeconds + durationSeconds;
        }
    }
    return loadedDuration;
}
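
The excerpt also omits the KVO callback that ties these observers to the delegate. A sketch of what it could look like (the actual implementation in SCN3DVideoAdatper.m may differ) is:

// Sketch of the KVO callback wiring the observers above to the delegate.
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if (context == VideoPlayer_PlayerItemStatusContext) {
        AVPlayerItemStatus status = [change[NSKeyValueChangeNewKey] integerValue];
        if (status == AVPlayerItemStatusReadyToPlay) {
            if ([self.delegate respondsToSelector:@selector(videoPlayerIsReadyToPlayVideo:)]) {
                [self.delegate videoPlayerIsReadyToPlayVideo:self];
            }
        } else if (status == AVPlayerItemStatusFailed) {
            if ([self.delegate respondsToSelector:@selector(videoPlayer:didFailWithError:)]) {
                [self.delegate videoPlayer:self didFailWithError:self.playerItem.error];
            }
        }
    } else if (context == VideoPlayer_PlayerItemLoadedTimeRangesContext) {
        float loadedDuration = [self calcLoadedDuration];
        if ([self.delegate respondsToSelector:@selector(videoPlayer:loadedTimeRangeDidChange:)]) {
            [self.delegate videoPlayer:self loadedTimeRangeDidChange:loadedDuration];
        }
    } else if (context == VideoPlayer_PlayerRateChangedContext) {
        // rate changed: the isPlaying flag could be updated here
    } else {
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}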

Capturing each video frame in real time


- (void)addDisplaylink {
    self.displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
    [self.displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}

- (void)removeDisplaylink {
    if (self.displayLink) {
        [self.displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
        self.displayLink = nil;
    }
}
- (void)displayLinkCallback:(CADisplayLink *)sender {
    @autoreleasepool {
        CMTime outputItemTime = [self.output itemTimeForHostTime:CACurrentMediaTime()];
        if([self.output hasNewPixelBufferForItemTime:outputItemTime]) {
            CVPixelBufferRef bufferRef = [self.output copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
        
            if (bufferRef != nil) {
                UIImage *videoImage = [self pixelBufferToImage:bufferRef];
                if ([self.delegate respondsToSelector:@selector(videoPlayer:displaylinkCallbackImage:)]) {
                    [self.delegate videoPlayer:self displaylinkCallbackImage:videoImage];
                }
                CFRelease(bufferRef);
            }
        }
    }

}

- (UIImage *)pixelBufferToImage:(CVPixelBufferRef)bufferRef {
    CIImage   *ciImage     = [CIImage imageWithCVPixelBuffer:bufferRef];
    CIContext *tempContext = [CIContext contextWithOptions:nil];
    CGFloat    videoWidth  = CVPixelBufferGetWidth(bufferRef);
    CGFloat    videoHeight = CVPixelBufferGetHeight(bufferRef);
    CGImageRef videoImage  = [tempContext createCGImage:ciImage fromRect:CGRectMake(0, 0, videoWidth, videoHeight)];
    
    UIImage *image = [UIImage imageWithCGImage:videoImage];
    CGImageRelease(videoImage);
    return image;
}
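
Nothing above shows where self.output comes from. One way to create the AVPlayerItemVideoOutput and attach it to the player item (the method name here is hypothetical; the actual setup in the source may differ) is:

// Sketch of creating the AVPlayerItemVideoOutput and attaching it to the item.
- (void)setupVideoOutputForItem:(AVPlayerItem *)playerItem {
    NSDictionary *attributes = @{
        (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    };
    _output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes];
    [playerItem addOutput:_output];
}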

Volume-related settings

- (void)setVolume:(float)volume {
    [self cancelFadeVolume];
    self.player.volume = volume;
}

- (void)cancelFadeVolume {
    [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(fadeInVolume) object:nil];
    [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(fadeOutVolume) object:nil];
}

- (void)fadeInVolume {
    if (self.player == nil) {
        return;
    }
    [self cancelFadeVolume];
    
    if (self.player.volume >= 1.0f - 0.01f) {
        self.player.volume = 1.0f;
    }
    else {
        self.player.volume += 1.0f/10.0f;
        [self performSelector:@selector(fadeInVolume) withObject:nil afterDelay:DefaultVolumeFadeDuration/10.0f];
    }
}

- (void)fadeOutVolume {
    if (self.player == nil) {
        return;
    }
    [self cancelFadeVolume];
    
    if (self.player.volume <= 0.01f) {
        self.player.volume = 0.0f;
    }
    else {
        self.player.volume -= 1.0f/10.0f;
        [self performSelector:@selector(fadeOutVolume) withObject:nil afterDelay:DefaultVolumeFadeDuration/10.0f];
    }
}

Source code download

If you like it, please give it a like and a star. If anything in this article is wrong, feel free to point it out.
