Building a Custom Player with AVAssetReader

Frameworks to import

import UIKit
import CoreMedia
import AVFoundation
import MetalKit

Player status enum

enum GSPlayerStatus: Int{
    case notInited = 0
    case playing
    case pause
    case end
    case fail
}

Delegate protocol

protocol GSPlayerViewDelegate: NSObjectProtocol{
    func playerView(playerView: GSPlayerView, didChange currentTime: CMTime)
    func playerViewDidMoveToEnd(needReplay: Bool)
}
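
For reference, a minimal sketch of a delegate conformance. PlayerDemoViewController is a hypothetical view controller (see the usage sketch at the end of this article); here it simply prints the playback progress:

extension PlayerDemoViewController: GSPlayerViewDelegate{
    func playerView(playerView: GSPlayerView, didChange currentTime: CMTime){
        // e.g. drive a progress bar or a time label from here
        print("current time: \(currentTime.seconds) / \(playerView.duration.seconds)")
    }
    
    func playerViewDidMoveToEnd(needReplay: Bool){
        print("playback ended, will replay: \(needReplay)")
    }
}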

Media resource entity class

import UIKit
import AVFoundation

open class GSMediaResource: NSObject{
    
    open var asset: AVAsset
    /// The time range of the asset to play
    open var timeRange: CMTimeRange
    open var videoCompostion: AVVideoComposition?
    open var audioMix: AVAudioMix?
    
    public init(asset: AVAsset){
        self.asset = asset
        self.timeRange = CMTimeRange(start: CMTime.zero, end: asset.duration)
        super.init()
    }
    
    /// Returns a copy of this resource
    open func getCopyObject() -> GSMediaResource{
        let copyAsset = asset.copy() as! AVAsset
        let resource = GSMediaResource(asset: copyAsset)
        resource.timeRange = timeRange
        resource.videoCompostion = videoCompostion?.copy() as? AVVideoComposition
        resource.audioMix = audioMix?.copy() as? AVAudioMix
        return resource
    }
    
    /// Returns the render size of the resource
    func getResourceSize() -> CGSize{
        if let videoCompostion = videoCompostion{
            return videoCompostion.renderSize
        }
        if let track = asset.tracks(withMediaType: .video).first{
            return track.naturalSize
        }
        
        return CGSize.zero
    }
}
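
A quick usage sketch for the resource class; the bundled file name here is an assumption:

let url = Bundle.main.url(forResource: "sample", withExtension: "mp4")!   // hypothetical bundled file
let resource = GSMediaResource(asset: AVURLAsset(url: url))
// Optionally restrict playback to the first 5 seconds
resource.timeRange = CMTimeRange(start: .zero,
                                 duration: CMTime(seconds: 5, preferredTimescale: 600))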

Player class

class GSPlayerView: MTKView {
    /// If set to a value less than 1, playback loops indefinitely
    var repeatCount: Int = 1
    private(set) var hasPlayedCount: Int = 0
    private(set) var duration: CMTime = CMTime.zero
    private(set) var currentPlayTime: CMTime = CMTime.zero{
        didSet{
            // Notify the delegate whenever the current play time changes
            playerDelegate?.playerView(playerView: self, didChange: currentPlayTime)
        }
    }
    private(set) var status: GSPlayerStatus = .notInited
    // The raw, unfiltered CIImage currently being displayed
    private(set) var image: CIImage?{
        didSet{
            self.setNeedsDisplay()
        }
    }
    private var ciContext: CIContext?
    private var commandQueue: MTLCommandQueue?
    private var filters: [GSFilter] = []
    
    weak var playerDelegate: GSPlayerViewDelegate?

    var resource: GSMediaResource?{
        didSet{
            // The AVAssetReader and AVAssetReaderTrackOutput must be re-initialized every time the resource changes
            self.drawableSize = resource?.getResourceSize() ?? self.frame.size
            reConfigReader(timeRange: resource?.timeRange ?? CMTimeRange.zero)
            startDecode()
        }
    }
    /// Interval between frame redraws
    private var frameDuration: Double = 0
    private(set) var reader: AVAssetReader?
    private(set) var output: AVAssetReaderTrackOutput?
    private var timer: DispatchSourceTimer?
    
    init(resource: GSMediaResource?, frame frameRect: CGRect, device: MTLDevice?) {
        self.resource = resource
        super.init(frame: frameRect, device: device)
        self.drawableSize = resource?.getResourceSize() ?? self.frame.size
        self.ciContext = CIContext(mtlDevice: self.device!, options: [CIContextOption.workingColorSpace: NSNull()])
        self.commandQueue = self.device?.makeCommandQueue()
        self.isPaused = true
        self.enableSetNeedsDisplay = true
        self.framebufferOnly = false  // must be false, otherwise CIContext cannot render into the drawable's texture
        self.contentMode = .scaleAspectFit
    }
    
    required init(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
    
    /// Renders the CIImage into the MTKView. The raw CIImage can be run through CIFilter processing here before it is displayed
    override func draw(_ rect: CGRect) {
        super.draw(rect)
        guard let image = updateImage(sourceImage: image) else { return }
        self.drawableSize = image.extent.size
        guard let commandBuffer = self.commandQueue?.makeCommandBuffer() else { return }
        guard let currentDrawable = self.currentDrawable else { return }
        let drawingTexture = currentDrawable.texture
        
        ciContext?.render(image, to: drawingTexture, commandBuffer: commandBuffer, bounds: CGRect(x: 0, y: 0, width: image.extent.width, height: image.extent.height), colorSpace: CGColorSpaceCreateDeviceRGB())
        commandBuffer.present(currentDrawable)
        commandBuffer.commit()
    }
    
    // MARK: Internal methods
    func prepareToPlay(){
        
        guard let resource = resource else {
            return
        }
        
        let asset = resource.asset
        
        var videoReader: AVAssetReader?
        do{
            videoReader = try AVAssetReader(asset: asset)
        }catch{
            print(error)
        }
        
        guard let videoReader = videoReader else { return }
        guard let videoTrack = asset.tracks(withMediaType: .video).first else { return }

        let outputSetting: [String: Int] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        let output = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSetting)
        if videoReader.canAdd(output){
            videoReader.add(output)
        }else{
            return
        }
        
        output.supportsRandomAccess = true
        videoReader.timeRange = resource.timeRange
        self.reader = videoReader
        self.output = output
        
        self.frameDuration = CMTimeGetSeconds(videoTrack.minFrameDuration)
        self.duration = asset.duration
        // Set up the frame timer
        self.createTimer()
    }
    
    // GSFilter is a custom filter-processing class of mine
    func setFilters(filters: [GSFilter]){
        self.filters = filters
        if self.status != .playing{
            if Thread.isMainThread{
                self.setNeedsDisplay()
            }else{
                DispatchQueue.main.sync{
                    self.setNeedsDisplay()
                }
            }
        }
    }
    
    func play(){
        switch status {
        case .notInited:
            self.startDecode()
        case .playing:
            return
        case .pause:
            self.status = .playing
            self.createTimer()
        case .end, .fail:
            self.status = .playing
            self.rePlay()
        }
    }
    
    func rePlay(){
        self.seekTime(time: resource?.timeRange.start ?? CMTime.zero)
        self.play()
    }
    
    func pause(){
        self.status = .pause
        self.timer?.cancel()
    }
    
    func seekTime(time: CMTime){
        guard let resource = resource else { return }

        if status == .end{
            let range = CMTimeRange(start: time, end: resource.timeRange.end)
            self.output?.reset(forReadingTimeRanges: [NSValue(timeRange: range)])
        }else{
            self.timer?.cancel()
            if self.reader?.status == .reading{
                self.reader?.cancelReading()
            }
            self.currentPlayTime = time
            self.reConfigReader(timeRange: CMTimeRange(start: time, end: resource.timeRange.end))
            self.startDecode()
        }
    }
    
    func reConfigReader(timeRange: CMTimeRange){
        guard let resource = resource else { return }
        let asset = resource.asset

        var videoReader: AVAssetReader?
        do{
            videoReader = try AVAssetReader(asset: asset)
        }catch{
            print(error)
        }
        guard let videoReader = videoReader else { return }
        guard let videoTrack = asset.tracks(withMediaType: .video).first else { return }
        let outputSetting: [String: Int] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        let output = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSetting)
        if videoReader.canAdd(output){
            videoReader.add(output)
        }else{
            return
        }
        
        output.supportsRandomAccess = true
        // Restrict decoding to the requested time range
        videoReader.timeRange = timeRange
        self.reader = videoReader
        self.output = output
        
        self.frameDuration = videoTrack.minFrameDuration.seconds
        MYLog(message: self.frameDuration)
    }
    
    func deleteTimer(){
        timer?.cancel()
        timer = nil
    }
    
    // MARK: Private Methods
    private func startDecode(){
        self.reader?.startReading()
        self.status = .playing
        self.createTimer()
    }
    
    private func readNextFrame(){
    
        guard let reader = reader else { return }
        
        if self.status != .playing{
            return
        }
    
        if let sampleBuffer = self.output?.copyNextSampleBuffer(){
            self.decodeSampleBufferToCIImage(sampleBuffer: sampleBuffer)
            if currentPlayTime.seconds >= (resource?.timeRange.end.seconds ?? 0){
                self.resetPlay()
            }
        }else{
            if reader.status == .reading{
                self.status = .end
                self.resetPlay()
                self.playerDelegate?.playerViewDidMoveToEnd(needReplay: self.status == .playing)
            }else{
                self.status = .fail
            }
        }
    }
    
    /// Extracts the presentation time and image buffer from the sample buffer and converts it to a CIImage
    private func decodeSampleBufferToCIImage(sampleBuffer: CMSampleBuffer?){
        guard let sampleBuffer = sampleBuffer else { return }
        
        currentPlayTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
        CMTimeShow(currentPlayTime)
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        self.image = CIImage(cvImageBuffer: imageBuffer)
    }
    
    /// Decodes and displays the first frame without starting playback
    private func displayFirstImage(){
        if let sampleBuffer = self.output?.copyNextSampleBuffer(){
            self.decodeSampleBufferToCIImage(sampleBuffer: sampleBuffer)
        }
    }
    
    private func updateImage(sourceImage: CIImage?) -> CIImage?{
        var outputImage: CIImage? = sourceImage
        for filter in filters{
            filter.currentTime = currentPlayTime
            if filter.type == .videoMerge{
                let videoFilter = filter as! VideoMergeFilter
                filter.inputImage = outputImage?.transformed(by: videoFilter.transform.inverted())
            }else{
                filter.inputImage = outputImage
            }
            outputImage = filter.outputImage
        }
        
        return outputImage
    }
    
    private func createTimer(){
        timer?.cancel()
        timer = DispatchSource.makeTimerSource()
        timer?.schedule(deadline: .now(), repeating: frameDuration)
        timer?.setEventHandler(handler: {
            executeSyncInMain {
                self.readNextFrame()
            }
        })
        timer?.resume()
    }
    
    private func resetPlay(){
        if repeatCount > 0{
            if hasPlayedCount < repeatCount{
                hasPlayedCount += 1
                rePlay()
            }else{
                hasPlayedCount = 0
                pause()
            }
        }else{
            rePlay()
        }
    }
}
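
The player calls an executeSyncInMain helper that is not shown above (MYLog is likewise an unshown logging helper; a plain print would do). A minimal sketch of what such a helper could look like, as an assumption rather than the author's implementation:

func executeSyncInMain(_ block: () -> Void){
    // Run the block on the main thread, dispatching synchronously only when needed to avoid deadlocks
    if Thread.isMainThread{
        block()
    }else{
        DispatchQueue.main.sync(execute: block)
    }
}

Finally, a sketch of how the player might be wired up from a view controller; the view-controller name and the bundled video file are assumptions:

import UIKit
import AVFoundation
import MetalKit

class PlayerDemoViewController: UIViewController{

    private var playerView: GSPlayerView?

    override func viewDidLoad(){
        super.viewDidLoad()

        // "sample.mp4" is a hypothetical local file bundled with the app
        guard let videoURL = Bundle.main.url(forResource: "sample", withExtension: "mp4") else { return }
        let resource = GSMediaResource(asset: AVURLAsset(url: videoURL))

        let playerView = GSPlayerView(resource: resource,
                                      frame: view.bounds,
                                      device: MTLCreateSystemDefaultDevice())
        playerView.playerDelegate = self        // see the delegate conformance sketched earlier
        playerView.repeatCount = 0              // less than 1 loops indefinitely
        view.addSubview(playerView)
        self.playerView = playerView

        playerView.prepareToPlay()
        playerView.play()
    }
}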
