iOS (Swift): A Custom Slow-Motion (Slo-Mo) Camera

Framework used: AVFoundation

Implementation steps

1. Set the FPS. If the captured video has frame-rate requirements, this step is mandatory; the default is 30 FPS. Here we use the maximum, 240 FPS. (The snippet assumes captureSession and device are already created; see the setup sketch after the notes below.)

// device: the AVCaptureDevice providing the video input (see the setup sketch below)
captureSession.beginConfiguration()
captureSession.sessionPreset = .hd1280x720

// Pick the format whose frame-rate range has the highest maxFrameRate (240 fps on supported devices)
var bestFormat: AVCaptureDevice.Format?
var maxRate: AVFrameRateRange?
for format in device.formats {
    for range in format.videoSupportedFrameRateRanges {
        if (maxRate?.maxFrameRate ?? 0) < range.maxFrameRate {
            maxRate = range
            bestFormat = format
        }
    }
}

if let bestFormat = bestFormat, let maxRange = maxRate {
    do {
        // activeFormat may only be changed while the device configuration is locked
        try device.lockForConfiguration()
        device.activeFormat = bestFormat
        // minFrameDuration of the range corresponds to its maximum frame rate
        let duration = maxRange.minFrameDuration
        device.activeVideoMaxFrameDuration = duration
        device.activeVideoMinFrameDuration = duration
        device.unlockForConfiguration()
    } catch {
        print(error.localizedDescription)
    }
}

captureSession.commitConfiguration()

Notes:

    Changing activeFormat requires calling device.lockForConfiguration() first; otherwise the app will crash.

    Different frame rates support different resolutions; 240 FPS is only available at 1280x720, hence sessionPreset = .hd1280x720.

    After changing device parameters, the session must be committed with captureSession.commitConfiguration(); otherwise the changes do not take effect.
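
The snippets in this article assume that captureSession and device already exist. A minimal setup sketch follows; the variable names, the choice of the back wide-angle camera, and the error handling are assumptions, not part of the original code:

import AVFoundation

// Assumed setup for the snippets above: a capture session plus the back wide-angle camera.
let captureSession = AVCaptureSession()
guard let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back),
      let input = try? AVCaptureDeviceInput(device: device),
      captureSession.canAddInput(input) else {
    fatalError("No suitable capture device/input") // handle this gracefully in a real app
}
captureSession.addInput(input)

// Once the format (step 1) and outputs (step 2) are configured, start the session.
// startRunning() blocks, so Apple recommends calling it off the main queue.
DispatchQueue.global(qos: .userInitiated).async {
    captureSession.startRunning()
}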

2. Record the video using the system class AVCaptureMovieFileOutput; of course, you can also write your own writer.

// Add the output. If you write your own writer or need to process sample buffers
// in real time, use AVCaptureVideoDataOutput instead.
let movieFileOutput = AVCaptureMovieFileOutput()
if captureSession.canAddOutput(movieFileOutput) {
    captureSession.addOutput(movieFileOutput)
    if let connection = movieFileOutput.connection(with: .video) {
        captureSession.beginConfiguration()
        if connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .auto
        }
        if connection.isVideoOrientationSupported {
            connection.videoOrientation = .landscapeRight
        }
        captureSession.commitConfiguration()
    }
}

// AVCaptureFileOutputRecordingDelegate callbacks
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {}

func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {}
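
The original does not show how recording is started or stopped. A minimal sketch, assuming self conforms to AVCaptureFileOutputRecordingDelegate and that the file name slowmo_raw.mov is just a placeholder:

// Start recording to a temporary file; AVCaptureMovieFileOutput fails if the file already exists.
let recordURL = URL(fileURLWithPath: NSTemporaryDirectory() + "slowmo_raw.mov")
try? FileManager.default.removeItem(at: recordURL)
movieFileOutput.startRecording(to: recordURL, recordingDelegate: self)

// Later, e.g. when the user taps the stop button:
movieFileOutput.stopRecording()

When recording finishes, didFinishRecordingTo delivers the URL of the raw 240 FPS file, which is what step 3 operates on.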

3. Slow-motion conversion: scale the frame rate down by a factor of 8 (240 -> 30). Stretching the composition's time range by 8x makes the 240 FPS footage play back at a normal 30 FPS, i.e. eight times slower.
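
The code below reads from videoAsset, which is not defined in the original; presumably it is an AVAsset wrapping the file recorded in step 2 (the outputFileURL delivered to didFinishRecordingTo), for example:

// Hypothetical: wrap the recorded 240 fps file for composition/export.
let videoAsset = AVAsset(url: outputFileURL)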

guard let track = videoAsset.tracks(withMediaType: .video).first else {
    errorCallback?("Failed to read video info")
    return
}

let mixComposition = AVMutableComposition()
let duration = videoAsset.duration

guard
    let composTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
    (try? composTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: duration), of: track, at: .zero)) != nil
    else {
        errorCallback?("Failed to read video info")
        return
    }

// Stretch the timeline by 8x: 240 fps footage plays back at 30 fps
let factor = 8
composTrack.scaleTimeRange(CMTimeRangeMake(start: .zero, duration: duration),
                           toDuration: CMTimeMake(value: duration.value * Int64(factor), timescale: duration.timescale))

guard let export = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset1280x720) else {
    errorCallback?("Failed to read video info")
    return
}

let outURL = URL(fileURLWithPath: NSTemporaryDirectory() + "temp_compose.mp4")
try? FileManager.default.removeItem(at: outURL)

export.outputURL = outURL
export.outputFileType = AVFileType.mp4
export.shouldOptimizeForNetworkUse = true

// weakSelf is reused by the progress timer in step 4
weak var weakSelf = self
export.exportAsynchronously {
    DispatchQueue.main.async {
        if export.status == .completed {
            // The slow-motion file is now at outURL
        }
    }
}

Note: the export callback does not report progress along the way; you have to fetch export.progress yourself.

4. Getting the export progress:

let timer = Timer(timeInterval: 0.1, repeats: true) { (_) in
    weakSelf?.trackCallback?(export.progress)
}
RunLoop.main.add(timer, forMode: .common)
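
As written, the timer never stops. One way to handle that (a sketch; the progress reporting itself is unchanged) is to let the timer invalidate itself once the export has reached a terminal state:

let timer = Timer(timeInterval: 0.1, repeats: true) { (t) in
    weakSelf?.trackCallback?(export.progress)
    // Stop polling once the export has completed, failed, or been cancelled.
    if export.status == .completed || export.status == .failed || export.status == .cancelled {
        t.invalidate()
    }
}
RunLoop.main.add(timer, forMode: .common)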

Summary:

The implementation is straightforward and relies on mature APIs. There are also a few ready-made open-source projects on GitHub; if you want to learn, try implementing it by hand.

References:

1: How to do Slow Motion video in IOS

2: Handling Frame Drops with AVCaptureVideoDataOutput
