1. Lazy-load the capture objects
lazy var captureSession : AVCaptureSession = {
    let captureSessionTmp = AVCaptureSession()
    if captureSessionTmp.canSetSessionPreset(AVCaptureSession.Preset.photo) {
        captureSessionTmp.sessionPreset = AVCaptureSession.Preset.photo
    }
    return captureSessionTmp
}()
// MARK: previewLayer
lazy var captureVideoPreviewLayer : AVCaptureVideoPreviewLayer = {
    let captureVideoPreviewLayerTmp = AVCaptureVideoPreviewLayer(session: captureSession)
    captureVideoPreviewLayerTmp.videoGravity = AVLayerVideoGravity.resizeAspectFill
    return captureVideoPreviewLayerTmp
}()
// MARK: camera
lazy var captureDeviceCamera : AVCaptureDevice? = {
    return captureDeviceInputCamera?.device
}()
// MARK: video input
lazy var captureDeviceInputCamera : AVCaptureDeviceInput? = {
    // Back camera; nil on devices without one (e.g. the simulator)
    guard let captureDevice = getCameraDeviceWithPosition(position: AVCaptureDevice.Position.back) else { return nil }
    do {
        return try AVCaptureDeviceInput(device: captureDevice)
    } catch {
        print(error)
        return nil
    }
}()
// MARK: audio input
lazy var captureDeviceInputAudio : AVCaptureDeviceInput? = {
    // Default microphone; nil if none is available or the input cannot be created
    guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.audio) else { return nil }
    do {
        return try AVCaptureDeviceInput(device: captureDevice)
    } catch {
        print(error)
        return nil
    }
}()
// MARK: movie output
lazy var captureMovieFileOutput : AVCaptureMovieFileOutput = {
    // The video connection only becomes available after this output has been
    // added to the session, so stabilization and orientation are configured in step 2.
    return AVCaptureMovieFileOutput()
}()
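getCameraDeviceWithPosition(position:) is referenced in the camera-input block above but its implementation is not shown in this post; a minimal sketch of such a helper, assuming iOS 10+ and the built-in wide-angle camera, could look like this:

func getCameraDeviceWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
    // Discover the wide-angle camera that matches the requested position
    let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                            mediaType: AVMediaType.video,
                                                            position: position)
    return discoverySession.devices.first
}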
2. Initialize the camera
if let cameraInput = captureDeviceInputCamera, captureSession.canAddInput(cameraInput) {
    captureSession.addInput(cameraInput)
}
if let audioInput = captureDeviceInputAudio, captureSession.canAddInput(audioInput) {
    captureSession.addInput(audioInput)
}
if captureSession.canAddOutput(captureMovieFileOutput) {
    captureSession.addOutput(captureMovieFileOutput)
    // Configure the video connection now that the output is attached to the session
    if let captureConnection = captureMovieFileOutput.connection(with: AVMediaType.video) {
        if captureConnection.isVideoStabilizationSupported {
            captureConnection.preferredVideoStabilizationMode = .auto
        }
        if let orientation = captureVideoPreviewLayer.connection?.videoOrientation {
            captureConnection.videoOrientation = orientation
        }
    }
}
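The preview layer from step 1 still has to be attached to a view before the camera image shows up; that step is not part of this walkthrough, but a minimal sketch (assuming this code lives in a UIViewController) would be:

// Show the camera feed by inserting the preview layer under the controller's view
captureVideoPreviewLayer.frame = view.bounds
view.layer.insertSublayer(captureVideoPreviewLayer, at: 0)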
3. Start the session
captureSession.startRunning()
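startRunning() is a blocking call, and the camera only delivers frames once the user has granted access, so in practice the permission request (with the NSCameraUsageDescription / NSMicrophoneUsageDescription keys in Info.plist) and the session start are usually handled like this sketch rather than called directly on the main thread:

// Request camera access first; startRunning() blocks, so keep it off the main queue
AVCaptureDevice.requestAccess(for: AVMediaType.video) { granted in
    guard granted else { return }
    DispatchQueue.global(qos: .userInitiated).async {
        self.captureSession.startRunning()
    }
}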
4. Start recording video
if captureSession.canSetSessionPreset(AVCaptureSession.Preset.high) {
    captureSession.sessionPreset = AVCaptureSession.Preset.high
}
let videoUrl = URL(fileURLWithPath: MTVideoFileManager.shared.videoFilePath())
captureMovieFileOutput.startRecording(to: videoUrl, recordingDelegate: self)
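MTVideoFileManager is the author's own helper and its implementation is not included in this post; judging purely from how it is used here (videoFilePath() supplies a writable recording path, and clearFileWithUrl(url:) removes the file in step 7), a hypothetical stand-in might be:

class MTVideoFileManager {
    static let shared = MTVideoFileManager()

    // Hypothetical: a unique .mov path in the temporary directory for each recording
    func videoFilePath() -> String {
        return NSTemporaryDirectory() + UUID().uuidString + ".mov"
    }

    // Hypothetical: delete the temporary recording once it is no longer needed
    func clearFileWithUrl(url: URL) {
        try? FileManager.default.removeItem(at: url)
    }
}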
5. Stop recording video
captureMovieFileOutput.stopRecording()
6. Stop the session
captureSession.stopRunning()
7. Compose the video and save it to the photo album
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    let asset = AVAsset(url: outputFileURL)
    let composition = AVMutableComposition()
    guard let videoAssetTrack = asset.tracks(withMediaType: AVMediaType.video).first,
          let videoTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }
    do {
        // Copy the recorded video track into the composition
        // (only video is composed here; insert the audio track the same way if sound should be kept)
        try videoTrack.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration: videoAssetTrack.timeRange.duration), of: videoAssetTrack, at: kCMTimeZero)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
        let totalDuration = CMTimeAdd(kCMTimeZero, videoAssetTrack.timeRange.duration)
        // Translate the frame by half of its natural size before rendering into the square canvas
        let t1 = CGAffineTransform(translationX: -videoAssetTrack.naturalSize.width / 2, y: -videoAssetTrack.naturalSize.height / 2)
        layerInstruction.setTransform(t1, at: kCMTimeZero)
        // Use the shorter side of the video as the square render size
        var renderSize = CGSize(width: 0, height: 0)
        renderSize.width = max(renderSize.width, videoAssetTrack.naturalSize.height)
        renderSize.height = max(renderSize.height, videoAssetTrack.naturalSize.width)
        let renderW = min(renderSize.width, renderSize.height)
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: totalDuration)
        instruction.layerInstructions = [layerInstruction]
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [instruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: renderW, height: renderW)
        // Caveat: AVAssetExportSession will not overwrite an existing file, so exporting back to
        // outputFileURL (the just-recorded file) fails; use a separate export path in practice.
        let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        exporter?.videoComposition = mainComposition
        exporter?.outputURL = outputFileURL
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.outputFileType = AVFileType.mov
        exporter?.exportAsynchronously {
            DispatchQueue.main.async {
                // Save the exported movie to the photo album
                let lib = ALAssetsLibrary()
                lib.writeVideoAtPath(toSavedPhotosAlbum: outputFileURL, completionBlock: { (url, saveError) in
                    if let saveError = saveError {
                        print(saveError)
                        self.delegate?.recordFinished(error: saveError as NSError, url: nil)
                    } else if let url = url {
                        print(url)
                        // Remove the temporary file once the album copy exists
                        MTVideoFileManager.shared.clearFileWithUrl(url: outputFileURL)
                        self.delegate?.recordFinished(error: nil, url: outputFileURL)
                    }
                })
            }
        }
    } catch {
        print(error)
    }
}
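ALAssetsLibrary has been deprecated since iOS 9; the same save can be done with the Photos framework. A sketch of the equivalent step using PHPhotoLibrary (assuming photo-library permission has already been granted) looks like this:

import Photos

PHPhotoLibrary.shared().performChanges({
    // Add the exported movie file to the user's photo library
    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
}, completionHandler: { success, saveError in
    DispatchQueue.main.async {
        if success {
            MTVideoFileManager.shared.clearFileWithUrl(url: outputFileURL)
            self.delegate?.recordFinished(error: nil, url: outputFileURL)
        } else if let saveError = saveError {
            self.delegate?.recordFinished(error: saveError as NSError, url: nil)
        }
    }
})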