项目demo
tips:建议参照我的demo进行了解,如果有帮助到你的话请帮我点亮 star, 谢谢啦~
类似于微信的 点击拍照 , 长按拍视频的功能.
导入框架
#import <AVFoundation/AVFoundation.h>
在需要进行录屏的时候,创建 视频流 和 音频流 的 输入输出流, 代码如下:
// Build the camera/microphone inputs and the raw sample-buffer outputs.
// Use firstObject (nil-safe) instead of [0], which throws NSRangeException on
// hardware without a camera/mic (e.g. the Simulator), and surface creation
// errors instead of discarding them with error:nil.
AVCaptureDevice *cameraDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] firstObject];
AVCaptureDevice *micDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
NSError *inputError = nil;
self.videoInput = cameraDevice ? [AVCaptureDeviceInput deviceInputWithDevice:cameraDevice error:&inputError] : nil;
if (!self.videoInput) {
    NSLog(@"Failed to create video input: %@", inputError);
}
AVCaptureDeviceInput *audioInput = micDevice ? [AVCaptureDeviceInput deviceInputWithDevice:micDevice error:&inputError] : nil;
if (!audioInput) {
    NSLog(@"Failed to create audio input: %@", inputError);
}
self.videoDevice = cameraDevice;
// BGRA frames match what the writer's pixel-buffer adaptor expects later on.
self.videoDataOut = [[AVCaptureVideoDataOutput alloc] init];
self.videoDataOut.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
// Drop late frames rather than queueing them, so preview/recording stays live.
self.videoDataOut.alwaysDiscardsLateVideoFrames = YES;
[self.videoDataOut setSampleBufferDelegate:self queue:self.recodingQueue];
self.audioDataOut = [[AVCaptureAudioDataOutput alloc] init];
[self.audioDataOut setSampleBufferDelegate:self queue:self.recodingQueue];
在进行录像的时候,主要使用 AVCaptureVideoPreviewLayer 所创建的实例进行展示,它的创建需要上述所创建的数据流
// Assemble the capture session: 720p preset, camera + mic inputs, raw
// video/audio data outputs, then a live preview layer inside videoView.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
self.videoSession = session;
if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
    session.sessionPreset = AVCaptureSessionPreset1280x720;
}
if ([session canAddInput:self.videoInput]) {
    [session addInput:self.videoInput];
}
if ([session canAddInput:audioInput]) {
    [session addInput:audioInput];
}
if ([session canAddOutput:self.videoDataOut]) {
    [session addOutput:self.videoDataOut];
}
if ([session canAddOutput:self.audioDataOut]) {
    [session addOutput:self.audioDataOut];
}
// The preview layer renders the camera feed; aspect-fill crops to the view.
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
previewLayer.frame = self.videoView.bounds;
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.videoPreLayer = previewLayer;
[self.videoView.layer addSublayer:previewLayer];
[session startRunning];
在 startRunning 之后,这个时候在你的屏幕上已经可以看到摄像头的实时画面了,同时不断触发代理方法,接下来要开始录制,先要创建录制对象
// assetUrl is the local file URL the movie is written to during recording.
// Surface writer-creation failures instead of discarding them with error:nil.
NSError *writerError = nil;
_assetWriter = [AVAssetWriter assetWriterWithURL:assetUrl fileType:AVFileTypeMPEG4 error:&writerError];
if (!_assetWriter) {
    NSLog(@"Failed to create asset writer: %@", writerError);
}
// The camera captures in landscape, so width/height are swapped here and the
// video input is rotated 90 degrees below to get a portrait movie.
NSDictionary *outputSettings = @{
    AVVideoCodecKey : AVVideoCodecH264,
    AVVideoWidthKey : @(self.recordH),
    AVVideoHeightKey : @(self.recordW),
    AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill,
};
_assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
_assetWriterVideoInput.expectsMediaDataInRealTime = YES;
_assetWriterVideoInput.transform = CGAffineTransformMakeRotation(M_PI / 2.0);
// 64 kbps mono AAC at 44.1 kHz.
NSDictionary *audioOutputSettings = @{
    AVFormatIDKey : @(kAudioFormatMPEG4AAC),
    AVEncoderBitRateKey : @(64000),
    AVSampleRateKey : @(44100),
    AVNumberOfChannelsKey : @(1),
};
_assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
_assetWriterAudioInput.expectsMediaDataInRealTime = YES;
// Pixel-buffer pool attributes must match the capture output's BGRA format.
// FIX: the pixel-buffer *attribute* key is kCVPixelBufferOpenGLESCompatibilityKey;
// kCVPixelFormatOpenGLESCompatibility is a pixel-format-description key and is
// ignored in this dictionary.
NSDictionary *sourcePixelBufferAttributes = @{
    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
    (__bridge NSString *)kCVPixelBufferWidthKey : @(self.recordW),
    (__bridge NSString *)kCVPixelBufferHeightKey : @(self.recordH),
    (__bridge NSString *)kCVPixelBufferOpenGLESCompatibilityKey : ((__bridge NSNumber *)kCFBooleanTrue),
};
_assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];
if ([_assetWriter canAddInput:_assetWriterVideoInput]) {
    [_assetWriter addInput:_assetWriterVideoInput];
} else {
    // FIX: original used Swift-style \(…) interpolation, which NSLog never expands.
    NSLog(@"Cannot add video writer input: %@", _assetWriterVideoInput);
}
if ([_assetWriter canAddInput:_assetWriterAudioInput]) {
    [_assetWriter addInput:_assetWriterAudioInput];
} else {
    // FIX: original message logged the *video* input here (copy-paste error).
    NSLog(@"Cannot add audio writer input: %@", _assetWriterAudioInput);
}
在摄像头使用的过程中会不断触发代理方法,我们在代理方法里面就可以开启或者关闭视频的录制了
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Keep the latest frame as a still image: when the long-press is released
    // almost immediately, this frame doubles as the "tap to shoot" photo.
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    if (image && !self.isGetShootImg) {
        self.shootImage = image;
    }
    if (!_recoding) return;
    @autoreleasepool {
        _currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
        // Lazily start the writer on the first recorded buffer so the session's
        // source time matches the first frame actually written.
        if (_assetWriter.status != AVAssetWriterStatusWriting) {
            [_assetWriter startWriting];
            [_assetWriter startSessionAtSourceTime:_currentSampleTime];
        }
        if (captureOutput == _videoDataOut) {
            if (_assetWriterPixelBufferInput.assetWriterInput.isReadyForMoreMediaData) {
                CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                // FIX: guard against a NULL image buffer before appending.
                if (pixelBuffer) {
                    BOOL success = [_assetWriterPixelBufferInput appendPixelBuffer:pixelBuffer withPresentationTime:_currentSampleTime];
                    if (!success) {
                        NSLog(@"Pixel Buffer没有append成功");
                    }
                }
            }
        }
        if (captureOutput == _audioDataOut) {
            // FIX: appending while the input is not ready raises an exception;
            // check readiness and the return value like the video path does.
            if (_assetWriterAudioInput.isReadyForMoreMediaData) {
                if (![_assetWriterAudioInput appendSampleBuffer:sampleBuffer]) {
                    NSLog(@"Audio sample buffer append failed: %@", _assetWriter.error);
                }
            }
        }
    }
}
注: 关于 点击拍照 功能的实现, 我之前尝试过在 点击结束 的时候直接截图,但是, self.videoPreLayer 的位置截取到的只是黑色背景,所以转而采用了这种方法. 如果有更好的建议请留言交流.