The simplest, most basic flow for camera recording, which also hands you the raw video and audio sample buffers.
// ViewController.m
// CoreMLVision
//
// Created by admin on 27/12/17.
// Copyright © 2017 Yanyx. All rights reserved.
//
#import "ViewController.h"
@interface ViewController ()
{
// Capture session
AVCaptureSession *captureSession_;
AVCaptureDevice *captureDevice;
AVCaptureDeviceInput *videoCaptureDeviceInput;
AVCaptureDeviceInput *audioCaptureDeviceInput;
AVCaptureVideoDataOutput *captureVideoDataOutput;
AVCaptureAudioDataOutput *captureAudioDataOutput;
dispatch_queue_t myQueue;
AVCaptureConnection *videoConnection;
AVCaptureConnection *audioConnection;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
}
@end
@implementation ViewController
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate / AVCaptureAudioDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
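    // The original leaves this callback empty. A hypothetical sketch of
    // telling the two streams apart (both outputs share this delegate,
    // and the callback runs on myQueue, not the main thread):
    if (connection == videoConnection) {
        // Video frames land here; e.g. convert one to a UIImage:
        // UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    } else if (connection == audioConnection) {
        // Audio buffers land here.
    }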
}
- (void)createCamera{
captureSession_ = [[AVCaptureSession alloc] init];
if ([captureSession_ canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
[captureSession_ setSessionPreset:AVCaptureSessionPresetPhoto];
}
captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:nil];
// Video input
videoCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
if ([captureSession_ canAddInput:videoCaptureDeviceInput]) {
[captureSession_ addInput:videoCaptureDeviceInput];
}
if ([captureSession_ canAddInput:audioCaptureDeviceInput]) {
[captureSession_ addInput:audioCaptureDeviceInput];
}
captureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
captureVideoDataOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
if ([captureSession_ canAddOutput:captureVideoDataOutput]) {
[captureSession_ addOutput:captureVideoDataOutput];
}
myQueue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL);
[captureVideoDataOutput setSampleBufferDelegate:self queue:myQueue];
captureVideoDataOutput.alwaysDiscardsLateVideoFrames = YES;
// Audio output
captureAudioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
[captureAudioDataOutput setSampleBufferDelegate:self queue:myQueue];
if ([captureSession_ canAddOutput:captureAudioDataOutput]) {
[captureSession_ addOutput:captureAudioDataOutput];
}
// Video connection
videoConnection = [captureVideoDataOutput connectionWithMediaType:AVMediaTypeVideo];
videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
// Audio connection
audioConnection = [captureAudioDataOutput connectionWithMediaType:AVMediaTypeAudio];
// Initialize the preview layer
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession_];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
captureVideoPreviewLayer.frame = self.view.frame;
[self.view.layer addSublayer:captureVideoPreviewLayer];
}
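// The listing never calls -createCamera itself; a minimal sketch of wiring
// it up from viewDidLoad (assumes camera/microphone access is authorized):
- (void)viewDidLoad{
    [super viewDidLoad];
    [self createCamera];
}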
// Switch between the front and back cameras
- (void)changeCamera{
AVCaptureDevicePosition position = [[videoCaptureDeviceInput device] position];
if (position == AVCaptureDevicePositionBack) {
captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
} else {
captureDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];
}
AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
if (newInput) {
[captureSession_ beginConfiguration];
[captureSession_ removeInput:videoCaptureDeviceInput];
if ([captureSession_ canAddInput:newInput]) {
[captureSession_ addInput:newInput];
videoCaptureDeviceInput = newInput;
} else {
[captureSession_ addInput:videoCaptureDeviceInput];
}
[captureSession_ commitConfiguration];
}
}
#pragma mark - CMSampleBuffer to UIImage
// Create a UIImage from a sample buffer's pixel data
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get the Core Video pixel buffer backing the sample buffer
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the pixel buffer's base address before touching its memory
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Base address of the pixel data
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Bytes per row of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Width and height of the pixel buffer
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    if (width == 0 || height == 0) {
        // Unlock before bailing out, otherwise the buffer stays locked
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }
    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Create a bitmap graphics context over the buffer's pixel data;
    // BGRA little-endian matches the 32BGRA format set on the video output
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
        bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Crop to a height x height square (assumes width >= height)
    CGImageRef cgImage = CGImageCreateWithImageInRect(quartzImage, CGRectMake(0, 0, height, height));
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // Release the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Wrap the cropped Quartz image in a UIImage
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    // Release the Quartz images
    CGImageRelease(cgImage);
    CGImageRelease(quartzImage);
    return image;
}
- (void)dealloc{
[captureSession_ stopRunning];
}
- (void)viewWillAppear:(BOOL)animated{
    [super viewWillAppear:animated];
    [captureSession_ startRunning];
}
@end
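One more practical note, not part of the original listing: on iOS 10 and later the app must declare NSCameraUsageDescription and NSMicrophoneUsageDescription in Info.plist, or it will crash the first time it touches the camera or microphone. A minimal sketch of requesting camera access up front:

[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            // Safe to configure and start the capture session here
        });
    }
}];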