#import "XMMainViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <QuartzCore/QuartzCore.h>
@interface XMMainViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, strong) UIImageView *imageView;
@property (nonatomic, strong) CALayer *customLayer;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *prevLayer;
@end
@implementation XMMainViewController
#pragma mark -
#pragma mark init
- (instancetype)init
{
    self = [super init];
    if (self) {
        self.imageView = nil;
        self.prevLayer = nil;
        self.customLayer = nil;
    }
    return self;
}
- (void)viewDidLoad {
    [super viewDidLoad];
    [self initCapture];
}
- (void)initCapture {
    // Configure the capture input source (the camera); here we use the front camera.
    AVCaptureDevice *videoDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionFront];
    // Wrap the device in a capture input object.
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    // captureOutput.minFrameDuration = CMTimeMake(1, 10);
    // Configure the capture output, i.e. the interface through which we receive
    // video frames; the delegate callbacks are delivered on this serial queue.
    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    // Configure the pixel format of the output video frames.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    _captureSession = [[AVCaptureSession alloc] init];
    if ([_captureSession canAddInput:captureInput]) {
        [_captureSession addInput:captureInput];
    }
    if ([_captureSession canAddOutput:captureOutput]) {
        [_captureSession addOutput:captureOutput];
    }
    // Save the connection so the sample buffer delegate can tell whether a
    // buffer carries video or audio data.
    _videoConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
    // _customLayer = [CALayer layer];
    // _customLayer.frame = self.view.bounds;
    // _customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
    // _customLayer.contentsGravity = kCAGravityResizeAspectFill;
    // [self.view.layer addSublayer:_customLayer];
    // _imageView = [[UIImageView alloc] init];
    // _imageView.frame = CGRectMake(0, 0, 100, 100);
    // [self.view addSubview:_imageView];
    _prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
    _prevLayer.frame = self.view.bounds;
    _prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.prevLayer];
    [_captureSession startRunning];
}
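#pragma mark -
#pragma mark Camera permission (added)
// NOTE (added): capture silently produces no frames if the app lacks camera
// access. A minimal sketch of an authorization check to run before
// initCapture; the helper name checkCameraAuthorization: is an assumption,
// but authorizationStatusForMediaType: / requestAccessForMediaType: are the
// standard AVFoundation APIs. On iOS 10+ the Info.plist also needs an
// NSCameraUsageDescription entry.
- (void)checkCameraAuthorization:(void (^)(BOOL granted))completion {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        completion(YES);
    } else if (status == AVAuthorizationStatusNotDetermined) {
        // Ask the user; the handler comes back on an arbitrary queue,
        // so hop to the main queue before touching UI.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{ completion(granted); });
        }];
    } else {
        completion(NO); // Denied or restricted.
    }
}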
/**
 * Returns the camera device at the given position.
 *
 * @param position the desired camera position (front or back)
 *
 * @return the matching capture device, or nil if none is found
 */
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    return nil;
}
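// NOTE (added): devicesWithMediaType: is deprecated as of iOS 10. A minimal
// sketch of the modern replacement using AVCaptureDeviceDiscoverySession;
// the helper name and the wide-angle-only device list are assumptions:
- (AVCaptureDevice *)cameraWithDiscoverySessionAtPosition:(AVCaptureDevicePosition)position {
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    return discovery.devices.firstObject;
}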
#pragma mark -
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // sampleBuffer is the captured data; whether it is video or audio has to
    // be determined from the connection it arrived on.
    if (connection == _videoConnection) { // Video
        /*
        // Read the dimensions of the current video frame
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        size_t width = CVPixelBufferGetWidth(pixelBuffer);
        size_t height = CVPixelBufferGetHeight(pixelBuffer);
        NSLog(@"video width: %zu height: %zu", width, height);
        */
        NSLog(@"Got a video sampleBuffer here; process it further (e.g. encode to H.264)");
    }
    // if (connection == _audioConnection) { // Audio
    //     NSLog(@"Got an audio sampleBuffer here; process it further (e.g. encode to AAC)");
    // }
    // Convert the BGRA pixel buffer into a CGImage / UIImage for display.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    // Hand the results to the main thread; this delegate runs on cameraQueue.
    [_customLayer performSelectorOnMainThread:@selector(setContents:) withObject:(__bridge id)newImage waitUntilDone:YES];
    UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    [_imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
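#pragma mark -
#pragma mark H.264 encoding sketch (added)
// NOTE (added): the log message above says the video sampleBuffer is where
// H.264 encoding would happen. A minimal sketch of that step with Apple's
// VideoToolbox, assuming an extra `#import <VideoToolbox/VideoToolbox.h>`.
// The names compressionOutputCallback / encodeVideoSampleBuffer and the
// file-scope session variable are illustrative assumptions, not the original
// author's implementation; a real one would keep the session in an ivar and
// tear it down with VTCompressionSessionInvalidate.
static VTCompressionSessionRef gCompressionSession = NULL; // hypothetical
static void compressionOutputCallback(void *refCon, void *sourceFrameRefCon,
                                      OSStatus status, VTEncodeInfoFlags infoFlags,
                                      CMSampleBufferRef encodedBuffer) {
    if (status == noErr && encodedBuffer != NULL) {
        // The H.264 NAL units live inside encodedBuffer's data buffer;
        // extract and write/stream them here.
    }
}
- (void)encodeVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (gCompressionSession == NULL) {
        // Lazily create the encoder with the incoming frame's dimensions.
        VTCompressionSessionCreate(kCFAllocatorDefault,
                                   (int32_t)CVPixelBufferGetWidth(imageBuffer),
                                   (int32_t)CVPixelBufferGetHeight(imageBuffer),
                                   kCMVideoCodecType_H264,
                                   NULL, NULL, NULL,
                                   compressionOutputCallback,
                                   (__bridge void *)self,
                                   &gCompressionSession);
        VTSessionSetProperty(gCompressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    }
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    VTCompressionSessionEncodeFrame(gCompressionSession, imageBuffer, pts,
                                    kCMTimeInvalid, NULL, NULL, NULL);
}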
#pragma mark -
#pragma mark Memory management
//- (void)viewDidUnload {
//    _imageView = nil;
//    _customLayer = nil;
//    _prevLayer = nil;
//}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
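// NOTE (added): the original never tears the session down. A minimal sketch
// under ARC, assuming the session should stop when the controller is freed:
- (void)dealloc {
    [_captureSession stopRunning];
}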
@end