iOS 直接获取摄像头数据

 
 
  1 #import <UIKit/UIKit.h>

  2 #import <AVFoundation/AVFoundation.h>

  3 #import <CoreGraphics/CoreGraphics.h>

  4 #import <CoreVideo/CoreVideo.h>

  5 #import <CoreMedia/CoreMedia.h>

  6  

  7 @interface MyAVController : UIViewController <

  8 AVCaptureVideoDataOutputSampleBufferDelegate> {

  9     AVCaptureSession *_captureSession;

 10     UIImageView *_imageView;

 11     CALayer *_customLayer;

 12     AVCaptureVideoPreviewLayer *_prevLayer;

 13 }

 14  

 15 @property (nonatomic, retain) AVCaptureSession *captureSession;

 16 @property (nonatomic, retain) UIImageView *imageView;

 17 @property (nonatomic, retain) CALayer *customLayer;

 18 @property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;

 19 - (void)initCapture;

 20  

 21 @end

 22  

 23 MyAVController.m:

 24  

 25 #import "MyAVController.h"

 26  

 27 @implementation MyAVController

 28  

 29 @synthesize captureSession = _captureSession;

 30 @synthesize imageView = _imageView;

 31 @synthesize customLayer = _customLayer;

 32 @synthesize prevLayer = _prevLayer;

 33  

 34 #pragma mark -

 35 #pragma mark Initialization

 36 - (id)init {

 37     self = [super init];

 38     if (self) {

 39         self.imageView = nil;

 40         self.prevLayer = nil;

 41         self.customLayer = nil;

 42     }

 43     return self;

 44 }

 45  

 46 - (void)viewDidLoad {

 47     [self initCapture];

 48 }

 49  

 50 - (void)initCapture {

 51     AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput

 52                      deviceInputWithDevice:[AVCaptureDevice

 53 defaultDeviceWithMediaType:AVMediaTypeVideo]  error:nil];

 54     AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc]

 55 init];

 56     captureOutput.alwaysDiscardsLateVideoFrames = YES;

 57     //captureOutput.minFrameDuration = CMTimeMake(1, 10);

 58  

 59     dispatch_queue_t queue;

 60     queue = dispatch_queue_create("cameraQueue", NULL);

 61     [captureOutput setSampleBufferDelegate:self queue:queue];

 62     dispatch_release(queue);

 63     NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;

 64     NSNumber* value = [NSNumber

 65 numberWithUnsignedInt:kCVPixelFormatType_32BGRA];

 66     NSDictionary* videoSettings = [NSDictionary

 67 dictionaryWithObject:value forKey:key];

 68     [captureOutput setVideoSettings:videoSettings];

 69     self.captureSession = [[AVCaptureSession alloc] init];

 70     [self.captureSession addInput:captureInput];

 71     [self.captureSession addOutput:captureOutput];

 72     [self.captureSession startRunning];

 73     self.customLayer = [CALayer layer];

 74     self.customLayer.frame = self.view.bounds;

 75     self.customLayer.transform = CATransform3DRotate(

 76 CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);

 77     self.customLayer.contentsGravity = kCAGravityResizeAspectFill;

 78     [self.view.layer addSublayer:self.customLayer];

 79     self.imageView = [[UIImageView alloc] init];

 80     self.imageView.frame = CGRectMake(0, 0, 100, 100);

 81      [self.view addSubview:self.imageView];

 82     self.prevLayer = [AVCaptureVideoPreviewLayer

 83 layerWithSession: self.captureSession];

 84     self.prevLayer.frame = CGRectMake(100, 0, 100, 100);

 85     self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;

 86     [self.view.layer addSublayer: self.prevLayer];

 87 }

 88  

 89 #pragma mark -

 90 #pragma mark AVCaptureSession delegate

 91 - (void)captureOutput:(AVCaptureOutput *)captureOutput

 92 didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer

 93        fromConnection:(AVCaptureConnection *)connection

 94 {

 95  

 96     NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];

 97  

 98     CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

 99     CVPixelBufferLockBaseAddress(imageBuffer,0);

100     uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

101     size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

102     size_t width = CVPixelBufferGetWidth(imageBuffer);

103     size_t height = CVPixelBufferGetHeight(imageBuffer);

104  

105     CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

106     CGContextRef newContext = CGBitmapContextCreate(baseAddress,

107  width, height, 8, bytesPerRow, colorSpace,

108 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

109     CGImageRef newImage = CGBitmapContextCreateImage(newContext);

110  

111     CGContextRelease(newContext);

112     CGColorSpaceRelease(colorSpace);

113  

114     [self.customLayer performSelectorOnMainThread:@selector(setContents:)

115 withObject: (id) newImage waitUntilDone:YES];

116  

117     UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0

118 orientation:UIImageOrientationRight];

119  

120     CGImageRelease(newImage);

121  

122     [self.imageView performSelectorOnMainThread:@selector(setImage:)

123 withObject:image waitUntilDone:YES];

124  

125     CVPixelBufferUnlockBaseAddress(imageBuffer,0);

126  

127     [pool drain];

128 }

129  

130 #pragma mark -

131 #pragma mark Memory management

132  

133 - (void)viewDidUnload {

134     self.imageView = nil;

135     self.customLayer = nil;

136     self.prevLayer = nil;

137 }

138  

139 - (void)dealloc {

140     [self.captureSession release];

141     [super dealloc];

142 }

143  

144 @end

 

需要链接的框架（Frameworks）：CoreMedia、CoreVideo、QuartzCore、AVFoundation

文章来自:Benjamin Loulier

 

你可能感兴趣的:(ios)