Reposted from: http://www.myexception.cn/operating-system/1258905.html
Video capture and AR visualization are indispensable parts of an augmented reality application.
The video capture stage mainly consists of receiving frames from the device camera, performing simple operations on them (such as color conversion), and passing the frames on to the processing pipeline. Because per-frame processing is critical in an AR application, it is essential that this stage be as efficient as possible, and the best way to achieve maximum performance is to access the frames coming from the camera directly.
Take AVCaptureVideoPreviewLayer and UIGetScreenImage, for example: these two APIs could only be used on iOS 3 and earlier.
Apple abandoned them for two main reasons:
1. They do not grab frames directly from the camera. To obtain a bitmap, the program has to create an intermediate UIImage, copy the image into it, and return it as the result. For an AR application that has to process frames this frequently, that cost is unacceptable.
2. To draw an AR scene, we would have to add a transparent overlay view on which to render it. However, according to Apple's guidelines we should avoid non-opaque layers, because they are hard for mobile device processors to composite and render.
Fortunately, there is an efficient, high-performance way to capture video: AVFoundation. Its core classes are as follows:
AVCaptureDevice represents the abstract hardware device (here, the camera).
AVCaptureInput (through one of its concrete subclasses) represents an input device; it configures the ports of the abstract hardware device.
AVCaptureOutput represents the output data and manages the delivery of output to a movie file or to video frames.
AVCaptureSession is the bridge between inputs and outputs; it coordinates the transfer of data from the inputs to the outputs. A minimal sketch of how these four classes are wired together follows.
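The wiring below is only an illustration and is not taken from the article's code; using the default video device and omitting error handling are simplifications made for brevity.
// Minimal AVFoundation wiring: device -> input -> session -> output
AVCaptureSession *session = [[AVCaptureSession alloc] init];
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
if (input && [session canAddInput:input]) [session addInput:input]; // the camera feeds the session
if ([session canAddOutput:output]) [session addOutput:output];      // the session delivers frames to the output
[session startRunning];                                              // data now flows from input to output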
The key code for starting the camera:
// Framework imports (reconstructed from the frameworks the code uses)
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#include "BGRAVideoFrame.h"
@protocol VideoSourceDelegate <NSObject>
-(void)frameReady:(struct BGRAVideoFrame) frame;
@end
@interface VideoSource : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
}
@property (nonatomic,retain) AVCaptureSession *captureSession;
@property (nonatomic,retain) AVCaptureDeviceInput *deviceInput;
@property (nonatomic,assign) id<VideoSourceDelegate> delegate;
- (bool) startWithDevicePosition:(AVCaptureDevicePosition)devicePosition;
//- (CameraCalibration) getCalibration;
//- (CGSize) getFrameSize;
@end
#import "VideoSource.h"
@implementation VideoSource
@synthesize captureSession,deviceInput,delegate;
- (void)dealloc
{
[captureSession release];
[deviceInput release];
self.delegate = nil;
[super dealloc];
}
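// Create the capture session and pick a preset: 640x480 if the device supports it, otherwise a low-quality fallback.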
- (id)init
{
if (self = [super init])
{
captureSession = [[AVCaptureSession alloc] init];
if ([captureSession canSetSessionPreset:AVCaptureSessionPreset640x480])
{
[captureSession setSessionPreset:AVCaptureSessionPreset640x480];
NSLog(@"Set capture session preset AVCaptureSessionPreset640x480");
}else if ([captureSession canSetSessionPreset:AVCaptureSessionPresetLow])
{
[captureSession setSessionPreset:AVCaptureSessionPresetLow];
NSLog(@"Set capture session preset AVCaptureSessionPresetLow");
}
}
return self;
}
// Called by the client to start the camera
- (bool) startWithDevicePosition:(AVCaptureDevicePosition)devicePosition
{
AVCaptureDevice *device = [self cameraWithPosition:devicePosition];
if (!device) return FALSE;
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
self.deviceInput = input;
if (!error) // the input was created without error
{
if ([[self captureSession] canAddInput:self.deviceInput])
{
[[self captureSession] addInput:self.deviceInput];
}else
{
NSLog(@"Couldn't add video input");
return FALSE;
}
}else
{
NSLog(@"Couldn't create video input");
return FALSE;
}
// Add the frame output
[self addRawViewOutput];
// Start capturing video
[captureSession startRunning];
return TRUE;
}
// Find the camera at the requested position (front or back)
- (AVCaptureDevice *) cameraWithPosition:(AVCaptureDevicePosition) position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
return device;
}
}
return nil;
}
// Add the raw frame output
- (void)addRawViewOutput
{
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
// Process only one frame at a time: discard frames that arrive while a frame is still being processed
output.alwaysDiscardsLateVideoFrames = YES;
// Create the dispatch queue on which sample buffers are delivered
dispatch_queue_t queue;
queue = dispatch_queue_create("com.lanhaijiye", nil);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
NSString *keyString = (NSString *)kCVPixelBufferPixelFormatTypeKey;
NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary *setting = [NSDictionary dictionaryWithObject:value forKey:keyString];
[output setVideoSettings:setting];
if ([self.captureSession canAddOutput:output])
{
[self.captureSession addOutput:output];
}
// The session retains the output, so drop our own reference (manual reference counting)
[output release];
}
//- (CameraCalibration) getCalibration
//{
//
//}
//
//- (CGSize) getFrameSize
//{
//
//}
#pragma mark - AVCaptureOutput delegate
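// Called on the capture queue for every frame: lock the pixel buffer, wrap its
// dimensions, stride, and base address in a BGRAVideoFrame, and pass it to the delegate.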
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the pixel buffer before touching its memory
CVPixelBufferLockBaseAddress(imageBuffer, 0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
size_t stride = CVPixelBufferGetBytesPerRow(imageBuffer);
struct BGRAVideoFrame frame = {width, height, stride, baseAddress};
if (delegate && [delegate respondsToSelector:@selector(frameReady:)])
{
[delegate frameReady:frame];
}
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}
@end
Starting the camera:
VideoSource *source = [[VideoSource alloc] init];
[source setDelegate:self]; // set the delegate before starting so the first frames are not dropped
if ([source startWithDevicePosition:AVCaptureDevicePositionFront])
{
NSLog(@"Camera started successfully");
}
- (void)frameReady:(struct BGRAVideoFrame)frame
{
NSLog(@"file:%s method:%@",__FILE__,NSStringFromSelector(_cmd));
}
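One caveat worth noting: frame.data points into the CVPixelBuffer, which is only locked for the duration of the callback, and the callback runs on the capture queue rather than the main thread. If the pixels need to outlive frameReady:, they should be copied first. A minimal sketch (what is done with the copy afterwards is left to the application):
- (void)frameReady:(struct BGRAVideoFrame)frame
{
    // frame.data is only valid while the pixel buffer is locked, i.e. during this call,
    // so copy the bytes if they have to be used after the callback returns.
    NSData *pixels = [NSData dataWithBytes:frame.data length:frame.stride * frame.height];
    // ... hand `pixels` to the processing pipeline ...
}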
The BGRAVideoFrame struct (the contents of BGRAVideoFrame.h):
struct BGRAVideoFrame
{
size_t width;
size_t height;
size_t stride;
unsigned char *data;
};
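Because the pixel buffer rows may be padded, a row occupies stride bytes rather than width * 4. A small illustrative helper (not part of the original code) for locating a pixel:
// Returns a pointer to the 4-byte pixel at column x, row y (bytes are in B, G, R, A order)
static inline unsigned char *BGRAPixelAt(struct BGRAVideoFrame frame, size_t x, size_t y)
{
    return frame.data + y * frame.stride + x * 4;
}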
Note: remember to #import <AVFoundation/AVFoundation.h> wherever these classes are used.