AVCaptureDevice
Enum descriptions
AVCaptureDeviceType | Description |
---|---|
AVCaptureDeviceTypeBuiltInMicrophone | A built-in microphone |
AVCaptureDeviceTypeBuiltInWideAngleCamera | A built-in wide-angle camera, suitable for general-purpose use |
AVCaptureDeviceTypeBuiltInTelephotoCamera | A built-in camera with a longer focal length than the wide-angle camera. This type only distinguishes the narrower-angle camera on hardware that has both, and such devices can only be discovered through an AVCaptureDeviceDiscoverySession |
AVCaptureDeviceTypeBuiltInDualCamera | A combination of a wide-angle and a telephoto camera that behaves as a single capture device (the common type on dual-camera phones) |
AVCaptureDeviceTypeBuiltInDuoCamera | Deprecated in iOS 10.2 and replaced by AVCaptureDeviceTypeBuiltInDualCamera |
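As a quick illustration (not from the original post; what you get back depends on the hardware), these device types are usually enumerated with AVCaptureDeviceDiscoverySession:

```objc
// Sketch: list the back cameras of the types above (iOS 10+).
AVCaptureDeviceDiscoverySession *discovery =
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera,
                                                                       AVCaptureDeviceTypeBuiltInTelephotoCamera,
                                                                       AVCaptureDeviceTypeBuiltInDualCamera]
                                                            mediaType:AVMediaTypeVideo
                                                             position:AVCaptureDevicePositionBack];
for (AVCaptureDevice *device in discovery.devices) {
    NSLog(@"Found %@ (%@)", device.localizedName, device.deviceType);
}
```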
Next is the AVMediaType enum. This list is not exhaustive; consult the documentation for the remaining values.
AVMediaType | Description |
---|---|
AVMediaTypeVideo | Video |
AVMediaTypeAudio | Audio |
AVMediaTypeText | Text |
AVMediaTypeClosedCaption | Closed captions |
AVMediaTypeSubtitle | Subtitles |
AVMediaTypeTimecode | Timecode |
AVMediaTypeMetadata | Metadata |
AVMediaTypeMuxed | Muxed audio + video |
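The media type is mostly used when asking the system for a capture device or a connection; a minimal sketch (not from the original post):

```objc
// Default capture devices for a given media type.
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSLog(@"camera: %@, microphone: %@", camera.localizedName, microphone.localizedName);
```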
There is also an enum for the camera position.
AVCaptureDevicePosition | Description |
---|---|
AVCaptureDevicePositionUnspecified | The device's position is unspecified |
AVCaptureDevicePositionBack | The back camera, on the side opposite the screen |
AVCaptureDevicePositionFront | The front camera, on the same side as the screen |
And the enum for camera authorization status.
AVAuthorizationStatus | Description |
---|---|
AVAuthorizationStatusNotDetermined | The user has not yet been asked to grant or deny access |
AVAuthorizationStatusRestricted | Access is restricted (for example by parental controls) and the user cannot grant it |
AVAuthorizationStatusDenied | The user has denied access |
AVAuthorizationStatusAuthorized | The user has granted access |
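Before starting a session you normally check and, if necessary, request this permission. A minimal sketch (not from the original post; it assumes NSCameraUsageDescription has been added to Info.plist):

```objc
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusNotDetermined) {
    // Shows the system permission alert; the handler may run on an arbitrary queue.
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        NSLog(@"camera access %@", granted ? @"granted" : @"denied");
    }];
} else if (status == AVAuthorizationStatusAuthorized) {
    // Safe to configure and start the capture session.
}
```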
The approach: get the camera -> start the camera -> capture frames -> receive the data. Now for the code.
//
// XJCaptureManager.m
// AVFoundation
//
// Created by 张孝江 on 2021/4/1.
//
#import "XJCaptureManager.h"
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
@interface XJCaptureManager () <AVCaptureVideoDataOutputSampleBufferDelegate>
/** The core session object **/
@property (nonatomic,strong) AVCaptureSession *session;
@property (nonatomic,strong) dispatch_queue_t sessionQueue; // serial queue for session work and sample-buffer callbacks
/* Capture inputs (abstractions of the physical devices) */
@property (nonatomic, strong) AVCaptureDeviceInput *frontCamera;
@property (nonatomic, strong) AVCaptureDeviceInput *backCamera;
@property (nonatomic, weak) AVCaptureDeviceInput *currentCamera;
/** Capture output **/
@property (nonatomic,strong) AVCaptureVideoDataOutput *videoDataOutput; // the video data output
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
/** Preview **/
@property (nonatomic,strong) AVCaptureVideoPreviewLayer *preLayer; // preview layer
@end
@implementation XJCaptureManager
-(instancetype)init{
if (self = [super init]) {
[self initVideo];
}
return self;
}
-(void)startLive{
[self.session startRunning];
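// Note: -startRunning blocks until the session starts (or fails); in practice you
// would usually dispatch it onto self.sessionQueue, e.g.
// dispatch_async(self.sessionQueue, ^{ [self.session startRunning]; });
// (a suggested variant, not part of the original code).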
}
#pragma mark - Capture setup
-(void)initVideo{
/** Step 1: get the cameras **/
AVCaptureDevice * front = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
self.frontCamera = [AVCaptureDeviceInput deviceInputWithDevice: front error:NULL];
AVCaptureDevice *back = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];
self.backCamera = [AVCaptureDeviceInput deviceInputWithDevice:back error:NULL];
// Default to the front camera
self.currentCamera = self.frontCamera;
// Configure the video data output
[self.videoDataOutput setSampleBufferDelegate:self queue:self.sessionQueue];
// Drop frames that cannot be processed in time
[self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
// Set the output pixel format
NSDictionary *dictionary = @{
(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};
// When in doubt, check the header comments; for this key they say:
/*
On iOS, the only supported key is kCVPixelBufferPixelFormatTypeKey. Supported pixel formats are kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange and kCVPixelFormatType_32BGRA.
*/
[self.videoDataOutput setVideoSettings:dictionary];
// Add the input
[self.session beginConfiguration];
if ([self.session canAddInput:self.currentCamera]) {
[self.session addInput:self.currentCamera];
}
// Add the output
if ([self.session canAddOutput:self.videoDataOutput]) {
[self.session addOutput:self.videoDataOutput];
}
// Pick the highest session preset (resolution) the device supports
if ([self.session canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
self.session.sessionPreset = AVCaptureSessionPreset1920x1080;
}else if ([self.session canSetSessionPreset:AVCaptureSessionPreset1280x720]){
self.session.sessionPreset = AVCaptureSessionPreset1280x720;
}else if ([self.session canSetSessionPreset:AVCaptureSessionPreset640x480]){
self.session.sessionPreset = AVCaptureSessionPreset640x480;
}else{
self.session.sessionPreset = AVCaptureSessionPreset352x288;
}
// Commit the configuration
[self.session commitConfiguration];
self.videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
// Set the video output orientation
self.videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
// Set the frame rate
[self setUpdataFps:30];
// Attach the preview layer to the preview view
[self.preview.layer addSublayer:self.preLayer];
}
#pragma mark - Sample-buffer delegate callback; each call delivers one captured frame
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
NSLog(@"%@",sampleBuffer);
}
#pragma mark - Configure the capture frame rate
-(void)setUpdataFps:(NSInteger)fps{
AVCaptureDevice *front = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
AVCaptureDevice *back = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack];
NSArray *array = @[front,back];
for (AVCaptureDevice *device in array) {
// Maximum frame rate supported by the device's current format
Float64 a = device.activeFormat.videoSupportedFrameRateRanges.firstObject.maxFrameRate;
if (a >= fps) {
if ([device lockForConfiguration:NULL]) {
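// CMTimeMake(10, fps * 10) is 10/(fps*10) = 1/fps seconds per frame;
// setting min == max below pins the capture rate to exactly fps.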
device.activeVideoMinFrameDuration = CMTimeMake(10, (int)(fps * 10));
device.activeVideoMaxFrameDuration = device.activeVideoMinFrameDuration;
[device unlockForConfiguration];
}
}
}
}
-(void)dealloc{
NSLog(@"我销毁了");
}
/* Lazy initialization */
-(AVCaptureSession *)session{
if (!_session) {
_session = [[AVCaptureSession alloc]init];
}
return _session;
}
/* Lazy getter for the video data output */
-(AVCaptureVideoDataOutput *)videoDataOutput{
if (!_videoDataOutput) {
_videoDataOutput = [[AVCaptureVideoDataOutput alloc]init];
}
return _videoDataOutput;
}
-(dispatch_queue_t)sessionQueue{
if (!_sessionQueue) {
_sessionQueue = dispatch_queue_create("XJ.SESSION", NULL);
}
return _sessionQueue;
}
-(AVCaptureVideoPreviewLayer *)preLayer{
if (!_preLayer) {
_preLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
_preLayer.frame = CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);
_preLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
return _preLayer;
}
-(UIView *)preview{
if (!_preview) {
_preview = [[UIView alloc] init];
}
return _preview;
}
@end
The captured video data can then be picked up in the delegate callback.
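A minimal usage sketch from a view controller (the controller name here is made up, and it assumes XJCaptureManager.h exposes the preview view and startLive used above):

```objc
#import <UIKit/UIKit.h>
#import "XJCaptureManager.h"

@interface XJLiveViewController : UIViewController
@property (nonatomic, strong) XJCaptureManager *captureManager;
@end

@implementation XJLiveViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    self.captureManager = [[XJCaptureManager alloc] init];    // builds and configures the session
    self.captureManager.preview.frame = self.view.bounds;
    [self.view addSubview:self.captureManager.preview];       // the preview layer is already attached to this view
    [self.captureManager startLive];                          // frames then arrive in the delegate callback
}
@end
```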