Project address
The camera implementation consists of five main steps:
1. Create the session (capture session)
2. Create the device input (capture device input)
3. Set up the preview view
4. Create the capture output
5. Take photos and record video (turn the captured data into an image or a movie file)
Each of these steps is explained in detail below.
AVCaptureSession *captureSession = [[AVCaptureSession alloc]init];
[captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
_captureSession = captureSession;
/**
 * There are roughly 11 session presets on iOS:
NSString *const AVCaptureSessionPresetPhoto;
NSString *const AVCaptureSessionPresetHigh;
NSString *const AVCaptureSessionPresetMedium;
NSString *const AVCaptureSessionPresetLow;
NSString *const AVCaptureSessionPreset352x288;
NSString *const AVCaptureSessionPreset640x480;
NSString *const AVCaptureSessionPreset1280x720;
NSString *const AVCaptureSessionPreset1920x1080;
NSString *const AVCaptureSessionPresetiFrame960x540;
NSString *const AVCaptureSessionPresetiFrame1280x720;
NSString *const AVCaptureSessionPresetInputPriority;
The first one is for high-resolution still photo output.
The next three are relative presets (high, medium, low); the encoding settings behind them vary from device to device. If you choose high, the selected camera delivers the highest quality that device supports.
Then come the fixed-resolution presets (352x288, 640x480, 1280x720, 1920x1080) and the iFrame presets (960x540 and 1280x720).
The last one means the capture session does not control the audio/video output settings at all; instead, the output quality is driven by the activeFormat of the connected capture device.
*/
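A note on the last preset: with AVCaptureSessionPresetInputPriority the quality is driven by the device's activeFormat. Below is a minimal sketch, assuming videoDevice is the capture device obtained in the next step; picking the highest-frame-rate format is just an example policy, not something the original project necessarily does.
// Sketch: with the InputPriority preset, the device's activeFormat decides the output quality.
// Choosing the format with the highest supported frame rate is only an illustrative policy.
[captureSession setSessionPreset:AVCaptureSessionPresetInputPriority];
AVCaptureDeviceFormat *bestFormat = nil;
AVFrameRateRange *bestRange = nil;
for (AVCaptureDeviceFormat *format in videoDevice.formats) {
    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
        if (bestRange == nil || range.maxFrameRate > bestRange.maxFrameRate) {
            bestFormat = format;
            bestRange = range;
        }
    }
}
NSError *formatError = nil;
if (bestFormat && [videoDevice lockForConfiguration:&formatError]) {
    videoDevice.activeFormat = bestFormat;
    videoDevice.activeVideoMinFrameDuration = bestRange.minFrameDuration;
    videoDevice.activeVideoMaxFrameDuration = bestRange.minFrameDuration;
    [videoDevice unlockForConfiguration];
}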
Note: calls into the capture session are blocking, so it is recommended to dispatch them onto a background serial queue. To keep things simple here, performance is ignored and the dispatch queue is omitted.
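For reference, the queue-based version would look roughly like this (the queue label is arbitrary):
// Sketch: do all session work on a private serial queue so the main thread never blocks.
dispatch_queue_t sessionQueue = dispatch_queue_create("com.example.camera.session", DISPATCH_QUEUE_SERIAL);
dispatch_async(sessionQueue, ^{
    [captureSession beginConfiguration];
    // ... add inputs and outputs here ...
    [captureSession commitConfiguration];
    [captureSession startRunning]; // blocking, but safe on the background queue
});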
/**
 This method returns every device currently able to capture video, including the front and back cameras and any external devices:
 NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
 And this one returns every device currently able to capture audio:
 NSArray *audioDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
 In this example we do not enumerate devices this way; we obtain the input device with the method below.
 */
// Video input
// Get the video input device; this returns the iPhone's back camera by default
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// Wrap the capture device in a device input so it can be added to the capture session
NSError *error;
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (videoInput) {
if ([_captureSession canAddInput:videoInput]){
[_captureSession addInput:videoInput];
_deviceInput = videoInput;
}
}
// Audio input
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
if ([_captureSession canAddInput:audioIn]){
[_captureSession addInput:audioIn];
}
// Create the preview view. Here previewView is a UIView subclass (the class name below is
// illustrative) that overrides +layerClass to return AVCaptureVideoPreviewLayer, so its
// backing layer is the preview layer.
CCPreviewView *previewView = [[CCPreviewView alloc] initWithFrame:self.view.bounds];
AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)previewView.layer;
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
[previewLayer setSession:_captureSession];
self.previewView = previewView;
// Convert a point from the preview view's coordinate system to the capture device's coordinate system
- (CGPoint)captureDevicePointForPoint:(CGPoint)point {
    return [(AVCaptureVideoPreviewLayer *)self.previewView.layer captureDevicePointOfInterestForPoint:point];
}
Note: 1. The preview layer may look like an output, but it is not; it only previews what the camera captures. The actual output is handled by the AVCaptureSession (the preview layer holds the session, and the session holds the outputs). 2. Its coordinate system differs from the screen's, so when tapping an area to focus we must convert the tapped point into the capture device's coordinate system. 3. The capture device's coordinate system is fixed relative to the sensor; seen from a portrait screen its origin sits in the top-right corner, unlike the screen coordinate system whose origin does not move. Therefore, when taking a photo or recording video, you first need the device orientation (orientation is covered in detail later) to compute the rotation to apply to the output.
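As a usage sketch, a tap gesture handler on the preview view can convert the touch point with the method above and then drive the focus/exposure methods shown later in this article (the gesture wiring itself is only illustrative):
// Sketch: turn a tap on the preview into a focus/exposure point of interest.
- (void)handleTap:(UITapGestureRecognizer *)gesture {
    CGPoint layerPoint  = [gesture locationInView:self.previewView];
    CGPoint devicePoint = [self captureDevicePointForPoint:layerPoint];
    [self focusAtPoint:devicePoint];
    [self exposeAtPoint:devicePoint];
}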
Besides AVCaptureVideoPreviewLayer, the capture preview can also be drawn with OpenGL ES: we can grab individual frames from the output data stream and render them onto a view manually. If we want to manipulate the preview, for example to apply a filter, this is what we have to do. We will not go deeper into it here; a simple implementation follows:
// Create the GLKView and a CIContext for drawing
EAGLContext *context = [[EAGLContext alloc]initWithAPI:kEAGLRenderingAPIOpenGLES2];
_glview = [[GLKView alloc]initWithFrame:self.view.bounds context:context];
[EAGLContext setCurrentContext:context];
[self.view addSubview:_glview];
_cicontext = [CIContext contextWithEAGLContext:context];
// Draw each frame in the video data output callback
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
if (_glview.context != [EAGLContext currentContext]) {
[EAGLContext setCurrentContext:_glview.context];
}
CVImageBufferRef imageRef = CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage *image = [CIImage imageWithCVImageBuffer:imageRef];
[_glview bindDrawable];
// Draw into the view's drawable area (in pixels), scaling the frame to fit
[_cicontext drawImage:image inRect:CGRectMake(0, 0, _glview.drawableWidth, _glview.drawableHeight) fromRect:image.extent];
[_glview display];
}
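If you want to manipulate the preview as mentioned above, for instance to apply a filter, the frame can be run through Core Image inside the callback before it is drawn. A minimal sketch using CISepiaTone (the filter choice is arbitrary):
// Sketch: filter each frame with Core Image before drawing it into the GLKView.
CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
[sepia setValue:image forKey:kCIInputImageKey];
[sepia setValue:@0.8 forKey:kCIInputIntensityKey];
CIImage *filtered = sepia.outputImage;
[_cicontext drawImage:filtered
               inRect:CGRectMake(0, 0, _glview.drawableWidth, _glview.drawableHeight)
             fromRect:filtered.extent];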
// For example, after adding the video output to the session, get and configure the capture connection
_videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
_videoConnection.videoOrientation = self.referenceOrientation;
// In the sample buffer output callback, if the connection is the video connection, write the video data
if (connection == _videoConnection)
{
if ([self inputsReadyToRecord]){
[self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
}
}
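inputsReadyToRecord and writeSampleBuffer:ofType: are not shown in this article. Roughly, the former checks that the asset writer inputs configured below are set up, and the latter hands sample buffers to the writer. A hedged sketch of what writeSampleBuffer:ofType: might look like (an illustrative reconstruction, not the project's exact code):
// Sketch: feed a sample buffer to the asset writer, starting the session on the first buffer.
- (void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType {
    if (_assetWriter.status == AVAssetWriterStatusUnknown) {
        if ([_assetWriter startWriting]) {
            [_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        } else {
            [self showError:_assetWriter.error];
            return;
        }
    }
    if (_assetWriter.status == AVAssetWriterStatusWriting) {
        if ([mediaType isEqual:AVMediaTypeVideo] && _assetVideoInput.isReadyForMoreMediaData) {
            if (![_assetVideoInput appendSampleBuffer:sampleBuffer]) {
                [self showError:_assetWriter.error];
            }
        } else if ([mediaType isEqual:AVMediaTypeAudio] && _assetAudioInput.isReadyForMoreMediaData) {
            if (![_assetAudioInput appendSampleBuffer:sampleBuffer]) {
                [self showError:_assetWriter.error];
            }
        }
    }
}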
// Create the image output
AVCaptureStillImageOutput *imageOutput = [[AVCaptureStillImageOutput alloc] init];
imageOutput.outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
if ([_captureSession canAddOutput:imageOutput]) {
[_captureSession addOutput:imageOutput];
_imageOutput = imageOutput;
}
// Capture and output the photo
AVCaptureConnection *connection = [_imageOutput connectionWithMediaType:AVMediaTypeVideo];
if (connection.isVideoOrientationSupported) {
connection.videoOrientation = [self currentVideoOrientation];
}
void (^takePictureSuccess)(CMSampleBufferRef, NSError *) = ^(CMSampleBufferRef sampleBuffer, NSError *error){
if (sampleBuffer == NULL) {
[self showError:error];
return ;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
UIImage *image = [[UIImage alloc]initWithData:imageData];
};
[_imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:takePictureSuccess];
// Audio output
AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
[audioOut setSampleBufferDelegate:self queue:captureQueue];
if ([_captureSession canAddOutput:audioOut]){
[_captureSession addOutput:audioOut];
}
// Set up the audio capture connection
_audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
// Video output
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[videoOut setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]}];
[videoOut setSampleBufferDelegate:self queue:captureQueue];
if ([_captureSession canAddOutput:videoOut]){
[_captureSession addOutput:videoOut];
_videoOutput = videoOut;
}
// Set up the video capture connection
_videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
// Set the video orientation on the connection
_videoConnection.videoOrientation = self.referenceOrientation;
// Initialize an asset writer
NSError *error;
_assetWriter = [[AVAssetWriter alloc] initWithURL:_movieURL fileType:AVFileTypeQuickTimeMovie error:&error];
if (error){
[self showError:error];
}
// Configure the asset writer's video input
- (BOOL)setupAssetWriterVideoInput:(CMFormatDescriptionRef)currentFormatDescription
{
CGFloat bitsPerPixel;
CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(currentFormatDescription);
NSUInteger numPixels = dimensions.width * dimensions.height;
NSUInteger bitsPerSecond;
if (numPixels < (640 * 480)){
bitsPerPixel = 4.05;
}
else{
bitsPerPixel = 11.4;
}
bitsPerSecond = numPixels * bitsPerPixel;
NSDictionary *videoCompressionSettings = @{AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : [NSNumber numberWithInteger:dimensions.width],
AVVideoHeightKey : [NSNumber numberWithInteger:dimensions.height],
AVVideoCompressionPropertiesKey:@{AVVideoAverageBitRateKey:[NSNumber numberWithInteger:bitsPerSecond],
AVVideoMaxKeyFrameIntervalKey:[NSNumber numberWithInteger:30]}
};
if ([_assetWriter canApplyOutputSettings:videoCompressionSettings forMediaType:AVMediaTypeVideo])
{
_assetVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings];
_assetVideoInput.expectsMediaDataInRealTime = YES;
_assetVideoInput.transform = [self transformFromCurrentVideoOrientationToOrientation:self.referenceOrientation];
if ([_assetWriter canAddInput:_assetVideoInput]){
[_assetWriter addInput:_assetVideoInput];
}
else{
[self showError:_assetWriter.error];
return NO;
}
}
else{
[self showError:_assetWriter.error];
return NO;
}
return YES;
}
// Configure the asset writer's audio input
- (BOOL)setupAssetWriterAudioInput:(CMFormatDescriptionRef)currentFormatDescription
{
size_t aclSize = 0;
const AudioStreamBasicDescription *currentASBD = CMAudioFormatDescriptionGetStreamBasicDescription(currentFormatDescription);
const AudioChannelLayout *currentChannelLayout = CMAudioFormatDescriptionGetChannelLayout(currentFormatDescription, &aclSize);
NSData *currentChannelLayoutData = nil;
if (currentChannelLayout && aclSize > 0 ){
currentChannelLayoutData = [NSData dataWithBytes:currentChannelLayout length:aclSize];
}
else{
currentChannelLayoutData = [NSData data];
}
NSDictionary *audioCompressionSettings = @{AVFormatIDKey : [NSNumber numberWithInteger:kAudioFormatMPEG4AAC],
AVSampleRateKey : [NSNumber numberWithFloat:currentASBD->mSampleRate],
AVEncoderBitRatePerChannelKey : [NSNumber numberWithInt:64000],
AVNumberOfChannelsKey : [NSNumber numberWithInteger:currentASBD->mChannelsPerFrame],
AVChannelLayoutKey : currentChannelLayoutData};
if ([_assetWriter canApplyOutputSettings:audioCompressionSettings forMediaType:AVMediaTypeAudio])
{
_assetAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
_assetAudioInput.expectsMediaDataInRealTime = YES;
if ([_assetWriter canAddInput:_assetAudioInput]){
[_assetWriter addInput:_assetAudioInput];
}
else{
[self showError:_assetWriter.error];
return NO;
}
}
else{
[self showError:_assetWriter.error];
return NO;
}
return YES;
}
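One step the article does not list: before the file at _movieURL can be saved to the photo library, recording has to be stopped and the writer finalized. A minimal sketch (the method name is illustrative):
// Sketch: stop recording and finalize the movie file before saving it as shown next.
- (void)stopRecording {
    [_assetVideoInput markAsFinished];
    [_assetAudioInput markAsFinished];
    [_assetWriter finishWritingWithCompletionHandler:^{
        if (_assetWriter.status == AVAssetWriterStatusCompleted) {
            // _movieURL now points to a playable QuickTime movie; save it to the library below.
        } else {
            [self showError:_assetWriter.error];
        }
    }];
}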
ALAssetsLibrary *lab = [[ALAssetsLibrary alloc]init];
// Save the video
[lab writeVideoAtPathToSavedPhotosAlbum:_movieURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error) {
[self showError:error];
}
}];
On iOS 9.0 and later:
[PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
if (status == PHAuthorizationStatusAuthorized) {
        [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
            // Save the video
            PHAssetCreationRequest *videoRequest = [PHAssetCreationRequest creationRequestForAsset];
            [videoRequest addResourceWithType:PHAssetResourceTypeVideo fileURL:_movieURL options:nil];
        } completionHandler:^( BOOL success, NSError * _Nullable error ) {
if (!success) {
[self showError:error];
}
}];
}
}];
- (BOOL)switchCameras{
if (![self canSwitchCameras]) {
return NO;
}
NSError *error;
AVCaptureDevice *videoDevice = [self inactiveCamera];
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (videoInput) {
[_captureSession beginConfiguration];
[_captureSession removeInput:_deviceInput];
if ([_captureSession canAddInput:videoInput]) {
[_captureSession addInput:videoInput];
_deviceInput = videoInput;
}
else{
[_captureSession addInput:_deviceInput];
}
[_captureSession commitConfiguration];
// Note: after switching cameras, the video data output must be set up again
[self resetupVideoOutput];
}
else{
[self showError:error];
return NO;
}
return YES;
}
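activeCamera, inactiveCamera and canSwitchCameras are small helpers the article does not list; they might look roughly like this (an illustrative reconstruction, not necessarily the project's exact code):
// Sketch of the camera-selection helpers used in switchCameras.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}

- (AVCaptureDevice *)activeCamera {
    return _deviceInput.device;
}

- (AVCaptureDevice *)inactiveCamera {
    AVCaptureDevicePosition current = [self activeCamera].position;
    return [self cameraWithPosition:(current == AVCaptureDevicePositionBack ?
                                     AVCaptureDevicePositionFront : AVCaptureDevicePositionBack)];
}

- (BOOL)canSwitchCameras {
    return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1;
}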
-(void)resetupVideoOutput{
[_captureSession beginConfiguration];
[_captureSession removeOutput:_videoOutput];
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[videoOut setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]}];
dispatch_queue_t videoCaptureQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
[videoOut setSampleBufferDelegate:self queue:videoCaptureQueue];
if ([_captureSession canAddOutput:videoOut]) {
[_captureSession addOutput:videoOut];
_videoOutput = videoOut;
}
_videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
[_captureSession commitConfiguration];
}
Note: the torch (fill light) and the flash cannot be on at the same time.
- (void)setTorchMode:(AVCaptureTorchMode)torchMode{
// If the flash is on, turn it off first
if ([self flashMode] == AVCaptureFlashModeOn) {
[self flashClick:_flashBtn];
}
AVCaptureDevice *device = [self activeCamera];
if (device.torchMode != torchMode && [device isTorchModeSupported:torchMode]) {
NSError *error;
if ([device lockForConfiguration:&error]) {
device.torchMode = torchMode;
[device unlockForConfiguration];
}
else{
[self showError:error];
}
}
}
- (void)setFlashMode:(AVCaptureFlashMode)flashMode{
// If the torch is on, turn it off first
if ([self torchMode] == AVCaptureTorchModeOn) {
[self torchClick:_torchBtn];
}
AVCaptureDevice *device = [self activeCamera];
if (device.flashMode != flashMode && [device isFlashModeSupported:flashMode]) {
NSError *error;
if ([device lockForConfiguration:&error]) {
device.flashMode = flashMode;
[device unlockForConfiguration];
}
else{
[self showError:error];
}
}
}
- (void)focusAtPoint:(CGPoint)point {
AVCaptureDevice *device = [self activeCamera];
if ([self cameraSupportsTapToFocus] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
NSError *error;
if ([device lockForConfiguration:&error]) {
device.focusPointOfInterest = point;
device.focusMode = AVCaptureFocusModeAutoFocus;
[device unlockForConfiguration];
}
else{
[self showError:error];
}
}
}
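cameraSupportsTapToFocus and cameraSupportsTapToExpose, used here and in the exposure method below, are not listed in the article; presumably they are thin wrappers around the point-of-interest support checks, roughly:
// Sketch: the tap-to-focus / tap-to-expose support checks (illustrative reconstruction).
- (BOOL)cameraSupportsTapToFocus {
    return [[self activeCamera] isFocusPointOfInterestSupported];
}

- (BOOL)cameraSupportsTapToExpose {
    return [[self activeCamera] isExposurePointOfInterestSupported];
}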
static const NSString *CameraAdjustingExposureContext;
- (void)exposeAtPoint:(CGPoint)point{
AVCaptureDevice *device = [self activeCamera];
if ([self cameraSupportsTapToExpose] && [device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
NSError *error;
if ([device lockForConfiguration:&error]) {
device.exposurePointOfInterest = point;
device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
[device addObserver:self
forKeyPath:@"adjustingExposure"
options:NSKeyValueObservingOptionNew
context:&CameraAdjustingExposureContext];
}
[device unlockForConfiguration];
}
else{
[self showError:error];
}
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if (context == &CameraAdjustingExposureContext) {
AVCaptureDevice *device = (AVCaptureDevice *)object;
if (!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked]) {
[object removeObserver:self
forKeyPath:@"adjustingExposure"
context:&CameraAdjustingExposureContext];
dispatch_async(dispatch_get_main_queue(), ^{
NSError *error;
if ([device lockForConfiguration:&error]) {
device.exposureMode = AVCaptureExposureModeLocked;
[device unlockForConfiguration];
}
else{
[self showError:error];
}
});
}
}
else{
[super observeValueForKeyPath:keyPath
ofObject:object
change:change
context:context];
}
}
- (BOOL)resetFocusAndExposureModes{
AVCaptureDevice *device = [self activeCamera];
AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;
AVCaptureFocusMode focusMode = AVCaptureFocusModeContinuousAutoFocus;
BOOL canResetFocus = [device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode];
BOOL canResetExposure = [device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode];
CGPoint centerPoint = CGPointMake(0.5f, 0.5f);
NSError *error;
if ([device lockForConfiguration:&error]) {
if (canResetFocus) {
device.focusMode = focusMode;
device.focusPointOfInterest = centerPoint;
}
if (canResetExposure) {
device.exposureMode = exposureMode;
device.exposurePointOfInterest = centerPoint;
}
[device unlockForConfiguration];
return YES;
}
else{
[self showError:error];
return NO;
}
}
Printing description of image:
<CIImage: 0x12c7bdad0 extent [0 0 750 1000]>
affine [1 0 0 -1 0 1000] extent=[0 0 750 1000]
colormatch "QuickTime 'nclc' Video (1,1,6)"-to-workingspace extent=[0 0 750 1000]
IOSurface 0x12da00008 BGRA8 extent=[0 0 750 1000]
When the AVCaptureSessionPresetHigh preset is selected, the output image size is as follows:
Printing description of image:
<CIImage: 0x15f851680 extent [0 0 1080 1920]>
affine [1 0 0 -1 0 1920] extent=[0 0 1080 1920]
colormatch "QuickTime 'nclc' Video (1,1,6)"-to-workingspace extent=[0 0 1080 1920]
IOSurface 0x15f900008 BGRA8 extent=[0 0 1080 1920]
As you can see, different session presets produce output images of different sizes. All of these images are much larger than the preview area on the phone, so they have to be scaled to be displayed in the preview layer, and the video gravity is essentially that scaling behavior.
AVLayerVideoGravityResizeAspect: scale the video within the preview layer while preserving its original aspect ratio. This is the default and suits most cases, but the video may not fill the whole preview view.
AVLayerVideoGravityResizeAspectFill: scale the video, preserving its aspect ratio, until it fills the whole layer. This usually crops the previewed picture, while the captured output is not cropped, so the preview and the final photo can differ.
AVLayerVideoGravityResize: stretch the video to match the size of the preview layer. This is the least used option because it can distort the video.
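If you do use AVLayerVideoGravityResizeAspectFill and want the saved photo to match what the user actually saw, one approach is to map the visible preview area into the output's normalized coordinate space and crop. A rough sketch (it assumes image is the captured UIImage from the still image callback above, and it ignores orientation edge cases):
// Sketch: crop the captured image to the region that was visible in an AspectFill preview.
AVCaptureVideoPreviewLayer *layer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
// Normalized (0-1) rect of the preview's visible area in the capture output's coordinate space
CGRect unitRect = [layer metadataOutputRectOfInterestForRect:layer.bounds];
size_t pixelWidth  = CGImageGetWidth(image.CGImage);
size_t pixelHeight = CGImageGetHeight(image.CGImage);
CGRect cropRect = CGRectMake(unitRect.origin.x * pixelWidth,
                             unitRect.origin.y * pixelHeight,
                             unitRect.size.width * pixelWidth,
                             unitRect.size.height * pixelHeight);
CGImageRef croppedRef = CGImageCreateWithImageInRect(image.CGImage, cropRect);
UIImage *croppedImage = [UIImage imageWithCGImage:croppedRef scale:image.scale orientation:image.imageOrientation];
CGImageRelease(croppedRef);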
// Device orientation
UIDevice *device = [UIDevice currentDevice];
switch (device.orientation) {
    case UIDeviceOrientationFaceUp:
        NSLog(@"Lying flat, screen facing up");
        break;
    case UIDeviceOrientationFaceDown:
        NSLog(@"Lying flat, screen facing down");
        break;
    case UIDeviceOrientationUnknown:
        NSLog(@"Unknown orientation");
        break;
    case UIDeviceOrientationLandscapeLeft:
        NSLog(@"Landscape, rotated left");
        break;
    case UIDeviceOrientationLandscapeRight:
        NSLog(@"Landscape, rotated right");
        break;
    case UIDeviceOrientationPortrait:
        NSLog(@"Portrait");
        break;
    case UIDeviceOrientationPortraitUpsideDown:
        NSLog(@"Portrait, upside down");
        break;
    default:
        break;
}
The switch above covers every device orientation. The video orientation, videoOrientation, has fewer cases:
AVCaptureVideoOrientationPortrait: home button at the bottom
AVCaptureVideoOrientationPortraitUpsideDown: home button at the top
AVCaptureVideoOrientationLandscapeRight: home button on the right
AVCaptureVideoOrientationLandscapeLeft: home button on the left
These video orientations describe the capture (input) orientation when recording video or taking photos; on output, the image or video is automatically transformed according to them to give the best viewing experience.
Here is an example with photos (video works the same way):
Suppose you hold the phone in landscape and take a photo. If you do not pass a video orientation before capturing, it defaults to AVCaptureVideoOrientationPortrait, the normal upright way of holding the phone, so no transform is applied on output. When you save the photo to the album you will find you have to hold the phone in landscape to view it correctly, and if you shot it upside down you would have to hold the phone upside down. If, however, you set the video orientation before capturing, say you hold the phone in landscape with the home button on the right and pass AVCaptureVideoOrientationLandscapeRight, then the photo saved to the album can be viewed with the phone held normally.
// Before capturing, set the current video orientation on the output's connection (the same can be done for video recording)
AVCaptureConnection *connection = [_imageOutput connectionWithMediaType:AVMediaTypeVideo];
if (connection.isVideoOrientationSupported) {
connection.videoOrientation = [self currentVideoOrientation];
}
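currentVideoOrientation is not listed in the article; a typical implementation maps the device orientation to a video orientation, roughly like this (note that landscape left and right are deliberately swapped between the two enums):
// Sketch: map UIDeviceOrientation to AVCaptureVideoOrientation.
- (AVCaptureVideoOrientation)currentVideoOrientation {
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationPortraitUpsideDown:
            return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIDeviceOrientationLandscapeLeft:
            return AVCaptureVideoOrientationLandscapeRight;  // home button on the right
        case UIDeviceOrientationLandscapeRight:
            return AVCaptureVideoOrientationLandscapeLeft;   // home button on the left
        case UIDeviceOrientationPortrait:
        default:
            return AVCaptureVideoOrientationPortrait;
    }
}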
After Apple's classes have applied this handling, both photos and videos are by default viewed with the phone held upright. If instead we want every photo or video to be viewed with the phone held in landscape, we can simply not set a video orientation, so the output is not transformed, and then apply a transform manually on the asset writer's video input to get the viewing orientation we want. That is how video is handled in this example.
// The reference (playback) orientation of the video, used later to compute the rotation angle
_referenceOrientation = AVCaptureVideoOrientationPortrait;
// This line sets the video connection to the default input orientation
_videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
// Manually rotate the video by setting a transform on the asset writer's video input
_assetVideoInput.transform = [self transformFromCurrentVideoOrientationToOrientation:self.referenceOrientation];
// Implementation of the rotation transform
- (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
CGFloat orientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:orientation];
CGFloat videoOrientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:self.motionManager.videoOrientation];
CGFloat angleOffset;
if ([self activeCamera].position == AVCaptureDevicePositionBack) {
angleOffset = orientationAngleOffset - videoOrientationAngleOffset;
}
else{
angleOffset = videoOrientationAngleOffset - orientationAngleOffset + M_PI_2;
}
CGAffineTransform transform = CGAffineTransformMakeRotation(angleOffset);
return transform;
}
- (CGFloat)angleOffsetFromPortraitOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
CGFloat angle = 0.0;
switch (orientation)
{
case AVCaptureVideoOrientationPortrait:
angle = 0.0;
break;
case AVCaptureVideoOrientationPortraitUpsideDown:
angle = M_PI;
break;
case AVCaptureVideoOrientationLandscapeRight:
angle = -M_PI_2;
break;
case AVCaptureVideoOrientationLandscapeLeft:
angle = M_PI_2;
break;
default:
break;
}
return angle;
}
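The transform above reads self.motionManager.videoOrientation, the device's current orientation expressed as a video orientation, which the article does not show. One way it could be tracked is by listening for device orientation changes and ignoring face-up, face-down and unknown so the last usable orientation is kept. This is an illustrative reconstruction, not the project's exact class:
// Sketch: keep an AVCaptureVideoOrientation up to date from UIDevice orientation notifications.
@interface CCMotionManager : NSObject
@property (nonatomic, assign) AVCaptureVideoOrientation videoOrientation;
@end

@implementation CCMotionManager

- (instancetype)init {
    if ((self = [super init])) {
        _videoOrientation = AVCaptureVideoOrientationPortrait;
        [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(orientationChanged)
                                                     name:UIDeviceOrientationDidChangeNotification
                                                   object:nil];
    }
    return self;
}

- (void)orientationChanged {
    switch ([UIDevice currentDevice].orientation) {
        case UIDeviceOrientationPortrait:
            self.videoOrientation = AVCaptureVideoOrientationPortrait; break;
        case UIDeviceOrientationPortraitUpsideDown:
            self.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; break;
        case UIDeviceOrientationLandscapeLeft:
            self.videoOrientation = AVCaptureVideoOrientationLandscapeRight; break;
        case UIDeviceOrientationLandscapeRight:
            self.videoOrientation = AVCaptureVideoOrientationLandscapeLeft; break;
        default:
            break; // keep the previous orientation for face up/down/unknown
    }
}

- (void)dealloc {
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
@end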
// Save a GIF
NSData *data = [[NSData alloc] initWithContentsOfURL:GifURL];
[lab writeImageDataToSavedPhotosAlbum:data metadata:nil completionBlock:^(NSURL *assetURL, NSError *error) {
    if (error) {
        [self showError:error];
    }
}];
On iOS 9.0 and later:
[PHPhotoLibrary requestAuthorization:^( PHAuthorizationStatus status ) {
if (status == PHAuthorizationStatusAuthorized) {
        [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
            // Save the GIF
            NSData *data = [[NSData alloc] initWithContentsOfURL:GifURL];
            PHAssetCreationRequest *gifRequest = [PHAssetCreationRequest creationRequestForAsset];
            [gifRequest addResourceWithType:PHAssetResourceTypePhoto data:data options:nil];
        } completionHandler:^( BOOL success, NSError * _Nullable error ) {
if (!success) {
[self showError:error];
}
}];
}
}];
After saving with this method, the system Photos app only shows the first frame, because it does not animate GIFs. If you send the saved file with QQ it plays as a GIF; WeChat currently cannot display it either, and the reason is still unclear.
// Loading HUD
-(void)showHUD:(UIViewController *)vc message:(NSString *)message isLoad:(BOOL)isLoad{
if (!self.ccAlertController) {
self.ccAlertController = [UIAlertController alertControllerWithTitle:nil
message:[NSString stringWithFormat:@"\n\n\n%@",message] // three blank lines leave room for the spinner
preferredStyle:UIAlertControllerStyleAlert];
if (isLoad) {
// Walk the view hierarchy to find the UIAlertController's message label
[self findLabel:self.ccAlertController.view succ:^(UIView *label) {
dispatch_async(dispatch_get_main_queue(), ^{
UIActivityIndicatorView *activityView = [[UIActivityIndicatorView alloc]initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
activityView.color = [UIColor lightGrayColor];
activityView.center = CGPointMake(CGRectGetWidth(label.bounds)/2, 25);
[label addSubview:activityView];
[activityView startAnimating];
});
}];
}
}
[vc presentViewController:self.ccAlertController animated:YES completion:nil];
}
// Implementation of the label lookup
-(void)findLabel:(UIView*)view succ:(void(^)(UIView *label))succ
{
for (UIView* subView in view.subviews)
{
if ([subView isKindOfClass:[UILabel class]]) {
if (succ) {
succ(subView);
}
}
[self findLabel:subView succ:succ];
}
}
// Auto-dismissing alert HUD
-(void)showAutoDismissHUD:(UIViewController *)vc message:(NSString *)message delay:(NSTimeInterval)delay{
if (!self.ccAlertController) {
self.ccAlertController = [UIAlertController alertControllerWithTitle:nil
message:message
preferredStyle:UIAlertControllerStyleAlert];
UIView *view = [[UIView alloc]initWithFrame:self.ccAlertController.view.bounds];
UIActivityIndicatorView *activityView = [[UIActivityIndicatorView alloc]initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];
activityView.center = CGPointMake(CGRectGetWidth(view.bounds)/2, CGRectGetHeight(view.bounds)/2);
[view addSubview:activityView];
[activityView startAnimating];
// Attach the spinner container to the alert so it is actually visible
[self.ccAlertController.view addSubview:view];
}
[vc presentViewController:self.ccAlertController animated:YES completion:nil];
[NSTimer scheduledTimerWithTimeInterval:delay
target:self
selector:@selector(hideHUD)
userInfo:self.ccAlertController
repeats:NO];
}
-(void)hideHUD{
if (self.ccAlertController) {
[self.ccAlertController dismissViewControllerAnimated:YES completion:^{
}];
}
}
It would actually be better to put these HUD helpers into a UIViewController category!
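For instance, the two HUD methods above could be exposed through a category roughly like this (the category and method names are only illustrative):
// Sketch: the same HUD helpers exposed as a UIViewController category.
// (The ccAlertController property would then need to be backed by an associated object.)
@interface UIViewController (CCHUD)
- (void)cc_showHUDWithMessage:(NSString *)message isLoad:(BOOL)isLoad;
- (void)cc_showAutoDismissHUDWithMessage:(NSString *)message delay:(NSTimeInterval)delay;
- (void)cc_hideHUD;
@end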