ios GPUImage简单滤镜 -- 录制视频(保存+聚焦)

最近初学ios以及研究GPUImage第三方库,在度娘及google上查了不少的资料后,勉强写了一个小程序,分享给大家。代码中肯定有不足之处(毕竟初学者),望大家指教!

在写代码之前,要先引入GPUImage库。可以自己动手引入或直接用cocoapods(  度娘及官网有教程,不懂的请自行搜索 - -)。


下面是代码:

VideoCameraView.h

#import <UIKit/UIKit.h>
#import "GPUImage.h"

/// A camera preview view that records the live feed through a GPUImage
/// saturation filter, saves the finished movie to the photo album, and
/// supports tap-to-focus on the preview.
@interface VideoCameraView : UIView

{
    GPUImageVideoCamera *videoCamera;   // live capture source (back camera, 720p)
    GPUImageOutput *filter;             // saturation filter in the render chain
    GPUImageMovieWriter *movieWriter;   // writes filtered frames to disk while recording
    NSString *pathToMovie;              // output path of the current recording
    GPUImageView *filteredVideoView;    // on-screen preview of the filtered feed
    CALayer *_focusLayer;               // tap-to-focus indicator layer
    NSTimer *myTimer;                   // 1 s timer driving the elapsed-time label
    UILabel *timeLabel;                 // shows elapsed recording time as HH:mm:ss
    NSDate *fromdate;                   // timestamp captured when recording started
    CGRect mainScreenFrame;             // frame passed at init, used to lay out controls
}

- (instancetype)initWithFrame:(CGRect)frame NS_DESIGNATED_INITIALIZER; 

@end

VideoCameraView.m

#import "VideoCameraView.h"
@interface VideoCameraView ()

@end

@implementation VideoCameraView
/// Designated initializer: builds the capture chain
/// (camera -> saturation filter -> preview), starts capture, and installs
/// the overlay controls plus the tap-to-focus gesture.
- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        mainScreenFrame = frame;

        // Capture source: back camera at 720p, portrait output.
        videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
                                                          cameraPosition:AVCaptureDevicePositionBack];
        videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
        [videoCamera addAudioInputsAndOutputs];

        // Render chain: camera -> saturation filter -> on-screen preview.
        filter = [[GPUImageSaturationFilter alloc] init];
        filteredVideoView = [[GPUImageView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
        [videoCamera addTarget:filter];
        [filter addTarget:filteredVideoView];
        [videoCamera startCameraCapture];

        // Overlay controls (slider, time label, record buttons).
        [self addSomeView];

        // A single-finger single tap on the preview triggers tap-to-focus.
        UITapGestureRecognizer *tapRecognizer =
            [[UITapGestureRecognizer alloc] initWithTarget:self
                                                    action:@selector(cameraViewTapAction:)];
        tapRecognizer.numberOfTouchesRequired = 1;
        tapRecognizer.numberOfTapsRequired = 1;
        [filteredVideoView addGestureRecognizer:tapRecognizer];

        [self addSubview:filteredVideoView];
    }
    return self;
}

/// Builds the overlay UI on top of the preview: a saturation slider,
/// an elapsed-time label, and start/stop recording buttons.
- (void)addSomeView {
    // Saturation slider (0.0–2.0, default 1.0) across the top of the screen.
    UISlider *saturationSlider =
        [[UISlider alloc] initWithFrame:CGRectMake(25.0, 30.0, mainScreenFrame.size.width - 50.0, 40.0)];
    [saturationSlider addTarget:self
                         action:@selector(updateSliderValue:)
               forControlEvents:UIControlEventValueChanged];
    saturationSlider.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin;
    saturationSlider.minimumValue = 0.0;
    saturationSlider.maximumValue = 2.0;
    saturationSlider.value = 1.0;
    [filteredVideoView addSubview:saturationSlider];

    // Elapsed-time label; reset to zero when recording stops.
    timeLabel = [[UILabel alloc] initWithFrame:CGRectMake(20.0, 60.0, 100, 30.0)];
    timeLabel.font = [UIFont systemFontOfSize:15.0f];
    timeLabel.text = @"00:00:00";
    timeLabel.textAlignment = NSTextAlignmentCenter;
    timeLabel.backgroundColor = [UIColor clearColor];
    timeLabel.textColor = [UIColor whiteColor];
    [filteredVideoView addSubview:timeLabel];

    // Start button — begins a new recording.
    UIButton *startButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    startButton.layer.cornerRadius = 8;
    startButton.frame = CGRectMake(50, mainScreenFrame.size.height - 70.0, 50.0, 40.0);
    startButton.backgroundColor = [UIColor whiteColor];
    [startButton setTitle:@"开始" forState:UIControlStateNormal];
    startButton.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin;
    [startButton addTarget:self action:@selector(startRecording:) forControlEvents:UIControlEventTouchUpInside];
    [startButton setTitleColor:[UIColor grayColor] forState:UIControlStateDisabled];
    [filteredVideoView addSubview:startButton];

    // Stop button — ends the recording and saves the movie.
    UIButton *stopButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    stopButton.layer.cornerRadius = 8;
    stopButton.frame = CGRectMake(mainScreenFrame.size.width - 150, mainScreenFrame.size.height - 70.0, 100.0, 40.0);
    stopButton.backgroundColor = [UIColor whiteColor];
    [stopButton setTitle:@"录制结束" forState:UIControlStateNormal];
    stopButton.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin;
    [stopButton addTarget:self action:@selector(stopRecording:) forControlEvents:UIControlEventTouchUpInside];
    [stopButton setTitleColor:[UIColor grayColor] forState:UIControlStateDisabled];
    [filteredVideoView addSubview:stopButton];
}




/// Slider callback: maps the slider's value (0.0–2.0) onto the
/// saturation of the filter in the render chain.
- (IBAction)updateSliderValue:(id)sender
{
    UISlider *slider = (UISlider *)sender;
    GPUImageSaturationFilter *saturationFilter = (GPUImageSaturationFilter *)filter;
    saturationFilter.saturation = slider.value;
}

/// Stops the current recording and saves the finished movie to the
/// Saved Photos album.
///
/// Fix: the original called UISaveVideoAtPathToSavedPhotosAlbum BEFORE
/// -finishRecording, so a still-incomplete file could be copied to the
/// album. The save now happens in the writer's completion handler.
- (IBAction)stopRecording:(id)sender {
    // Detach the writer from the pipeline before asking it to finish.
    [filter removeTarget:movieWriter];
    videoCamera.audioEncodingTarget = nil;

    NSString *recordedPath = pathToMovie;
    [movieWriter finishRecordingWithCompletionHandler:^{
        NSLog(@"Path %@", recordedPath);
        // Save only once the file has been fully written to disk.
        if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(recordedPath)) {
            UISaveVideoAtPathToSavedPhotosAlbum(recordedPath, nil, nil, nil);
        }
    }];

    // Reset the elapsed-time UI and stop (and release) the repeating timer.
    timeLabel.text = @"00:00:00";
    [myTimer invalidate];
    myTimer = nil;
}



/// Starts recording the filtered camera feed to Documents/Movie.m4v and
/// starts a 1 s repeating timer that updates the elapsed-time label.
///
/// Fix: the original had no re-entry guard, so tapping "开始" twice leaked
/// the previous movie writer and stacked a second timer on top of the first.
- (IBAction)startRecording:(id)sender {
    // Already recording: the timer doubles as the "recording in progress" flag.
    if (myTimer) {
        return;
    }

    pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
    // If a file already exists, AVAssetWriter won't let you record new
    // frames, so delete the old movie first.
    unlink([pathToMovie UTF8String]);

    NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(360.0, 640.0)];
    movieWriter.encodingLiveVideo = YES;
    movieWriter.shouldPassthroughAudio = YES;
    [filter addTarget:movieWriter];
    videoCamera.audioEncodingTarget = movieWriter;
    [movieWriter startRecording];

    // Elapsed-time bookkeeping. NOTE(review): a repeating NSTimer retains its
    // target, so it must be invalidated (see -stopRecording:) before this
    // view can deallocate.
    fromdate = [NSDate date];
    myTimer = [NSTimer scheduledTimerWithTimeInterval:1.0
                                               target:self
                                             selector:@selector(updateTimer:)
                                             userInfo:nil
                                              repeats:YES];
}

/// Timer callback (fires every second): refreshes the elapsed-time label
/// as HH:mm:ss.
///
/// Fix: the original round-tripped through deprecated NS*CalendarUnit flags,
/// an NSDateComponents->NSDate conversion, and an NSDateFormatter — which is
/// wasteful per tick and produces wrong output once the duration no longer
/// fits the formatter's wall-clock assumptions. Compute the duration
/// directly from the start date instead.
- (void)updateTimer:(NSTimer *)sender {
    NSTimeInterval elapsed = [[NSDate date] timeIntervalSinceDate:fromdate];
    long totalSeconds = (long)elapsed;
    long hours = totalSeconds / 3600;
    long minutes = (totalSeconds % 3600) / 60;
    long seconds = totalSeconds % 60;
    timeLabel.text = [NSString stringWithFormat:@"%02ld:%02ld:%02ld", hours, minutes, seconds];
}


/// Lazily creates the tap-to-focus indicator layer from the "96" image asset
/// and attaches it (hidden) to the preview's layer.
///
/// Fix: the original unconditionally created and added a brand-new sublayer
/// on every call, and -cameraViewTapAction: calls this on every tap — so
/// sublayers accumulated on the preview forever. Create once, then reuse.
- (void)setfocusImage {
    if (_focusLayer) {
        return; // already created; reuse the existing layer
    }
    UIImage *focusImage = [UIImage imageNamed:@"96"];
    UIImageView *imageView =
        [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, focusImage.size.width, focusImage.size.height)];
    imageView.image = focusImage;
    CALayer *layer = imageView.layer;
    layer.hidden = YES;
    [filteredVideoView.layer addSublayer:layer];
    _focusLayer = layer;
}


/// Shows the focus indicator at `point`, pre-scales it to 2x, animates it
/// back down to 1x over 0.3 s, then hides it again after a 0.5 s delay.
- (void)layerAnimationWithPoint:(CGPoint)point {
    if (!_focusLayer) {
        return;
    }
    CALayer *focusLayer = _focusLayer;
    focusLayer.hidden = NO;

    // Position and pre-scale without triggering implicit animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    focusLayer.position = point;
    focusLayer.transform = CATransform3DMakeScale(2.0f, 2.0f, 1.0f);
    [CATransaction commit];

    // Animate the transform back to identity scale.
    CABasicAnimation *shrink = [CABasicAnimation animationWithKeyPath:@"transform"];
    shrink.toValue = [NSValue valueWithCATransform3D:CATransform3DMakeScale(1.0f, 1.0f, 1.0f)];
    shrink.delegate = self;
    shrink.duration = 0.3f;
    shrink.repeatCount = 1;
    shrink.removedOnCompletion = NO;
    shrink.fillMode = kCAFillModeForwards;
    [focusLayer addAnimation:shrink forKey:@"animation"];

    // Hide the indicator again after a 0.5 s delay.
    [self performSelector:@selector(focusLayerNormal) withObject:self afterDelay:0.5f];
}

// CAAnimationDelegate callback for the focus-indicator shrink animation.
// Intentionally empty: hiding the layer is handled by -focusLayerNormal,
// which is scheduled separately via performSelector:afterDelay:.
- (void)animationDidStop:(CAAnimation *)anim finished:(BOOL)flag {

}

/// Restores the preview after a focus tap: re-enables user interaction on
/// the preview view and hides the focus-indicator layer.
- (void)focusLayerNormal {
    filteredVideoView.userInteractionEnabled = YES;
    _focusLayer.hidden = YES;
}


/// Tap-to-focus handler: shows the focus animation at the tap location and
/// points the capture device's focus (and, when supported, exposure) there.
///
/// Fix: the original guard read `_focusLayer == NO`, comparing an object
/// pointer against the BOOL literal NO; the intent — "no indicator layer yet,
/// or the previous focus animation already finished" — is spelled out below.
- (void)cameraViewTapAction:(UITapGestureRecognizer *)tgr
{
    if (tgr.state != UIGestureRecognizerStateRecognized) {
        return;
    }
    // Ignore taps while a focus animation is still showing.
    if (_focusLayer != nil && !_focusLayer.hidden) {
        return;
    }

    CGPoint location = [tgr locationInView:filteredVideoView];
    [self setfocusImage];
    [self layerAnimationWithPoint:location];

    AVCaptureDevice *device = videoCamera.inputCamera;
    NSLog(@"taplocation x = %f y = %f", location.x, location.y);

    // Convert view coordinates into the device's point-of-interest space
    // ((0,0)–(1,1) in the sensor's landscape orientation), mirroring
    // horizontally for the front camera's mirrored preview.
    CGSize frameSize = [filteredVideoView frame].size;
    if ([videoCamera cameraPosition] == AVCaptureDevicePositionFront) {
        location.x = frameSize.width - location.x;
    }
    CGPoint pointOfInterest = CGPointMake(location.y / frameSize.height,
                                          1.f - (location.x / frameSize.width));

    if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
        NSError *error;
        // Configuration changes require an exclusive lock on the device.
        if ([device lockForConfiguration:&error]) {
            [device setFocusPointOfInterest:pointOfInterest];
            [device setFocusMode:AVCaptureFocusModeAutoFocus];
            if ([device isExposurePointOfInterestSupported] &&
                [device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
                [device setExposurePointOfInterest:pointOfInterest];
                [device setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
            }
            [device unlockForConfiguration];
            NSLog(@"FOCUS OK");
        } else {
            NSLog(@"ERROR = %@", error);
        }
    }
}
@end


将以上两个文件放入你的工程后,只需在viewDidLoad函数中加入以下代码,就可以运行了(记得引入头文件~)

CGRect frame = [[UIScreen mainScreen] bounds];
VideoCameraView *view = [[VideoCameraView alloc] initWithFrame:frame];
[self.view addSubview:view];


程序下载地址: http://download.csdn.net/detail/u012965341/9484245

你可能感兴趣的:(ios)