//
// LittleVideoViewController.h
// uploadVideoDemo
//
// Created by 欧阳荣 on 16/9/5.
// Copyright © 2016年 HengTaiXin. All rights reserved.
//
#import <UIKit/UIKit.h>
@protocol LittleVideoDelegate <NSObject>
- (void)finishLittleVideoViewControllerCapture:(NSURL *)filePath;
@end
@interface LittleVideoViewController : UIViewController
@property (nonatomic,weak) id<LittleVideoDelegate> delegate;
@end
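// Usage sketch (illustrative, not part of the demo): a host view controller that adopts
// LittleVideoDelegate presents the recorder and receives the recorded file URL through the
// delegate callback. The host controller and its method below are assumptions.
//
//   LittleVideoViewController *recorder = [[LittleVideoViewController alloc] init];
//   recorder.delegate = self;
//   [self presentViewController:recorder animated:YES completion:nil];
//
//   - (void)finishLittleVideoViewControllerCapture:(NSURL *)filePath {
//       NSLog(@"Recorded video saved at %@", filePath);
//   }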
//
// LittleVideoViewController.m
// uploadVideoDemo
//
// Created by 欧阳荣 on 16/9/5.
// Copyright © 2016年 HengTaiXin. All rights reserved.
//
#import "LittleVideoViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "UIView+RMAdditions.h"
#define BLUECOLOR [UIColor colorWithRed:0/255.0 green:155/255.0 blue:225/255.0 alpha:1]
#define REDCOLOR [UIColor colorWithRed:255/255.0 green:27/255.0 blue:86/255.0 alpha:1]
#define kDuration 8.0
#define kTrans SCREEN_WIDTH/kDuration/60.0
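// kDuration is the maximum recording length in seconds; kTrans is the number of points the
// progress bar shrinks per CADisplayLink tick, assuming a ~60 Hz refresh, so the bar empties
// after roughly kDuration seconds.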
typedef NS_ENUM(NSInteger,VideoStatus){
VideoStatusEnded = 0,
VideoStatusStarted
};
@interface LittleVideoViewController ()<AVCaptureFileOutputRecordingDelegate>
{
AVCaptureSession * _captureSession;
AVCaptureDevice *_videoDevice;
AVCaptureDevice *_audioDevice;
AVCaptureDeviceInput *_videoInput;
AVCaptureDeviceInput *_audioInput;
AVCaptureMovieFileOutput *_movieOutput;
AVCaptureVideoPreviewLayer *_captureVideoPreviewLayer;
}
@property (nonatomic,strong) UIView * navView;
@property (nonatomic,strong) UIButton * backBtn;
@property (nonatomic,strong) UIView * videoView;
@property (nonatomic,strong) UIView * bottomView;
@property (nonatomic,strong) UILabel * tapBtn;
@property (nonatomic,assign) VideoStatus status;
@property (nonatomic,strong) NSLayoutConstraint * progressWidth;
@property (nonatomic,strong) UIView *progressView;
@property (nonatomic,strong) CADisplayLink *link;
@property (nonatomic,assign) BOOL canSave;
@property (nonatomic,strong) UILabel * cancelTip;
@property (nonatomic,strong) UIView * focusCircle;
@property (nonatomic,strong) UIButton *changeBtn;
@property (nonatomic,strong) UIButton *flashModelBtn;
@end
@implementation LittleVideoViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view.
[self creatNavView];
}
#pragma mark - Create UI
-(void)creatNavView{
self.videoView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT)];
[self.view addSubview:self.videoView];
self.videoView.layer.masksToBounds = YES;
self.navView = [[UIView alloc]initWithFrame:CGRectMake(0, 0,SCREEN_WIDTH, 64)];
self.navView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.6];
[self.view addSubview:self.navView];
[self.navView addSubview:self.flashModelBtn];
[self.navView addSubview:self.changeBtn];
self.backBtn = [UIButton buttonWithType:UIButtonTypeCustom];
// [self.backBtn setTitle:@"取消" forState:UIControlStateNormal];
[_backBtn setImage:[UIImage imageNamed:@"WechatShortVideo_close"] forState:UIControlStateNormal];
self.backBtn.frame = CGRectMake(15,25, 25, 25);
[self.backBtn addTarget:self action:@selector(backBtnClick) forControlEvents:UIControlEventTouchUpInside];
[self.navView addSubview:self.backBtn];
self.bottomView = [[UIView alloc]initWithFrame:CGRectMake(0, SCREEN_HEIGHT - 170/2 - 4, SCREEN_WIDTH, 170/2 + 4)];
self.bottomView.backgroundColor = [UIColor colorWithRed:0/255.0 green:0/255.0 blue:0/255.0 alpha:0.6];
[self.view addSubview:self.bottomView];
self.tapBtn = [[UILabel alloc]initWithFrame:CGRectMake(SCREEN_WIDTH/2 - 60/2, 85/2 - 60/2, 60, 60)];
self.tapBtn.text = @"按住拍";
self.tapBtn.textColor = [UIColor whiteColor];
[self.bottomView addSubview:_tapBtn];
self.tapBtn.font = [UIFont systemFontOfSize:15];
self.tapBtn.textAlignment = NSTextAlignmentCenter;
_tapBtn.layer.borderWidth = 4;
_tapBtn.layer.cornerRadius = 60/2;
_tapBtn.layer.masksToBounds = YES;
_tapBtn.layer.borderColor = BLUECOLOR.CGColor;
// Progress bar
self.progressView = [[UIView alloc]init];
_progressView.translatesAutoresizingMaskIntoConstraints = NO;
_progressView.backgroundColor = BLUECOLOR;
self.progressView.alpha = 0;
[self.view addSubview:_progressView];
// The width starts at the full screen width and shrinks while recording (an exact-width constraint so the bar actually narrows)
// Center the progress view horizontally on its superview
NSLayoutConstraint *constrant1 = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeCenterX relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeCenterX multiplier:1.0 constant:0.0];
// Pin the progress view vertically just above the bottom bar
NSLayoutConstraint *constrant2 = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeCenterY relatedBy:NSLayoutRelationEqual toItem:self.view attribute:NSLayoutAttributeCenterY multiplier:1.0 constant:SCREEN_HEIGHT - 170/2 -2 - SCREEN_HEIGHT/2];
self.progressWidth = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeWidth relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:SCREEN_WIDTH];
// Fixed height of 4 points
NSLayoutConstraint *constrant4 = [NSLayoutConstraint constraintWithItem:_progressView attribute:NSLayoutAttributeHeight relatedBy:NSLayoutRelationEqual toItem:nil attribute:NSLayoutAttributeNotAnAttribute multiplier:1.0 constant:4];
NSArray *array = [NSArray arrayWithObjects:constrant1, constrant2, self.progressWidth, constrant4,nil];
[self.view addConstraints:array];
[self getAuthorization];
[self addGenstureRecognizer];
}
#pragma mark Touches
-(void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
NSLog(@"touch");
UITouch *touch = [touches anyObject];
CGPoint point = [touch locationInView:self.view];
BOOL condition = [self isInBtnRect:point];
if (condition) {
[self isFitCondition:condition];
[self startAnimation];
self.changeBtn.hidden= self.flashModelBtn.hidden = YES;
}
}
- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
[super touchesMoved:touches withEvent:event];
NSLog(@"touchesMoved");
UITouch *touch = [touches anyObject];
CGPoint point = [touch locationInView:self.view];
BOOL condition = [self isInBtnRect:point];
[self isFitCondition:condition];
}
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
NSLog(@"touchesEnded");
UITouch *touch = [touches anyObject];
CGPoint point = [touch locationInView:self.view];
BOOL condition = [self isInBtnRect:point];
/*
 Two situations still count as a successful recording:
 1. The finger lifts and the recorded duration is greater than 1/3 of the maximum duration.
 2. The progress bar runs out: even if the finger has drifted outside the button area this still
    counts as success -- touchesEnded is not called here because the finger is still on the screen,
    so the completion method is invoked directly in code and the controller is switched.
 */
if (condition) {
NSLog(@"手指还在按钮范围之内");
if (self.progressWidth.constant < SCREEN_WIDTH * 0.67) {
// Recording counts as complete
[self recordComplete];
}
}
[self stopAnimation];
self.changeBtn.hidden = self.flashModelBtn.hidden = NO;
}
- (BOOL)isInBtnRect:(CGPoint)point
{
CGFloat x = point.x;
CGFloat y = point.y;
return (x>self.tapBtn.left && x<=self.tapBtn.right) && (y > (self.tapBtn.top + self.bottomView.y) && y <= (self.tapBtn.bottom + self.bottomView.y));
}
//po self.tapBtn.left 130 self.tapBtn.right 190 x CGFloat 146 y CGFloat 523
//self.tapBtn.top 12 self.bottomView.bottom 568
- (void)isFitCondition:(BOOL)condition
{
if (condition) {
self.cancelTip.text = @"↑上滑取消";
self.cancelTip.backgroundColor = [UIColor clearColor];
self.cancelTip.textColor = BLUECOLOR;
self.progressView.backgroundColor = BLUECOLOR;
}else{
self.progressView.backgroundColor = REDCOLOR;
self.cancelTip.text = @"松手取消";
self.cancelTip.backgroundColor = REDCOLOR;
self.cancelTip.textColor = [UIColor whiteColor];
}
}
- (void)startAnimation
{
NSLog(@"startAnimation");
if (self.status == VideoStatusEnded) {
self.status = VideoStatusStarted;
[UIView animateWithDuration:0.5 animations:^{
self.cancelTip.alpha = self.progressView.alpha = 1.0;
self.tapBtn.alpha = 0.0;
self.tapBtn.transform = CGAffineTransformMakeScale(2.0, 2.0);
} completion:^(BOOL finished) {
[self stopLink];
[self.link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}];
}
}
- (void)stopAnimation{
NSLog(@"stopAnimation");
if (self.status == VideoStatusStarted) {
self.status = VideoStatusEnded;
[self stopLink];
[self stopRecord];
[UIView animateWithDuration:0.5 animations:^{
self.cancelTip.alpha = self.progressView.alpha = 0.0;
self.tapBtn.alpha = 1.0;
self.tapBtn.transform = CGAffineTransformMakeScale(1.0, 1.0);
} completion:^(BOOL finished) {
self.progressWidth.constant = SCREEN_WIDTH;
}];
}
}
- (CADisplayLink *)link{
if (! _link) {
_link = [CADisplayLink displayLinkWithTarget:self selector:@selector(refresh:)];
self.progressWidth.constant = SCREEN_WIDTH;
[self startRecord];
}
return _link;
}
- (void)stopLink
{
_link.paused = YES;
[_link invalidate];
_link = nil;
}
- (void)refresh:(CADisplayLink *)link
{
if (self.progressWidth.constant <= 0) {
self.progressWidth.constant = 0;
[self recordComplete];
[self stopAnimation];
return;
}
self.progressWidth.constant -= kTrans;
// NSLog(@" self.progressView.frame %@",NSStringFromCGRect(self.progressView.frame));
// self.progressWidth.constant -=kTrans;
}
#pragma mark Recording
- (NSURL *)outPutFileURL
{
return [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"outPut.mov"]];
}
- (void)startRecord
{
[_movieOutput startRecordingToOutputFileURL:[self outPutFileURL] recordingDelegate:self];
}
- (void)stopRecord
{
// Stop capturing video
[_movieOutput stopRecording];
}
- (void)recordComplete
{
NSLog(@"录制完成");
self.canSave = YES;
}
// Called when leaving the short-video feature entirely
- (void)quit
{
[_captureSession stopRunning];
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
NSLog(@"---- 开始录制 ----");
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(@"---- 录制结束 outputFileURL---%@- captureOutput.outputFileURL - %@ ",outputFileURL,captureOutput.outputFileURL);
if (outputFileURL.absoluteString.length == 0 && captureOutput.outputFileURL.absoluteString.length == 0 ) {
[self showMsgWithTitle:@"出错了" andContent:@"录制视频保存地址出错"];
return;
}
if (self.canSave) {
[self pushToPlay:outputFileURL];
self.canSave = NO;
}
}
- (void)pushToPlay:(NSURL *)url
{
if ([_delegate respondsToSelector:@selector(finishLittleVideoViewControllerCapture:)]) {
[_delegate finishLittleVideoViewControllerCapture:url];
}
[self dismissViewControllerAnimated:YES completion:nil];
}
- (void)getAuthorization
{
/*
 AVAuthorizationStatusNotDetermined = 0, // The user has not made a choice yet
 AVAuthorizationStatusRestricted,        // Access is restricted and the user cannot change it (e.g. parental controls)
 AVAuthorizationStatusDenied,            // The user denied access to the app
 AVAuthorizationStatusAuthorized,        // Access granted, ready to use
 */
switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo])
{
case AVAuthorizationStatusAuthorized: // Authorized. The client is authorized to access the hardware supporting a media type.
{
NSLog(@"授权摄像头使用成功");
[self setupAVCaptureInfo];
break;
}
case AVAuthorizationStatusNotDetermined: // No choice made yet. Indicates that the user has not yet made a choice regarding whether the client can access the hardware.
{
// Request access now
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
// The completion handler may run on an arbitrary queue; hop back to the main queue before touching the session or the UI
dispatch_async(dispatch_get_main_queue(), ^{
if (granted) { // Access granted
[self setupAVCaptureInfo];
} else { // Access denied
[self backBtnClick];
[self showMsgWithTitle:@"出错了" andContent:@"用户拒绝授权摄像头的使用权,返回上一页.请打开\n设置-->隐私/通用等权限设置"];
}
});
}];
break;
}
default: // Access denied or restricted
{
[self backBtnClick];
[self showMsgWithTitle:@"出错了" andContent:@"拒绝授权,返回上一页.请检查下\n设置-->隐私/通用等权限设置"];
break;
}
}
}
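// Note: when building against the iOS 10 SDK, the app's Info.plist must also contain
// NSCameraUsageDescription and NSMicrophoneUsageDescription entries, otherwise the app is
// terminated as soon as it requests camera/microphone access.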
- (void)setupAVCaptureInfo
{
[self addSession];
[_captureSession beginConfiguration];
[self addVideo];
[self addAudio];
[self addPreviewLayer];
[_captureSession commitConfiguration];
// Start the session -- note this is not the same as starting to record
[_captureSession startRunning];
}
- (void)addSession
{
_captureSession = [[AVCaptureSession alloc] init];
// Set the capture resolution
/* Commonly supported session presets:
 (
 AVCaptureSessionPresetLow,
 AVCaptureSessionPresetMedium,
 AVCaptureSessionPresetHigh,
 AVCaptureSessionPreset640x480,
 AVCaptureSessionPreset1280x720,
 AVCaptureSessionPreset1920x1080,
 AVCaptureSessionPresetPhoto
 )
 */
// Note: the preset chosen here determines the size of the photos/videos captured later.
// canSetSessionPreset: expects an AVCaptureSessionPreset* constant; the AVAssetExportPreset*
// names belong to AVAssetExportSession and are only used later for compression.
if ([_captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
[_captureSession setSessionPreset:AVCaptureSessionPresetHigh];
}
}
- (void)addVideo
{
// Get the camera input device and create an AVCaptureDeviceInput for it
/* MediaType
AVF_EXPORT NSString *const AVMediaTypeVideo NS_AVAILABLE(10_7, 4_0); // Video
AVF_EXPORT NSString *const AVMediaTypeAudio NS_AVAILABLE(10_7, 4_0); // Audio
AVF_EXPORT NSString *const AVMediaTypeText NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVMediaTypeClosedCaption NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVMediaTypeSubtitle NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVMediaTypeTimecode NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVMediaTypeMetadata NS_AVAILABLE(10_8, 6_0);
AVF_EXPORT NSString *const AVMediaTypeMuxed NS_AVAILABLE(10_7, 4_0);
*/
/* AVCaptureDevicePosition
typedef NS_ENUM(NSInteger, AVCaptureDevicePosition) {
AVCaptureDevicePositionUnspecified = 0,
AVCaptureDevicePositionBack = 1, // Back camera
AVCaptureDevicePositionFront = 2 // Front camera
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
*/
_videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
[self addVideoInput];
[self addMovieOutput];
}
- (void)addVideoInput
{
NSError *videoError;
// Video input object
// Initialized from the capture device and used to obtain the input data
_videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:&videoError];
if (videoError) {
NSLog(@"---- 取得摄像头设备时出错 ------ %@",videoError);
return;
}
// Add the video input to the session (AVCaptureSession)
if ([_captureSession canAddInput:_videoInput]) {
[_captureSession addInput:_videoInput];
}
}
- (void)addMovieOutput
{
// Movie file output object
// Initialized to obtain the captured output data
_movieOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([_captureSession canAddOutput:_movieOutput]) {
[_captureSession addOutput:_movieOutput];
AVCaptureConnection *captureConnection = [_movieOutput connectionWithMediaType:AVMediaTypeVideo];
// Video orientation
/*
typedef NS_ENUM(NSInteger, AVCaptureVideoOrientation) {
AVCaptureVideoOrientationPortrait = 1,
AVCaptureVideoOrientationPortraitUpsideDown = 2,
AVCaptureVideoOrientationLandscapeRight = 3,
AVCaptureVideoOrientationLandscapeLeft = 4,
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
*/
// if ([captureConnection isVideoOrientationSupported]) {
// [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
// }
// Video stabilization
if ([captureConnection isVideoStabilizationSupported]) {
captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}
// Keep the crop factor at 1.0; videoMaxScaleAndCropFactor would apply maximum digital zoom
captureConnection.videoScaleAndCropFactor = 1.0;
}
}
- (void)addAudio
{
NSError *audioError;
// Add an audio input device
_audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
// Audio input object
_audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:_audioDevice error:&audioError];
if (audioError) {
NSLog(@"取得录音设备时出错 ------ %@",audioError);
return;
}
// Add the audio input to the session (AVCaptureSession)
if ([_captureSession canAddInput:_audioInput]) {
[_captureSession addInput:_audioInput];
}
}
- (void)addPreviewLayer
{
[self.view layoutIfNeeded];
// Create the preview layer from the session (AVCaptureSession)
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
_captureVideoPreviewLayer.frame = self.view.layer.bounds;
/* Fill mode
 Options are AVLayerVideoGravityResize, AVLayerVideoGravityResizeAspect and AVLayerVideoGravityResizeAspectFill. AVLayerVideoGravityResizeAspect is default.
 */
// Change this when the preview should fill the whole screen
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
// Change this if the preview layer's orientation does not match the video orientation
_captureVideoPreviewLayer.connection.videoOrientation = [_movieOutput connectionWithMediaType:AVMediaTypeVideo].videoOrientation;
_captureVideoPreviewLayer.position = CGPointMake(self.view.width*0.5,self.videoView.height*0.5);
// Layer that hosts the preview on screen
CALayer *layer = self.videoView.layer;
layer.masksToBounds = true;
[self.view layoutIfNeeded];
[layer addSublayer:_captureVideoPreviewLayer];
}
- (void)showMsgWithTitle:(NSString *)title andContent:(NSString *)content
{
[[[UIAlertView alloc] initWithTitle:title message:content delegate:nil cancelButtonTitle:@"确定" otherButtonTitles:nil] show];
}
#pragma mark Get camera (front/back)
- (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = devices.firstObject;
for ( AVCaptureDevice *device in devices ) {
if ( device.position == position ) {
captureDevice = device;
break;
}
}
return captureDevice;
}
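// Optional alternative (assumes an iOS 10+ deployment target): devicesWithMediaType: was
// deprecated in iOS 10; AVCaptureDeviceDiscoverySession is the suggested replacement. A
// minimal sketch limited to the built-in wide-angle camera:
- (AVCaptureDevice *)discoveredCameraAtPosition:(AVCaptureDevicePosition)position
{
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    return discovery.devices.firstObject;
}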
#pragma mark Interaction
// Toggle the flash. Flash mode alone has no visible effect while recording video, so the torch is toggled as well
- (void)changeFlashlight:(UIButton *)sender {
BOOL con1 = [_videoDevice hasTorch]; // Supports torch mode
BOOL con2 = [_videoDevice hasFlash]; // Supports flash mode
if (con1 && con2)
{
[self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
if (_videoDevice.flashMode == AVCaptureFlashModeOn) // Flash is on
{
[_videoDevice setFlashMode:AVCaptureFlashModeOff];
[_videoDevice setTorchMode:AVCaptureTorchModeOff];
}else if (_videoDevice.flashMode == AVCaptureFlashModeOff) // Flash is off
{
[_videoDevice setFlashMode:AVCaptureFlashModeOn];
[_videoDevice setTorchMode:AVCaptureTorchModeOn];
}
// else{ // Automatic flash
// [_videoDevice setFlashMode:AVCaptureFlashModeAuto];
// [_videoDevice setTorchMode:AVCaptureTorchModeAuto];
// }
NSLog(@"现在的闪光模式是AVCaptureFlashModeOn么?是你就扣1, %zd",_videoDevice.flashMode == AVCaptureFlashModeOn);
}];
sender.selected=!sender.isSelected;
}else{
NSLog(@"不能切换闪光模式");
}
}
// Switch between the front and back camera
- (void)changeCamera{
switch (_videoDevice.position) {
case AVCaptureDevicePositionBack:
_videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionFront];
break;
case AVCaptureDevicePositionFront:
_videoDevice = [self deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
break;
default:
return;
break;
}
[self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
NSError *error;
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:&error];
if (newVideoInput != nil) {
// The old input must be removed first, otherwise canAddInput: will return NO
[_captureSession removeInput:_videoInput];
if ([_captureSession canAddInput:newVideoInput]) {
[_captureSession addInput:newVideoInput];
_videoInput = newVideoInput;
}else{
[_captureSession addInput:_videoInput];
}
} else if (error) {
NSLog(@"切换前/后摄像头失败, error = %@", error);
}
}];
}
/**
 * Add tap gestures; a single tap focuses at the tapped point
 */
-(void)addGenstureRecognizer{
UITapGestureRecognizer *singleTapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(singleTap:)];
singleTapGesture.numberOfTapsRequired = 1;
singleTapGesture.delaysTouchesBegan = YES;
UITapGestureRecognizer *doubleTapGesture=[[UITapGestureRecognizer alloc]initWithTarget:self action:@selector(doubleTap:)];
doubleTapGesture.numberOfTapsRequired = 2;
doubleTapGesture.delaysTouchesBegan = YES;
[singleTapGesture requireGestureRecognizerToFail:doubleTapGesture];
[self.videoView addGestureRecognizer:singleTapGesture];
[self.videoView addGestureRecognizer:doubleTapGesture];
}
-(void)singleTap:(UITapGestureRecognizer *)tapGesture{
NSLog(@"单击");
CGPoint point= [tapGesture locationInView:self.videoView];
// Convert the UI coordinate to a camera coordinate; the camera's point of interest ranges from 0 to 1
CGPoint cameraPoint= [_captureVideoPreviewLayer captureDevicePointOfInterestForPoint:point];
[self setFocusCursorAnimationWithPoint:point];
[self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
/*
 @constant AVCaptureFocusModeLocked -- focus locked at the current position
 Indicates that the focus should be locked at the lens' current position.
 @constant AVCaptureFocusModeAutoFocus -- autofocus once, then switch to locked
 Indicates that the device should autofocus once and then change the focus mode to AVCaptureFocusModeLocked.
 @constant AVCaptureFocusModeContinuousAutoFocus -- refocus automatically whenever needed
 Indicates that the device should automatically focus when needed.
 */
// Focus mode
if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
[captureDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
NSLog(@"聚焦模式修改为%zd",AVCaptureFocusModeContinuousAutoFocus);
}else{
NSLog(@"聚焦模式修改失败");
}
// Focus point
if ([captureDevice isFocusPointOfInterestSupported]) {
[captureDevice setFocusPointOfInterest:cameraPoint];
}
/*
 @constant AVCaptureExposureModeLocked -- exposure locked at its current value
 Indicates that the exposure should be locked at its current value.
 @constant AVCaptureExposureModeAutoExpose -- adjust exposure once, then lock
 Indicates that the device should automatically adjust exposure once and then change the exposure mode to AVCaptureExposureModeLocked.
 @constant AVCaptureExposureModeContinuousAutoExposure -- adjust exposure automatically when needed
 Indicates that the device should automatically adjust exposure when needed.
 @constant AVCaptureExposureModeCustom -- exposure follows only the caller-supplied values
 Indicates that the device should only adjust exposure according to user provided ISO, exposureDuration values.
 */
// Exposure mode
if ([captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
[captureDevice setExposureMode:AVCaptureExposureModeAutoExpose];
}else{
NSLog(@"曝光模式修改失败");
}
// Exposure point
if ([captureDevice isExposurePointOfInterestSupported]) {
[captureDevice setExposurePointOfInterest:cameraPoint];
}
}];
}
// Double tap to toggle the zoom factor
-(void)doubleTap:(UITapGestureRecognizer *)tapGesture{
NSLog(@"双击");
[self changeDevicePropertySafety:^(AVCaptureDevice *captureDevice) {
if (captureDevice.videoZoomFactor == 1.0) {
CGFloat current = 1.5;
if (current < captureDevice.activeFormat.videoMaxZoomFactor) {
[captureDevice rampToVideoZoomFactor:current withRate:10];
}
}else{
[captureDevice rampToVideoZoomFactor:1.0 withRate:10];
}
}];
}
// Focus-indicator animation
-(void)setFocusCursorAnimationWithPoint:(CGPoint)point{
self.focusCircle.center = point;
self.focusCircle.transform = CGAffineTransformIdentity;
self.focusCircle.alpha = 1.0;
[UIView animateWithDuration:0.5 animations:^{
self.focusCircle.transform=CGAffineTransformMakeScale(0.5, 0.5);
self.focusCircle.alpha = 0.0;
}];
}
// Focus indicator
- (UIView *)focusCircle{
if (!_focusCircle) {
UIView *focusCircle = [[UIView alloc] init];
focusCircle.frame = CGRectMake(0, 0, 100, 100);
focusCircle.layer.borderColor = BLUECOLOR.CGColor;
focusCircle.layer.borderWidth = 2;
focusCircle.layer.cornerRadius = 50;
focusCircle.layer.masksToBounds =YES;
_focusCircle = focusCircle;
[self.videoView addSubview:focusCircle];
}
return _focusCircle;
}
-(UIButton *)flashModelBtn{
if (!_flashModelBtn) {
UIButton * flashModelBtn = [UIButton buttonWithType:UIButtonTypeCustom];//30/2 40/2
flashModelBtn.frame = CGRectMake(SCREEN_WIDTH/2 - 15/2, 30, 15 + 2, 20 + 3);
[flashModelBtn setImage:[UIImage imageNamed:@"sg1"] forState:UIControlStateNormal];
[flashModelBtn setImage:[UIImage imageNamed:@"sg2"] forState:UIControlStateSelected];
_flashModelBtn = flashModelBtn;
_flashModelBtn.imageView.contentMode = UIViewContentModeScaleAspectFill;
[_flashModelBtn addTarget:self action:@selector(changeFlashlight:) forControlEvents:UIControlEventTouchUpInside];
}
return _flashModelBtn;
}
-(UIButton *)changeBtn{
//40/2 33/2
if (!_changeBtn) {
UIButton * changeBtn = [UIButton buttonWithType:UIButtonTypeCustom];
changeBtn.frame = CGRectMake(SCREEN_WIDTH - 35, 30, 20 + 5, 16 + 4);
[changeBtn setImage:[UIImage imageNamed:@"zhxj"] forState:UIControlStateNormal];
_changeBtn = changeBtn;
_changeBtn.imageView.contentMode = UIViewContentModeScaleAspectFill;
[_changeBtn addTarget:self action:@selector(changeCamera) forControlEvents:UIControlEventTouchUpInside];
}
return _changeBtn;
}
// Always lock the device before changing its configuration
-(void)changeDevicePropertySafety:(void (^)(AVCaptureDevice *captureDevice))propertyChange{
// _videoDevice could be used directly, but going through the input is preferable
AVCaptureDevice *captureDevice= [_videoInput device];
NSError *error;
// Call lockForConfiguration: before changing any device property and unlockForConfiguration afterwards; the lock prevents concurrent modification while the change is in progress
BOOL lockAcquired = [captureDevice lockForConfiguration:&error];
NSLog(@"lockForConfiguration");
if (!lockAcquired) {
NSLog(@"锁定设备过程error,错误信息:%@",error.localizedDescription);
}else{
[_captureSession beginConfiguration];
propertyChange(captureDevice);
[captureDevice unlockForConfiguration];
[_captureSession commitConfiguration];
NSLog(@"unlockForConfiguration");
}
}
#pragma mark - Lazy loading
-(UILabel *)cancelTip{
if (_cancelTip == nil) {
_cancelTip = [[UILabel alloc]initWithFrame:CGRectMake(IPHONE_WIDTH/2 - 40, SCREEN_HEIGHT - 120, 80, 16)];
_cancelTip.text = @"↑上滑取消";
_cancelTip.font = [UIFont systemFontOfSize:16];
_cancelTip.textAlignment = NSTextAlignmentCenter;
_cancelTip.backgroundColor = [UIColor clearColor];
_cancelTip.textColor = BLUECOLOR;
_cancelTip.alpha = 0;
[self.view addSubview:_cancelTip];
}
return _cancelTip;
}
-(void)backBtnClick{
[self dismissViewControllerAnimated:YES completion:nil];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
@end
//
// UIView+RMAdditions.h
// RMCategories
//
// Created by Richard McClellan on 5/27/13.
// Copyright (c) 2013 Richard McClellan. All rights reserved.
//
#define SCREEN_WIDTH ([UIScreen mainScreen].bounds.size.width)
#define SCREEN_HEIGHT ([UIScreen mainScreen].bounds.size.height)
#import <UIKit/UIKit.h>
@interface UIView (RMAdditions)
/**
* Shortcut for frame.origin.x.
*
* Sets frame.origin.x = x
*/
@property (nonatomic, assign) CGFloat x;
/**
* Shortcut for frame.origin.x.
*
* Sets frame.origin.x = left
*/
@property (nonatomic, assign) CGFloat left;
/**
* Shortcut for frame.origin.y
*
* Sets frame.origin.y = top
*/
@property (nonatomic, assign) CGFloat top;
/**
* Shortcut for frame.origin.y
*
* Sets frame.origin.y = y
*/
@property (nonatomic, assign) CGFloat y;
/**
* Shortcut for frame.origin.x + frame.size.width
*
* Sets frame.origin.x = right - frame.size.width
*/
@property (nonatomic, assign) CGFloat right;
/**
* Shortcut for frame.origin.y + frame.size.height
*
* Sets frame.origin.y = bottom - frame.size.height
*/
@property (nonatomic, assign) CGFloat bottom;
/**
* Shortcut for frame.size.width
*
* Sets frame.size.width = width
*/
@property (nonatomic, assign) CGFloat width;
/**
* Shortcut for frame.size.height
*
* Sets frame.size.height = height
*/
@property (nonatomic, assign) CGFloat height;
/**
* Shortcut for center.x
*
* Sets center.x = centerX
*/
@property (nonatomic, assign) CGFloat centerX;
/**
* Shortcut for center.y
*
* Sets center.y = centerY
*/
@property (nonatomic, assign) CGFloat centerY;
/**
* Shortcut for origin
*
* Sets frame.origin = origin
*/
@property (nonatomic, assign) CGPoint origin;
/**
* Shortcut for size
*
* Sets frame.size = size
*/
@property (nonatomic, assign) CGSize size;
/**
* Utility to convert UIViewAnimationCurve to UIViewAnimationOptions
*
* Used in UIViewController+RMAdditions for animating view for keyboard changes
*/
+ (UIViewAnimationOptions)animationOptionsWithCurve:(UIViewAnimationCurve)curve;
- (void) addLoadingView;
- (void) addLoadingViewWithText:(NSString *)text;
- (void) removeLoadingView;
@end
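// Usage sketch: the accessors declared above read and write individual frame components, e.g.
//
//   UIView *badge = [[UIView alloc] init];
//   badge.size = CGSizeMake(20, 20);
//   badge.right = someContainer.width - 10;   // pin to the right edge with a 10 pt inset
//   badge.centerY = someContainer.height / 2;
//
// (someContainer is a placeholder for whatever superview the badge is added to.)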
//
// UIView+RMAdditions.m
// RMCategories
//
// Created by Richard McClellan on 5/27/13.
// Copyright (c) 2013 Richard McClellan. All rights reserved.
//
#import "UIView+RMAdditions.h"
#import <objc/runtime.h>
@implementation UIView (RMAdditions)
- (CGFloat)left {
return self.frame.origin.x;
}
- (void)setLeft:(CGFloat)x {
CGRect frame = self.frame;
frame.origin.x = x;
self.frame = frame;
}
- (CGFloat)x {
return self.frame.origin.x;
}
- (void)setX:(CGFloat)x {
CGRect frame = self.frame;
frame.origin.x = x;
self.frame = frame;
}
- (CGFloat)top {
return self.frame.origin.y;
}
- (void)setTop:(CGFloat)y {
CGRect frame = self.frame;
frame.origin.y = y;
self.frame = frame;
}
- (CGFloat)y {
return self.frame.origin.y;
}
- (void)setY:(CGFloat)y {
CGRect frame = self.frame;
frame.origin.y = y;
self.frame = frame;
}
- (CGFloat)right {
return self.frame.origin.x + self.frame.size.width;
}
- (void)setRight:(CGFloat)right {
CGRect frame = self.frame;
frame.origin.x = right - frame.size.width;
self.frame = frame;
}
- (CGFloat)bottom {
return self.frame.origin.y + self.frame.size.height;
}
- (void)setBottom:(CGFloat)bottom {
CGRect frame = self.frame;
frame.origin.y = bottom - frame.size.height;
self.frame = frame;
}
- (CGFloat)centerX {
return self.center.x;
}
- (void)setCenterX:(CGFloat)centerX {
self.center = CGPointMake(centerX, self.center.y);
}
- (CGFloat)centerY {
return self.center.y;
}
- (void)setCenterY:(CGFloat)centerY {
self.center = CGPointMake(self.center.x, centerY);
}
- (CGFloat)width {
return self.frame.size.width;
}
- (void)setWidth:(CGFloat)width {
CGRect frame = self.frame;
frame.size.width = width;
self.frame = frame;
}
- (CGFloat)height {
return self.frame.size.height;
}
- (void)setHeight:(CGFloat)height {
CGRect frame = self.frame;
frame.size.height = height;
self.frame = frame;
}
- (void)setOrigin:(CGPoint)origin {
CGRect frame = self.frame;
frame.origin = origin;
self.frame = frame;
}
- (CGPoint) origin {
return self.frame.origin;
}
- (void) setSize:(CGSize)size {
CGRect frame = self.frame;
frame.size = size;
self.frame = frame;
}
- (CGSize) size {
return self.frame.size;
}
+ (UIViewAnimationOptions)animationOptionsWithCurve:(UIViewAnimationCurve)curve {
switch(curve) {
case UIViewAnimationCurveEaseIn:
return UIViewAnimationOptionCurveEaseIn;
case UIViewAnimationCurveEaseInOut:
return UIViewAnimationOptionCurveEaseInOut;
case UIViewAnimationCurveEaseOut:
return UIViewAnimationOptionCurveEaseOut;
case UIViewAnimationCurveLinear:
return UIViewAnimationOptionCurveLinear;
}
}
static const void *kLoadingViewKey = @"LoadingViewKey";
- (void) addLoadingView {
[self addLoadingViewWithText:@"Loading..."];
}
- (void) addLoadingViewWithText:(NSString *)text {
[self removeLoadingView];
UIView *loadingView = [[UIView alloc] initWithFrame:self.bounds];
[loadingView setAutoresizingMask:UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight];
[loadingView setBackgroundColor:[UIColor colorWithWhite:0.9 alpha:1.0]];
objc_setAssociatedObject(self, kLoadingViewKey, loadingView, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
UILabel *loadingLabel = [[UILabel alloc] initWithFrame:CGRectZero];
loadingLabel.backgroundColor = [UIColor clearColor];
loadingLabel.font = [UIFont systemFontOfSize:15.0];
loadingLabel.textColor = [UIColor blackColor];
[loadingLabel setText:text];
[loadingLabel sizeToFit];
UIActivityIndicatorView *activityIndicator = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];
activityIndicator.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleBottomMargin;
[activityIndicator startAnimating];
activityIndicator.left = (self.width - activityIndicator.width - loadingLabel.width - 5.0) / 2;
activityIndicator.centerY = self.centerY;
[loadingView addSubview:activityIndicator];
loadingLabel.left = (self.width - activityIndicator.width - loadingLabel.width - 5.0) / 2 + activityIndicator.width + 5.0;
loadingLabel.centerY = self.centerY;
loadingLabel.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleBottomMargin;
[loadingView addSubview:loadingLabel];
[self addSubview:loadingView];
}
- (void) removeLoadingView {
UIView *loadingView = objc_getAssociatedObject(self, kLoadingViewKey);
[loadingView removeFromSuperview];
}
@end
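// Usage sketch for the loading-view helpers (the label text is just an example):
//
//   [self.view addLoadingViewWithText:@"Uploading..."];
//   // ... perform the long-running work ...
//   [self.view removeLoadingView];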
// In the playback view controller (e.g. inside viewDidLoad): build the player and listen for the end of playback
[self create];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(playbackFinished:) name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
#pragma mark - Create UI
- (void)create
{
_playItem = [AVPlayerItem playerItemWithURL:self.videoUrl];
_player = [AVPlayer playerWithPlayerItem:_playItem];
_playerLayer =[AVPlayerLayer playerLayerWithPlayer:_player];
_playerLayer.frame = CGRectMake(IPHONE_WIDTH - 102, 8, 95, 95);
_playerLayer.videoGravity=AVLayerVideoGravityResizeAspectFill; // Video fill mode
[self.view.layer addSublayer:_playerLayer];
[_player play];
}
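// The notification registered above expects a playbackFinished: handler, which the original
// snippet does not show. A minimal sketch, assuming the preview should simply loop:
- (void)playbackFinished:(NSNotification *)notification
{
    // Seek back to the start and play again
    [_player seekToTime:kCMTimeZero];
    [_player play];
}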
#pragma mark Compress and save
- (NSURL *)compressedURL
{
NSDateFormatter *formater = [[NSDateFormatter alloc] init]; // Name the file with a timestamp to avoid collisions; alternatively, check whether the file already exists and delete it before regenerating
[formater setDateFormat:@"yyyy-MM-dd-HH:mm:ss"];
return [NSURL fileURLWithPath:[[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, true) lastObject] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@output.mp4",[formater stringFromDate:[NSDate date]]]]];
}
- (CGFloat)fileSize:(NSURL *)path
{
return [[NSData dataWithContentsOfURL:path] length]/1024.00 /1024.00;
}
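// Optional alternative: dataWithContentsOfURL: loads the entire file into memory just to
// measure it; NSFileManager can report the size directly. A minimal sketch:
- (CGFloat)fileSizeWithoutLoading:(NSURL *)path
{
    NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:path.path error:nil];
    return [attrs fileSize] / 1024.0 / 1024.0; // size in MB
}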
- (void)convertVideoQuailtyWithInputURL:(NSURL*)inputURL
outputURL:(NSURL*)outputURL
completeHandler:(void (^)(AVAssetExportSession*))handler
{
[HTXSVProgressHUDTool showWithStatus:@"视频压缩..."];
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:avAsset presetName:AVAssetExportPresetMediumQuality]; // Alternatives: AVAssetExportPresetLowQuality, AVAssetExportPresetHighestQuality
exportSession.outputURL = outputURL;
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.shouldOptimizeForNetworkUse= YES;
exportSession.videoComposition = [self getVideoComposition:avAsset];
[exportSession exportAsynchronouslyWithCompletionHandler:^(void)
{
[HTXSVProgressHUDTool dismiss];
switch (exportSession.status) {
case AVAssetExportSessionStatusCancelled:
NSLog(@"AVAssetExportSessionStatusCancelled");
break;
case AVAssetExportSessionStatusUnknown:
NSLog(@"AVAssetExportSessionStatusUnknown");
break;
case AVAssetExportSessionStatusWaiting:
NSLog(@"AVAssetExportSessionStatusWaiting");
break;
case AVAssetExportSessionStatusExporting:
NSLog(@"AVAssetExportSessionStatusExporting");
break;
case AVAssetExportSessionStatusCompleted:
{
NSLog(@"AVAssetExportSessionStatusCompleted");
NSLog(@"压缩完毕,压缩后大小 %f MB",[self fileSize:outputURL]);
//UISaveVideoAtPathToSavedPhotosAlbum([outputURL path], self, nil, NULL); // Save to the photo album instead
[[NSFileManager defaultManager] removeItemAtPath:[inputURL path] error:nil]; // Delete the uncompressed original so it no longer takes up disk space
[self uploadVideo:outputURL];
}
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"AVAssetExportSessionStatusFailed");
break;
}
}];
}
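// Usage sketch (hypothetical helper, error handling omitted): chain a recorded file into the
// compression step above; on success the export callback hands the result to uploadVideo:.
- (void)compressAndUploadRecordedFile:(NSURL *)recordedURL
{
    [self convertVideoQuailtyWithInputURL:recordedURL
                                outputURL:[self compressedURL]
                          completeHandler:nil];
}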
#pragma mark - Fix the saved video's orientation
-(AVMutableVideoComposition *) getVideoComposition:(AVAsset *)asset
{
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
CGSize videoSize = videoTrack.naturalSize;
BOOL isPortrait_ = [self isVideoPortrait:asset];
if(isPortrait_) {
NSLog(@"video is portrait ");
videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
composition.naturalSize = videoSize;
videoComposition.renderSize = videoSize;
// videoComposition.renderSize = videoTrack.naturalSize; //
videoComposition.frameDuration = CMTimeMakeWithSeconds( 1 / videoTrack.nominalFrameRate, 600);
AVMutableCompositionTrack *compositionVideoTrack;
compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionLayerInstruction *layerInst;
layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
inst.layerInstructions = [NSArray arrayWithObject:layerInst];
videoComposition.instructions = [NSArray arrayWithObject:inst];
return videoComposition;
}
-(BOOL) isVideoPortrait:(AVAsset *)asset
{
BOOL isPortrait = FALSE;
NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if([tracks count] > 0) {
AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
CGAffineTransform t = videoTrack.preferredTransform;
// Portrait
if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
{
isPortrait = YES;
}
// PortraitUpsideDown
if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
isPortrait = YES;
}
// LandscapeRight
if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
{
isPortrait = FALSE;
}
// LandscapeLeft
if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
{
isPortrait = FALSE;
}
}
return isPortrait;
}
-(void)uploadVideo:(NSURL*)URL{
NSLog(@"上传视频");
// Boundary identifier
NSString *TWITTERFON_FORM_BOUNDARY = @"AaB03x";
NSURL *url = [NSURL URLWithString:KCreateDynamic];
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
// Boundary line: --AaB03x
NSString *MPboundary = [[NSString alloc]initWithFormat:@"--%@",TWITTERFON_FORM_BOUNDARY];
// Closing boundary: --AaB03x--
NSString *endMPboundary = [[NSString alloc]initWithFormat:@"%@--",MPboundary];
// String that becomes the text portion of the HTTP body
NSMutableString *body = [[NSMutableString alloc]init];
// Collect all keys of the parameter dictionary
// NSDictionary *params = @{@"appSecret":@"4124bc0a9335c27f086f24ba207a4912",@"memberId":[UserModel sharedInstance].memberId,@"message":self.messageText1.text,@"positionX":@0,@"positionY":@0,@"positionYn":@"N",@"appId":@"ET12412"};
NSString *memStr = [UserModel sharedInstance].memberId;
NSMutableDictionary * _param = [NSMutableDictionary dictionary];
NSString *paraString = [MD5 encodeString:self.messageText1.text];
[_param setValue:memStr forKey:@"memberId"];
[_param setValue:self.messageText1.text forKey:@"message"];
[_param setValue:KAPPID forKey:@"appId"];
[_param setValue:KappSecret forKey:@"appSecret"];
[_param setValue:[CommonTools getTimeScamp] forKey:@"timestamp"];
NSArray * paramArr = @[[NSString stringWithFormat:@"memberId%@",_param[@"memberId"]],[NSString stringWithFormat:@"message%@",paraString],[NSString stringWithFormat:@"appId%@",_param[@"appId"]],[NSString stringWithFormat:@"appSecret%@",_param[@"appSecret"]],[NSString stringWithFormat:@"timestamp%@",_param[@"timestamp"]]];
[_param setValue:[CommonTools tokenmd5StringFromArray:paramArr] forKey:@"digest"];
NSArray *keys= [_param allKeys];
// Iterate over the keys
for(int i=0;i<[keys count];i++) {
// Current key
NSString *key=[keys objectAtIndex:i];
// If the key is not "pic", the value is a plain string field, e.g. name:Boris
//if(![key isEqualToString:@"pic"]) {
// Append the boundary line and a line break
[body appendFormat:@"%@\r\n",MPboundary];
// Append the field name, followed by two line breaks
[body appendFormat:@"Content-Disposition: form-data; name=\"%@\"\r\n\r\n",key];
//[body appendString:@"Content-Transfer-Encoding: 8bit"];
// Append the field value
[body appendFormat:@"%@\r\n",[_param objectForKey:key]];
//}
}
// Append the boundary line and a line break
//[body appendFormat:@"%@\r\n",MPboundary];
NSLog(@"------------body---------%@",body);
// myRequestData holds the full HTTP body
NSMutableData *myRequestData=[NSMutableData data];
// Convert the body string to UTF-8 bytes
[myRequestData appendData:[body dataUsingEncoding:NSUTF8StringEncoding]];
// Append the file part(s) in a loop
for (int i = 0 ; i < 1; i ++ ) {
// (Originally an image-upload loop; here only a single video file is appended)
// image = self.chosenPostImages[i];
// Get the file data
// NSData *data;
NSData *data = [NSData dataWithContentsOfURL:URL];
// if (UIImagePNGRepresentation(image)) {
// // PNG image
// data = UIImagePNGRepresentation(image);
// }else {
// // JPEG image
// data = UIImageJPEGRepresentation(image, 1);
// }
// NSData *data = UIImageJPEGRepresentation(image, 0.00001);
// NSUInteger length = [data length];
NSMutableString *imgBody = [[NSMutableString alloc]init];
// Append the file part: boundary, line break, then the part headers
//[body appendFormat:[NSString stringWithFormat: @"Content-Disposition: form-data; name=\"File\"; filename=\"%@\"\r\n", [keys objectAtIndex:i]]];
[imgBody appendFormat:@"%@\r\n",MPboundary];
[imgBody appendFormat:@"Content-Disposition: form-data; name=\"movie\"; filename=\"output.mp4\"\r\n"];
// Declare the content type of the uploaded file
// [imgBody appendFormat:@"Content-Type: application/octet-stream"];
//charset=utf-8\r\n\r\n"];
[imgBody appendFormat:@"Content-Type: video/mp4\r\n\r\n"];
// Convert the part headers to UTF-8 bytes and append them
//[myRequestData appendData:[body dataUsingEncoding:NSUTF8StringEncoding]];
[myRequestData appendData:[imgBody dataUsingEncoding:NSUTF8StringEncoding]];
// Append the file data itself
NSLog(@"----imgBody------- %@",imgBody);
[myRequestData appendData:data];
[myRequestData appendData:[ @"\r\n" dataUsingEncoding:NSUTF8StringEncoding]];
}
// Build the closing boundary: --AaB03x--
// NSString *end=[[NSString alloc]initWithFormat:@"%@\r\n",endMPboundary];
NSString *end=[[NSString alloc]initWithFormat:@"\r\n%@",endMPboundary];
// Append the closing boundary
[myRequestData appendData:[end dataUsingEncoding:NSUTF8StringEncoding]];
// Content-Type header value
NSString *content=[[NSString alloc]initWithFormat:@"multipart/form-data; boundary=%@",TWITTERFON_FORM_BOUNDARY];
// Set the HTTP headers
[request setValue:content forHTTPHeaderField:@"Content-Type"];
//[request setValue:@"keep-alive" forHTTPHeaderField:@"connection"];
//[request setValue:@"UTF-8" forHTTPHeaderField:@"Charsert"];
// Content-Length
[request setValue:[NSString stringWithFormat:@"%lu", (unsigned long)[myRequestData length]] forHTTPHeaderField:@"Content-Length"];
// HTTP body
[request setHTTPBody:myRequestData];
// HTTP method
[request setHTTPMethod:@"POST"];
[HTXSVProgressHUDTool showWithStatus:@"正在发送..."];
// Send the request
[NSURLConnection sendAsynchronousRequest:request queue:[NSOperationQueue mainQueue] completionHandler:^(NSURLResponse *response, NSData *data, NSError *connectionError) {
if (data) {
NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:data options:NSJSONReadingMutableLeaves error:nil];
NSLog(@"发表小视频状态请求 == %@", dict);
// _isOne = NO;
if([dict[@"returnCode"]isEqualToString:@"000000"]){
[[NSFileManager defaultManager] removeItemAtPath:[URL path] error:nil]; // Delete the compressed file after a successful upload so it no longer takes up disk space
NSLog(@"上传视频成功");
PostStatusViewController * cvc = [[PostStatusViewController alloc] init];
cvc.view.tag = 300;
NSDictionary *dict = [[NSDictionary alloc]initWithObjectsAndKeys:cvc, @"FreshView",nil] ;
NSNotification *notification = [NSNotification notificationWithName:@"FreshView" object:nil userInfo:dict];
[[NSNotificationCenter defaultCenter]postNotification:notification];
// Notify the tab bar to reappear
UIViewController *cvc1 = [[UIViewController alloc]init];
cvc1.view.tag = 1;
NSDictionary *dict1 = [[NSDictionary alloc]initWithObjectsAndKeys:cvc1,@"Hidden", nil];
NSNotification *notification1 = [NSNotification notificationWithName:@"tongzhiHidden" object:nil userInfo:dict1];
[[NSNotificationCenter defaultCenter]postNotification:notification1];
[self.navigationController popViewControllerAnimated:YES];
}
else if ([dict[@"returnCode"]integerValue] == 3){
UIAlertView *alertView = [[UIAlertView alloc]initWithTitle:@"提示" message:@"对不起,请求数据失败!" delegate:self cancelButtonTitle:@"确定" otherButtonTitles:nil, nil];
[alertView show];
}else if ([dict[@"returnCode"]integerValue]==43){
UIAlertView * aler = [[UIAlertView alloc]initWithTitle:[NSString stringWithFormat:@"%@",dict[@"description"]] message:[NSString stringWithFormat:@"%@",dict[@"reason"]] delegate:self cancelButtonTitle:@"确定" otherButtonTitles:nil, nil];
[aler show];
}else{
UIAlertView *alertView = [[UIAlertView alloc]initWithTitle:@"提示" message:@"对不起,发布失败!" delegate:self cancelButtonTitle:@"确定" otherButtonTitles:nil, nil];
[alertView show];
}
} else {
NSLog(@"上传失败");
UIAlertView *alertView = [[UIAlertView alloc]initWithTitle:@"提示" message:@"上传失败!" delegate:self cancelButtonTitle:@"确定" otherButtonTitles:nil, nil];
[alertView show];
}
[HTXSVProgressHUDTool dismiss];
}];
}
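// NSURLConnection was deprecated in iOS 9; a minimal NSURLSession-based replacement for the
// request-sending step above might look like this (the request is built exactly as in
// uploadVideo:, and the response handling is reduced to logging):
- (void)sendUploadRequest:(NSURLRequest *)request
{
    NSURLSessionDataTask *task = [[NSURLSession sharedSession] dataTaskWithRequest:request
        completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (data) {
                    NSDictionary *dict = [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
                    NSLog(@"upload response: %@", dict);
                } else {
                    NSLog(@"upload failed: %@", error);
                }
            });
        }];
    [task resume];
}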