Videos recorded on iPhone are in MOV format. When they are sent to an Android client over IM, the Android side cannot play them, so they have to be converted to MP4 before sending.
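For comparison, Apple's own AVAssetExportSession can also remux a MOV into an MP4 container, but only through its fixed presets. A minimal sketch of that built-in route (movUrl and mp4Url are placeholder names, set up the same way as in the usage section at the end of this post):
#import <AVFoundation/AVFoundation.h>
// Simplest built-in route: export with a fixed preset. It works, but offers
// no control over bitrate, frame size, or audio settings.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:movUrl options:nil];
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                 presetName:AVAssetExportPresetHighestQuality];
session.outputFileType = AVFileTypeMPEG4;
session.outputURL = mp4Url;
session.shouldOptimizeForNetworkUse = YES;
[session exportAsynchronouslyWithCompletionHandler:^{
    if (session.status == AVAssetExportSessionStatusCompleted) {
        // mp4Url is ready to hand to the IM send pipeline
    }
}];
Because the presets expose no bitrate or frame-size control, the customized SDAVAssetExportSession below is used instead.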
The implementation code is as follows. SDAVAssetExportSession is a third-party library; a few fixes and modifications have been applied here.
//
// SDAVAssetExportSession.h
//
// This file is part of the SDAVAssetExportSession package.
//
// Created by Olivier Poitrey on 13/03/13.
// Copyright 2013 Olivier Poitrey. All rights reserved.
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@protocol SDAVAssetExportSessionDelegate;
/**
* An `SDAVAssetExportSession` object transcodes the contents of an AVAsset source object to create an output
* of the form described by specified video and audio settings. It implements most of the API of Apple provided
* `AVAssetExportSession` but with the capability to provide you own video and audio settings instead of the
* limited set of Apple provided presets.
*
* After you have initialized an export session with the asset that contains the source media, video and audio
* settings, and the output file type (outputFileType), you can start the export running by invoking
* `exportAsynchronouslyWithCompletionHandler:`. Because the export is performed asynchronously, this method
* returns immediately; you can observe the `progress` property to monitor the export.
*
* The completion handler you supply to `exportAsynchronouslyWithCompletionHandler:` is called whether the export
* fails, completes, or is cancelled. Upon completion, the status property indicates whether the export has
* completed successfully. If it has failed, the value of the error property supplies additional information
* about the reason for the failure.
*/
@interface SDAVAssetExportSession : NSObject
@property (nonatomic, weak) id<SDAVAssetExportSessionDelegate> delegate;
/**
* The asset with which the export session was initialized.
*/
@property (nonatomic, strong, readonly) AVAsset *asset;
/**
* Indicates whether video composition is enabled for export, and supplies the instructions for video composition.
*
* You can observe this property using key-value observing.
*/
@property (nonatomic, copy) AVVideoComposition *videoComposition;
/**
* Indicates whether non-default audio mixing is enabled for export, and supplies the parameters for audio mixing.
*/
@property (nonatomic, copy) AVAudioMix *audioMix;
/**
* The type of file to be written by the session.
*
* The value is a UTI string corresponding to the file type to use when writing the asset.
* For a list of constants specifying UTIs for standard file types, see `AV Foundation Constants Reference`.
*
* You can observe this property using key-value observing.
*/
@property (nonatomic, copy) NSString *outputFileType;
/**
* The URL of the export session’s output.
*
* You can observe this property using key-value observing.
*/
@property (nonatomic, copy) NSURL *outputURL;
/**
* The settings used for input video track.
*
* The dictionary’s keys are from <CoreVideo/CVPixelBuffer.h>.
*/
@property (nonatomic, copy) NSDictionary *videoInputSettings;
/**
* The settings used for encoding the video track.
*
* A value of nil specifies that appended output should not be re-encoded.
* The dictionary’s keys are from <AVFoundation/AVVideoSettings.h>.
*/
@property (nonatomic, copy) NSDictionary *videoSettings;
/**
* The settings used for encoding the audio track.
*
* A value of nil specifies that appended output should not be re-encoded.
* The dictionary’s keys are from <AVFoundation/AVAudioSettings.h>.
*/
@property (nonatomic, copy) NSDictionary *audioSettings;
/**
* The time range to be exported from the source.
*
* The default time range of an export session is `kCMTimeZero` to `kCMTimePositiveInfinity`,
* meaning that (modulo a possible limit on file length) the full duration of the asset will be exported.
*
* You can observe this property using key-value observing.
*
*/
@property (nonatomic, assign) CMTimeRange timeRange;
/**
* Indicates whether the movie should be optimized for network use.
*
* You can observe this property using key-value observing.
*/
@property (nonatomic, assign) BOOL shouldOptimizeForNetworkUse;
/**
* The metadata to be written to the output file by the export session.
*/
@property (nonatomic, copy) NSArray *metadata;
/**
* Describes the error that occurred if the export status is `AVAssetExportSessionStatusFailed`
* or `AVAssetExportSessionStatusCancelled`.
*
* If there is no error to report, the value of this property is nil.
*/
@property (nonatomic, strong, readonly) NSError *error;
/**
* The progress of the export on a scale from 0 to 1.
*
* A value of 0 means the export has not yet begun, 1 means the export is complete.
*
* Unlike Apple provided `AVAssetExportSession`, this property can be observed using key-value observing.
*/
@property (nonatomic, assign, readonly) float progress;
/**
* The status of the export session.
*
* For possible values, see “AVAssetExportSessionStatus.”
*
* You can observe this property using key-value observing. (TODO)
*/
@property (nonatomic, assign, readonly) AVAssetExportSessionStatus status;
/**
* Returns an asset export session configured with a specified asset.
*
* @param asset The asset you want to export
* @return An asset export session initialized to export `asset`.
*/
+ (id)exportSessionWithAsset:(AVAsset *)asset;
/**
* Initializes an asset export session with a specified asset.
*
* @param asset The asset you want to export
* @return An asset export session initialized to export `asset`.
*/
- (id)initWithAsset:(AVAsset *)asset;
/**
* Starts the asynchronous execution of an export session.
*
* This method starts an asynchronous export operation and returns immediately. status signals the terminal
* state of the export session, and if a failure occurs, error describes the problem.
*
* If internal preparation for export fails, handler is invoked synchronously. The handler may also be called
* asynchronously, after the method returns, in the following cases:
*
* 1. If a failure occurs during the export, including failures of loading, re-encoding, or writing media data to the output.
* 2. If cancelExport is invoked.
* 3. After the export session succeeds, having completely written its output to the outputURL.
*
* @param handler A block that is invoked when writing is complete or in the event of writing failure.
*/
- (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler;
/**
* Cancels the execution of an export session.
*
* You can invoke this method when the export is running.
*/
- (void)cancelExport;
// Get the video frame size (swaps width/height for rotated video)
+ (CGSize)videoSizeInAssets:(AVURLAsset *)asset;
@end
@protocol SDAVAssetExportSessionDelegate <NSObject>
- (void)exportSession:(SDAVAssetExportSession *)exportSession renderFrame:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime toBuffer:(CVPixelBufferRef)renderBuffer;
@end
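The delegate is optional: when it is set, the exporter hands each decoded frame plus an empty buffer from the adaptor's pixel pool to the delegate, and appends whatever the delegate writes into renderBuffer. A minimal pass-through sketch (it assumes both buffers are single-plane, e.g. 32BGRA as requested by the adaptor's attributes; real code would draw a watermark or filter into renderBuffer instead):
- (void)exportSession:(SDAVAssetExportSession *)exportSession
          renderFrame:(CVPixelBufferRef)pixelBuffer
 withPresentationTime:(CMTime)presentationTime
             toBuffer:(CVPixelBufferRef)renderBuffer
{
    // Copy the source frame into the render buffer row by row, honoring
    // each buffer's own stride.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    CVPixelBufferLockBaseAddress(renderBuffer, 0);
    size_t rows = MIN(CVPixelBufferGetHeight(pixelBuffer), CVPixelBufferGetHeight(renderBuffer));
    size_t srcStride = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t dstStride = CVPixelBufferGetBytesPerRow(renderBuffer);
    uint8_t *src = CVPixelBufferGetBaseAddress(pixelBuffer);
    uint8_t *dst = CVPixelBufferGetBaseAddress(renderBuffer);
    for (size_t row = 0; row < rows; row++) {
        memcpy(dst + row * dstStride, src + row * srcStride, MIN(srcStride, dstStride));
    }
    CVPixelBufferUnlockBaseAddress(renderBuffer, 0);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}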
The SDAVAssetExportSession.m file:
//
// SDAVAssetExportSession.m
//
// This file is part of the SDAVAssetExportSession package.
//
// Created by Olivier Poitrey on 13/03/13.
// Copyright 2013 Olivier Poitrey. All rights reserved.
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
//
#import "SDAVAssetExportSession.h"
@interface SDAVAssetExportSession ()
@property (nonatomic, assign, readwrite) float progress;
@property (nonatomic, strong) AVAssetReader *reader;
@property (nonatomic, strong) AVAssetReaderVideoCompositionOutput *videoOutput;
@property (nonatomic, strong) AVAssetReaderAudioMixOutput *audioOutput;
@property (nonatomic, strong) AVAssetWriter *writer;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;
@property (nonatomic, strong) AVAssetWriterInput *audioInput;
@property (nonatomic, strong) dispatch_queue_t inputQueue;
@property (nonatomic, strong) void (^completionHandler)(void);
@end
@implementation SDAVAssetExportSession
{
NSError *_error;
NSTimeInterval duration;
CMTime lastSamplePresentationTime;
}
+ (id)exportSessionWithAsset:(AVAsset *)asset
{
return [SDAVAssetExportSession.alloc initWithAsset:asset];
}
- (id)initWithAsset:(AVAsset *)asset
{
if ((self = [super init]))
{
_asset = asset;
_timeRange = CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);
}
return self;
}
- (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler
{
NSParameterAssert(handler != nil);
[self cancelExport];
self.completionHandler = handler;
if (!self.outputURL)
{
_error = [NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorExportFailed userInfo:@{
    NSLocalizedDescriptionKey: @"Output URL not set"
}];
handler();
return;
}
NSError *readerError;
self.reader = [AVAssetReader.alloc initWithAsset:self.asset error:&readerError];
if (readerError)
{
_error = readerError;
handler();
return;
}
NSError *writerError;
self.writer = [AVAssetWriter assetWriterWithURL:self.outputURL fileType:self.outputFileType error:&writerError];
if (writerError)
{
_error = writerError;
handler();
return;
}
self.reader.timeRange = self.timeRange;
self.writer.shouldOptimizeForNetworkUse = self.shouldOptimizeForNetworkUse;
self.writer.metadata = self.metadata;
NSArray *videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
if (CMTIME_IS_VALID(self.timeRange.duration) && !CMTIME_IS_POSITIVEINFINITY(self.timeRange.duration))
{
duration = CMTimeGetSeconds(self.timeRange.duration);
}
else
{
duration = CMTimeGetSeconds(self.asset.duration);
}
//
// Video output
//
if (videoTracks.count > 0) {
self.videoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:videoTracks videoSettings:self.videoInputSettings];
self.videoOutput.alwaysCopiesSampleData = NO;
if (self.videoComposition)
{
self.videoOutput.videoComposition = self.videoComposition;
}
else
{
self.videoOutput.videoComposition = [self buildDefaultVideoComposition];
}
if ([self.reader canAddOutput:self.videoOutput])
{
[self.reader addOutput:self.videoOutput];
}
//
// Video input
//
self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:self.videoSettings];
self.videoInput.expectsMediaDataInRealTime = NO;
if ([self.writer canAddInput:self.videoInput])
{
[self.writer addInput:self.videoInput];
}
NSDictionary *pixelBufferAttributes = @{
    (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferWidthKey: @(self.videoOutput.videoComposition.renderSize.width),
    (id)kCVPixelBufferHeightKey: @(self.videoOutput.videoComposition.renderSize.height),
    @"IOSurfaceOpenGLESTextureCompatibility": @YES,
    @"IOSurfaceOpenGLESFBOCompatibility": @YES,
};
self.videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput sourcePixelBufferAttributes:pixelBufferAttributes];
}
//
// Audio output
//
NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count > 0) {
self.audioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
self.audioOutput.alwaysCopiesSampleData = NO;
self.audioOutput.audioMix = self.audioMix;
if ([self.reader canAddOutput:self.audioOutput])
{
[self.reader addOutput:self.audioOutput];
}
} else {
// Just in case this gets reused
self.audioOutput = nil;
}
//
// Audio input
//
if (self.audioOutput) {
self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:self.audioSettings];
self.audioInput.expectsMediaDataInRealTime = NO;
if ([self.writer canAddInput:self.audioInput])
{
[self.writer addInput:self.audioInput];
}
}
[self.writer startWriting];
[self.reader startReading];
[self.writer startSessionAtSourceTime:self.timeRange.start];
__block BOOL videoCompleted = NO;
__block BOOL audioCompleted = NO;
__weak typeof(self) wself = self;
self.inputQueue = dispatch_queue_create("VideoEncoderInputQueue", DISPATCH_QUEUE_SERIAL);
if (videoTracks.count > 0) {
[self.videoInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
{
if (![wself encodeReadySamplesFromOutput:wself.videoOutput toInput:wself.videoInput])
{
@synchronized(wself)
{
videoCompleted = YES;
if (audioCompleted)
{
[wself finish];
}
}
}
}];
}
else {
videoCompleted = YES;
}
if (!self.audioOutput) {
audioCompleted = YES;
} else {
[self.audioInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
{
if (![wself encodeReadySamplesFromOutput:wself.audioOutput toInput:wself.audioInput])
{
@synchronized(wself)
{
audioCompleted = YES;
if (videoCompleted)
{
[wself finish];
}
}
}
}];
}
}
- (BOOL)encodeReadySamplesFromOutput:(AVAssetReaderOutput *)output toInput:(AVAssetWriterInput *)input
{
while (input.isReadyForMoreMediaData)
{
CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
if (sampleBuffer)
{
BOOL handled = NO;
BOOL error = NO;
if (self.reader.status != AVAssetReaderStatusReading || self.writer.status != AVAssetWriterStatusWriting)
{
handled = YES;
error = YES;
}
if (!handled && self.videoOutput == output)
{
// update the video progress
lastSamplePresentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
lastSamplePresentationTime = CMTimeSubtract(lastSamplePresentationTime, self.timeRange.start);
self.progress = duration == 0 ? 1 : CMTimeGetSeconds(lastSamplePresentationTime) / duration;
if ([self.delegate respondsToSelector:@selector(exportSession:renderFrame:withPresentationTime:toBuffer:)])
{
CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferRef renderBuffer = NULL;
CVPixelBufferPoolCreatePixelBuffer(NULL, self.videoPixelBufferAdaptor.pixelBufferPool, &renderBuffer);
[self.delegate exportSession:self renderFrame:pixelBuffer withPresentationTime:lastSamplePresentationTime toBuffer:renderBuffer];
if (![self.videoPixelBufferAdaptor appendPixelBuffer:renderBuffer withPresentationTime:lastSamplePresentationTime])
{
error = YES;
}
CVPixelBufferRelease(renderBuffer);
handled = YES;
}
}
if (!handled && ![input appendSampleBuffer:sampleBuffer])
{
error = YES;
}
CFRelease(sampleBuffer);
if (error)
{
return NO;
}
}
else
{
[input markAsFinished];
return NO;
}
}
return YES;
}
+ (CGSize)videoSizeInAssets:(AVURLAsset *)asset
{
AVAssetTrack *videoTrack = nil;
if ([[asset tracksWithMediaType:AVMediaTypeVideo] count] != 0)
{
videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
}
CGSize naturalSize = [videoTrack naturalSize];
CGAffineTransform transform = videoTrack.preferredTransform;
// Workaround radar 31928389, see https://github.com/rs/SDAVAssetExportSession/pull/70 for more info
if (transform.ty == -560) {
transform.ty = 0;
}
if (transform.tx == -560) {
transform.tx = 0;
}
CGFloat videoAngleInDegree = atan2(transform.b, transform.a) * 180 / M_PI;
if (videoAngleInDegree == 90 || videoAngleInDegree == -90)
{
CGFloat width = naturalSize.width;
naturalSize.width = naturalSize.height;
naturalSize.height = width;
}
return naturalSize;
}
- (AVMutableVideoComposition *)buildDefaultVideoComposition
{
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
AVAssetTrack *videoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
// get the frame rate from videoSettings, if not set then try to get it from the video track,
// if not set (mainly when asset is AVComposition) then use the default frame rate of 30
float trackFrameRate = 0;
if (self.videoSettings)
{
NSDictionary *videoCompressionProperties = [self.videoSettings objectForKey:AVVideoCompressionPropertiesKey];
if (videoCompressionProperties)
{
NSNumber *frameRate = [videoCompressionProperties objectForKey:AVVideoAverageNonDroppableFrameRateKey];
if (frameRate)
{
trackFrameRate = frameRate.floatValue;
}
}
}
else
{
trackFrameRate = [videoTrack nominalFrameRate];
}
if (trackFrameRate == 0)
{
trackFrameRate = 30;
}
videoComposition.frameDuration = CMTimeMake(1, (int32_t)trackFrameRate);
CGSize targetSize = CGSizeMake([self.videoSettings[AVVideoWidthKey] floatValue], [self.videoSettings[AVVideoHeightKey] floatValue]);
CGSize naturalSize = [videoTrack naturalSize];
CGAffineTransform transform = videoTrack.preferredTransform;
// Workaround radar 31928389, see https://github.com/rs/SDAVAssetExportSession/pull/70 for more info
if (transform.ty == -560) {
transform.ty = 0;
}
if (transform.tx == -560) {
transform.tx = 0;
}
CGFloat videoAngleInDegree = atan2(transform.b, transform.a) * 180 / M_PI;
if (videoAngleInDegree == 90 || videoAngleInDegree == -90) {
CGFloat width = naturalSize.width;
naturalSize.width = naturalSize.height;
naturalSize.height = width;
}
videoComposition.renderSize = naturalSize;
// center inside
{
float ratio;
float xratio = targetSize.width / naturalSize.width;
float yratio = targetSize.height / naturalSize.height;
ratio = MIN(xratio, yratio);
float postWidth = naturalSize.width * ratio;
float postHeight = naturalSize.height * ratio;
float transx = (targetSize.width - postWidth) / 2;
float transy = (targetSize.height - postHeight) / 2;
CGAffineTransform matrix = CGAffineTransformMakeTranslation(transx / xratio, transy / yratio);
matrix = CGAffineTransformScale(matrix, ratio / xratio, ratio / yratio);
transform = CGAffineTransformConcat(transform, matrix);
}
// Make a "pass through video track" video composition.
AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[passThroughLayer setTransform:transform atTime:kCMTimeZero];
passThroughInstruction.layerInstructions = @[passThroughLayer];
videoComposition.instructions = @[passThroughInstruction];
return videoComposition;
}
- (void)finish
{
// Synchronized block to ensure we never cancel the writer before calling finishWritingWithCompletionHandler
if (self.reader.status == AVAssetReaderStatusCancelled || self.writer.status == AVAssetWriterStatusCancelled)
{
return;
}
if (self.writer.status == AVAssetWriterStatusFailed)
{
[self complete];
}
else if (self.reader.status == AVAssetReaderStatusFailed) {
[self.writer cancelWriting];
[self complete];
}
else
{
[self.writer finishWritingWithCompletionHandler:^
{
[self complete];
}];
}
}
- (void)complete
{
if (self.writer.status == AVAssetWriterStatusFailed || self.writer.status == AVAssetWriterStatusCancelled)
{
[NSFileManager.defaultManager removeItemAtURL:self.outputURL error:nil];
}
if (self.completionHandler)
{
self.completionHandler();
self.completionHandler = nil;
}
}
- (NSError *)error
{
if (_error)
{
return _error;
}
else
{
return self.writer.error ? : self.reader.error;
}
}
- (AVAssetExportSessionStatus)status
{
switch (self.writer.status)
{
default:
case AVAssetWriterStatusUnknown:
return AVAssetExportSessionStatusUnknown;
case AVAssetWriterStatusWriting:
return AVAssetExportSessionStatusExporting;
case AVAssetWriterStatusFailed:
return AVAssetExportSessionStatusFailed;
case AVAssetWriterStatusCompleted:
return AVAssetExportSessionStatusCompleted;
case AVAssetWriterStatusCancelled:
return AVAssetExportSessionStatusCancelled;
}
}
- (void)cancelExport
{
if (self.inputQueue)
{
dispatch_async(self.inputQueue, ^
{
[self.writer cancelWriting];
[self.reader cancelReading];
[self complete];
[self reset];
});
}
}
- (void)reset
{
_error = nil;
self.progress = 0;
self.reader = nil;
self.videoOutput = nil;
self.audioOutput = nil;
self.writer = nil;
self.videoInput = nil;
self.videoPixelBufferAdaptor = nil;
self.audioInput = nil;
self.inputQueue = nil;
self.completionHandler = nil;
}
@end
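Unlike Apple's exporter, the progress property here fires standard KVO notifications (it is set through its synthesized setter on the encode queue), so a progress bar can track the transcode. A minimal sketch, assuming a view controller that owns the encoder and a UIProgressView named progressView (both hypothetical):
// Start observing after creating the encoder; remove the observer once
// the completion handler fires.
- (void)startObservingExporter:(SDAVAssetExportSession *)encoder
{
    [encoder addObserver:self forKeyPath:@"progress"
                 options:NSKeyValueObservingOptionNew context:NULL];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
                        change:(NSDictionary *)change context:(void *)context
{
    if ([keyPath isEqualToString:@"progress"]) {
        float p = [change[NSKeyValueChangeNewKey] floatValue];
        // Notifications arrive off the main thread; hop to main for UI work.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.progressView.progress = p;
        });
    }
}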
Usage: wrapper methods for converting MOV to MP4:
+ (NSURL *)mov2Mp4:(NSURL *)movUrl
{
__block NSURL *m = nil;
dispatch_semaphore_t wait = dispatch_semaphore_create(0);
[self mov2Mp4:movUrl completeBlock:^(AVAssetExportSessionStatus status, NSURL *outPath) {
m = outPath;
dispatch_semaphore_signal(wait);
}];
dispatch_semaphore_wait(wait, DISPATCH_TIME_FOREVER);
return m;
}
+ (NSURL *)mov2Mp4:(NSURL *)movUrl frameSize:(CGSize)frameSize
{
__block NSURL *m = nil;
dispatch_semaphore_t wait = dispatch_semaphore_create(0);
[self mov2Mp4:movUrl frameSize:frameSize completeBlock:^(AVAssetExportSessionStatus status, NSURL *outPath) {
m = outPath;
dispatch_semaphore_signal(wait);
}];
dispatch_semaphore_wait(wait, DISPATCH_TIME_FOREVER);
return m;
}
+ (void)mov2Mp4:(NSURL *)movUrl completeBlock:(void (^)(AVAssetExportSessionStatus status, NSURL *outPath))completeBlock
{
[self mov2Mp4:movUrl frameSize:CGSizeZero completeBlock:completeBlock];
}
+ (void)mov2Mp4:(NSURL *)movUrl frameSize:(CGSize)frameSize completeBlock:(void (^)(AVAssetExportSessionStatus status, NSURL *outPath))completeBlock
{
NSDictionary *dic = @{AVURLAssetPreferPreciseDurationAndTimingKey:@(YES)};
AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:movUrl options:dic];
CGSize vs = [SDAVAssetExportSession videoSizeInAssets:avAsset];
if (frameSize.width > vs.width && frameSize.height > vs.height)
{
#if DEBUG
NSAssert(NO, @"Invalid frameSize; cannot be used.");
#endif
completeBlock(AVAssetExportSessionStatusCancelled,nil);
}
else
{
// Derive the output name from the source file name; NSURL's path helpers
// avoid an out-of-bounds crash when the name has no extension.
NSString *baseName = movUrl.lastPathComponent.stringByDeletingPathExtension;
NSString *mp4Path = [NSString stringWithFormat:@"%@/%@%@.mp4", [self dataPath], baseName, movUrl.pathExtension];
NSURL *mp4Url = [NSURL fileURLWithPath:mp4Path];
[[NSFileManager defaultManager] removeItemAtURL:mp4Url error:nil];
SDAVAssetExportSession *encoder = [SDAVAssetExportSession.alloc initWithAsset:avAsset];
encoder.outputFileType = AVFileTypeMPEG4;
encoder.outputURL = mp4Url;
// Scale the pixel size down by the screen scale to shrink the output.
NSInteger width = vs.width / [UIScreen mainScreen].scale;
NSInteger height = vs.height / [UIScreen mainScreen].scale;
if (frameSize.width > 0 && frameSize.height > 0)
{
    width = MIN(width, frameSize.width);
    height = MIN(height, frameSize.height);
}
NSDictionary *settings = @{
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: @(width),
    AVVideoHeightKey: @(height),
    AVVideoCompressionPropertiesKey: @{
        AVVideoAverageBitRateKey: @6000000,
        AVVideoProfileLevelKey: AVVideoProfileLevelH264High40,
    },
};
encoder.videoSettings = settings;
encoder.audioSettings = @{
    AVFormatIDKey: @(kAudioFormatMPEG4AAC),
    AVNumberOfChannelsKey: @2,
    AVSampleRateKey: @44100,
    AVEncoderBitRateKey: @128000,
};
[encoder exportAsynchronouslyWithCompletionHandler:^
{
completeBlock(encoder.status,mp4Url);
}];
}
}
+ (NSString*)dataPath
{
NSString *dataPath = [NSString stringWithFormat:@"%@/Library/appdata/chatbuffer", NSHomeDirectory()];
NSFileManager *fm = [NSFileManager defaultManager];
if(![fm fileExistsAtPath:dataPath])
{
[fm createDirectoryAtPath:dataPath withIntermediateDirectories:YES attributes:nil error:nil];
}
return dataPath;
}
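A typical call site (the class hosting these wrappers is not named in this post; VideoConverter below is a placeholder). The completion-block variant is the safer entry point; note that the synchronous mov2Mp4: wrappers block their calling thread on a semaphore, so never call those from the main thread:
[VideoConverter mov2Mp4:movUrl completeBlock:^(AVAssetExportSessionStatus status, NSURL *outPath) {
    // The completion block may run off the main thread; hop to main for UI work.
    dispatch_async(dispatch_get_main_queue(), ^{
        if (status == AVAssetExportSessionStatusCompleted) {
            // hand outPath to the IM send pipeline here
        }
    });
}];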