iOS Audio/Video Notes

1. Converting a CVPixelBufferRef to a UIImage

CVPixelBufferRef pixelBuffer; // assumed to be a valid buffer from a capture/decode callback (see sketch below)
CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];

CIContext *temporaryContext = [CIContext contextWithOptions:nil]; // consider caching the context when converting many frames
CGImageRef videoImage = [temporaryContext createCGImage:ciImage fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer))];
            
UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
CGImageRelease(videoImage);
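
Where the pixel buffer comes from depends on the pipeline; a minimal sketch, assuming an AVCaptureVideoDataOutput delegate (the conversion code above would run inside it):

- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // The frame's pixel buffer; only valid for the duration of this
    // callback unless explicitly retained.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }
    // ... convert with the CIImage/CIContext code above ...
}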

2. Getting the audio information from a video

+ (CGSize)videoSizeTransformFromVideoPath:(NSString *)videoPath {
    
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return CGSizeZero;
    }
    
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    CMAudioFormatDescriptionRef item = (__bridge CMAudioFormatDescriptionRef)audioTrack.formatDescriptions.firstObject;
    const AudioStreamBasicDescription *audioStreamDescription = CMAudioFormatDescriptionGetStreamBasicDescription(item);
    if (audioStreamDescription != NULL) {
        // e.g. sample rate (Hz) and channel count of the first audio track
        NSLog(@"sample rate: %.0f, channels: %u",
              audioStreamDescription->mSampleRate,
              (unsigned int)audioStreamDescription->mChannelsPerFrame);
    }
    
    CGSize dimensions = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
    return CGSizeMake(fabs(dimensions.width), fabs(dimensions.height));
}

3. Getting the video resolution

+ (CGSize)videoSizeFromVideoPath:(NSString *)videoPath {
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return CGSizeZero;
    }
    
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGSize dimensions = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
    return CGSizeMake(fabs(dimensions.width), fabs(dimensions.height));
}

4. Getting the video frame rate

+ (CGFloat)nominalFrameRateFromVideoPath:(NSString *)videoPath {
    CGFloat fps = 0.00;
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return fps;
    }
    AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    fps = videoTrack.nominalFrameRate; 
    return fps;
} 
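
Note that nominalFrameRate is the frame rate declared by the track (its average); for variable-frame-rate videos the instantaneous rate can differ from it.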

5. Getting the video duration (in milliseconds)

+ (NSTimeInterval)videoDurationFromVideoPath:(NSString *)videoPath { 
    if (![[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        return 0.00;
    }
    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoPath] options:inputOptions];
    return 1000.0 * urlAsset.duration.value / urlAsset.duration.timescale;
} 
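
Equivalently, CMTimeGetSeconds(urlAsset.duration) * 1000.0 gives the same result and reads more clearly.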

6. Converting an AVFrame to a UIImage (for YUV420P data)

+ (UIImage *)converUIImageFromAVFrame:(AVFrame*)frame {
    CVPixelBufferRef pixelBuffer = [GTVideoTool converCVPixelBufferRefFromAVFrame:frame];
    
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext
                             createCGImage:ciImage
                             fromRect:CGRectMake(0, 0,
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer))];
    
    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
    
    CVPixelBufferRelease(pixelBuffer); 
    CGImageRelease(videoImage);
    
    return uiImage;
}

+ (CVPixelBufferRef)converCVPixelBufferRefFromAVFrame:(AVFrame *)avframe {
    if (!avframe || !avframe->data[0]) {
        return NULL;
    }
    
    CVPixelBufferRef outputPixelBuffer = NULL;
    
    NSDictionary *options = @{
        (__bridge NSString *)kCVPixelBufferBytesPerRowAlignmentKey : @(avframe->linesize[0]),
        (__bridge NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @YES,
        (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}
    };
    
    
    // The NV12 interleaving below assumes Cb and Cr share the same stride
    if (avframe->linesize[1] != avframe->linesize[2]) {
        return NULL;
    }
    
    size_t srcPlaneSize = avframe->linesize[1] * avframe->height / 2;
    size_t dstPlaneSize = srcPlaneSize * 2;
    uint8_t *dstPlane = malloc(dstPlaneSize);
    
    // Interleave the planar Cb and Cr planes into a single CbCr plane (NV12 layout)
    for (size_t i = 0; i < srcPlaneSize; i++) {
        dstPlane[2 * i]     = avframe->data[1][i];
        dstPlane[2 * i + 1] = avframe->data[2][i];
    }
    
    
    CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault,
                                       avframe->width,
                                       avframe->height,
                                       kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                       (__bridge CFDictionaryRef)options,
                                       &outputPixelBuffer);
    if (ret != kCVReturnSuccess) {
        NSLog(@"CVPixelBufferCreate failed: %d", ret);
        free(dstPlane);
        return NULL;
    }
    
    CVPixelBufferLockBaseAddress(outputPixelBuffer, 0);
    
    size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 0);
    size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(outputPixelBuffer, 1);
    
    // Copy the Y plane, then the interleaved CbCr plane. This assumes the
    // buffer's bytes-per-row matches the frame's linesize (hinted via
    // kCVPixelBufferBytesPerRowAlignmentKey above).
    void *base = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 0);
    memcpy(base, avframe->data[0], bytesPerRowY * avframe->height);
    
    base = CVPixelBufferGetBaseAddressOfPlane(outputPixelBuffer, 1);
    memcpy(base, dstPlane, bytesPerRowUV * avframe->height / 2);
    
    CVPixelBufferUnlockBaseAddress(outputPixelBuffer, 0);
    
    free(dstPlane);
    
    return outputPixelBuffer;
}
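
A hypothetical call site, assuming a decoding loop built on FFmpeg's avcodec API (codecCtx stands in for an already-opened AVCodecContext):

AVFrame *frame = av_frame_alloc();
while (avcodec_receive_frame(codecCtx, frame) == 0) {
    // For software H.264/H.265 decoders the output is typically YUV420P,
    // which is what converUIImageFromAVFrame: expects.
    UIImage *image = [GTVideoTool converUIImageFromAVFrame:frame];
    // ... display or store image ...
    av_frame_unref(frame);
}
av_frame_free(&frame);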

7. Converting BGRA data to a UIImage

+ (UIImage *)converUIImageFromRGBA:(ST_GTV_RGBA *)argb {
    UIImage *image = [GTVideoTool imageFromBRGABytes:argb->p_rgba imageSize:CGSizeMake(argb->width, argb->heigh)];
    gtv_queue_rgba_free(argb);
    return image;
}

+ (UIImage *)imageFromBRGABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    CGImageRef imageRef = [self imageRefFromBGRABytes:imageBytes imageSize:imageSize];
    UIImage *image = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    
    return image;
}

+ (CGImageRef)imageRefFromBGRABytes:(unsigned char *)imageBytes imageSize:(CGSize)imageSize {
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
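    // kCGBitmapByteOrder32Little combined with premultiplied-first alpha
    // makes Core Graphics read each 32-bit pixel as BGRA.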
    CGContextRef context = CGBitmapContextCreate(imageBytes,
                                                 imageSize.width,
                                                 imageSize.height,
                                                 8,
                                                 imageSize.width * 4,
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    
    return imageRef;
}

8. Extracting multiple images from a video

- (NSArray *)generateThumbnailFromVideoPath:(NSString *)videoPath
                                            frameCount:(NSInteger)frameCount
                                         clipStartTime:(NSTimeInterval)clipStartTime
                                           clipEndTime:(NSTimeInterval)clipEndTime {
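    // NB: this method blocks the calling thread on a semaphore until generation
    // finishes, so call it from a background queue rather than the main thread.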
    if (frameCount == 0) {
        NSLog(@"error frameCount is equal to zero");
        return nil;
    }
    CGFloat videoDuration = clipEndTime - clipStartTime;
    if (videoDuration <= 0) {
        NSLog(@"error videoDuration is less than or equal to zero");
        return nil;
    }
    CGFloat delayTime = videoDuration / frameCount;
    
    NSMutableArray *frameTimes = [[NSMutableArray alloc] initWithCapacity:frameCount];
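    // A timescale of 25 puts the requested times on a 1/25-second grid.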
    for (int currentFrame = 0; currentFrame < frameCount; currentFrame++) {
        CGFloat currentTime = (clipStartTime + currentFrame * delayTime) * 25;
        CMTime time = CMTimeMake(currentTime, 25);
        [frameTimes addObject:[NSValue valueWithCMTime:time]];
    }
    
    __block NSUInteger successCount = 0;
    NSMutableDictionary *imageDict = [NSMutableDictionary dictionary];
    dispatch_semaphore_t sema = dispatch_semaphore_create(0);
    
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES;
    [generator generateCGImagesAsynchronouslyForTimes:frameTimes
                                    completionHandler:^(CMTime requestedTime, CGImageRef  _Nullable cgImageRef, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
                                        if(result == AVAssetImageGeneratorSucceeded) {
                                            successCount ++;
                                            UIImage *image = [[UIImage alloc] initWithCGImage:cgImageRef scale:[UIScreen mainScreen].scale orientation:UIImageOrientationUp];
                                            
                                            for(int i = 0 ; i < frameTimes.count; i++){
                                                CMTime time = [[frameTimes objectAtIndex:i] CMTimeValue];
                                                if(CMTimeCompare(time , requestedTime) == 0){
                                                    [imageDict setObject:image forKey:@(i)];
                                                    break;
                                                }
                                            }
                                            
                                            if (successCount == frameTimes.count) {
                                                dispatch_semaphore_signal(sema);
                                            }
                                            
                                        } else {
                                            dispatch_semaphore_signal(sema);
                                        }
                                    }];
    
    dispatch_semaphore_wait(sema, DISPATCH_TIME_FOREVER);
    [generator cancelAllCGImageGeneration];
    
    // allValues is unordered; sort by frame index so the thumbnails stay in time order
    NSArray *sortedKeys = [imageDict.allKeys sortedArrayUsingSelector:@selector(compare:)];
    NSMutableArray *images = [NSMutableArray arrayWithCapacity:sortedKeys.count];
    for (NSNumber *key in sortedKeys) {
        [images addObject:imageDict[key]];
    }
    return images;
}
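
A hypothetical call site, pulling 10 evenly spaced thumbnails from the first 5 seconds of a clip (videoPath is assumed to be a local file path; dispatched off the main thread because of the blocking semaphore):

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    NSArray *thumbnails = [self generateThumbnailFromVideoPath:videoPath
                                                    frameCount:10
                                                 clipStartTime:0
                                                   clipEndTime:5.0];
    dispatch_async(dispatch_get_main_queue(), ^{
        // ... update the UI with thumbnails ...
    });
});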

9. During hardware video encoding, the first few frames were sometimes not encoded successfully. Searching GitHub for code related to appendPixelBuffer suggested making a copy of the CVPixelBufferRef before encoding; encoding the copy fixed the problem.

- (void)appendPixelBuffer:(CVPixelBufferRef)videoPixelBuffer withTimestamp:(int)milli {
    [self startSessionIfNeededAtTime:CMTimeMake(milli, 1000)];
    if(![self.videoInput isReadyForMoreMediaData]) {
        GTVLog(@"videoInput is not ready");
        return;
    }
    
    if((int)(milli-self.videoTimestamp) > 60) {
        GTVLog(@"### video fps low (%d,%d)!", milli, (int)(milli-self.videoTimestamp));
    }
    
    if(milli <= self.videoTimestamp) {
        GTVLog(@"appendPixelBuffer: invalid pts (%d,%d)", milli, (int)self.videoTimestamp);
        return;
    }
    
    if(videoPixelBuffer != NULL) {
        if( self.videoWriter.status != AVAssetWriterStatusWriting || self.videoInput.readyForMoreMediaData == false ) {
            GTVLog(@"###videoWriter status %ld %@ %@", (long)self.videoWriter.status, self.videoWriter.error, self.videoInput.readyForMoreMediaData?@"YES":@"NO");
        } else {
            @try {
                CMTime presentTime = CMTimeMake(milli, 1000);
                
                // Fix: the first few frames failed to encode unless the buffer is copied first
                int bufferW = (int)CVPixelBufferGetWidth(videoPixelBuffer);
                int bufferH = (int)CVPixelBufferGetHeight(videoPixelBuffer);
                
                CVPixelBufferRef pixelBufferCopy = NULL;
                if (CVPixelBufferCreate(kCFAllocatorDefault, bufferW, bufferH, kCVPixelFormatType_32BGRA, NULL, &pixelBufferCopy) == kCVReturnSuccess) {
                    self.writeFrameCount ++;
                    
                    CVPixelBufferLockBaseAddress(videoPixelBuffer, 0);
                    CVPixelBufferLockBaseAddress(pixelBufferCopy, 0);
                    
                    uint8_t *baseAddress = CVPixelBufferGetBaseAddress(videoPixelBuffer);
                    uint8_t *copyBaseAddress = CVPixelBufferGetBaseAddress(pixelBufferCopy);
                    memcpy(copyBaseAddress, baseAddress, bufferH * CVPixelBufferGetBytesPerRow(videoPixelBuffer));
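                    // NB: the copy above assumes both buffers share the same
                    // bytesPerRow; if they can differ, copy row by row instead.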
                    
                    [self.pixelAdaptor appendPixelBuffer:pixelBufferCopy withPresentationTime:presentTime];
                    
                    CVPixelBufferUnlockBaseAddress(videoPixelBuffer, 0);
                    CVPixelBufferUnlockBaseAddress(pixelBufferCopy, 0);
                    
                    CVPixelBufferRelease(pixelBufferCopy);
                    
                } else {
                    self.writeFrameCount ++;
                    
                    [self.pixelAdaptor appendPixelBuffer:videoPixelBuffer withPresentationTime:presentTime];
                }
            }
            @catch(NSException * ex) {
                GTVLog(@"mp4muxer exception %@", ex);
            }
        }
    } else {
        GTVLog(@"preparePixelBuffer failed.");
    }
    
    self.videoTimestamp = milli;
}

10. Counting the key frames (I-frames) in a video

- (NSInteger)numOfKeyFramesFromAsset:(AVURLAsset *)urlAsset {
    MYTimer *timer = [MYTimer timerWithFlag:@"count key frames"];
    NSError *error;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:urlAsset error:&error];
    if (error) {
        [timer dot];
        return 0;
    }

    AVAssetTrack *videoTrack = [[urlAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAssetReaderTrackOutput *trackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:nil];
    [assetReader addOutput:trackOutput];
    [assetReader startReading];

    NSInteger keyFrames = 0;
    while (YES) {
        CMSampleBufferRef sampleBuffer = [trackOutput copyNextSampleBuffer];
        if (sampleBuffer) {
            // NB: not every sample buffer corresponds to a frame!
            CFArrayRef attachmentarr = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, TRUE);
            if (attachmentarr && (CFArrayGetCount(attachmentarr) > 0)) {
                // A sample without kCMSampleAttachmentKey_NotSync is a sync (key) frame
                Boolean isKeyFrame = !CFDictionaryContainsKey(CFArrayGetValueAtIndex(attachmentarr, 0), kCMSampleAttachmentKey_NotSync);
                if (isKeyFrame) {
                    keyFrames += 1;
                }
            }
            CFRelease(sampleBuffer); // copyNextSampleBuffer returns a +1 retained buffer
        } else {
            break;
        }
    }
    [timer dot];
    return keyFrames;
}
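
A hypothetical call, assuming videoPath points at a local file:

AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videoPath]];
NSInteger keyFrameCount = [self numOfKeyFramesFromAsset:asset];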

11. Reversing a GIF

+ (void)invertGIFWithModel:(NSString *)sourcePath
           progressHandler:(void (^_Nullable)(CGFloat progress))progressHandler
                   success:(void (^_Nullable)(NSURL * _Nullable outputURL))success
                   failure:(void (^_Nullable)(NSError * _Nullable error))failure
                   shouldCancel:(BOOL (^_Nullable)(void))shouldCancel
                    cancel:(void (^_Nullable)(void))cancel {
    YYImage *gifImage = [YYImage imageWithContentsOfFile:[GDVideoEditUtilManager cutFileStringVideoPath:sourcePath]];
    NSUInteger imageCount = [gifImage animatedImageFrameCount];
    if (imageCount == 0) {
        !failure ? : failure(nil);
        return;
    }
    
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSDictionary *fileProperties = @{(__bridge id)kCGImagePropertyGIFDictionary: @{(__bridge id)kCGImagePropertyGIFLoopCount: @0}};
        NSURL *outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%f-gaoding-reverse.gif", [[NSDate dateWithTimeIntervalSinceNow:0] timeIntervalSince1970]]]];
        if ([[NSFileManager defaultManager] fileExistsAtPath:outputURL.path]) {
            [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
        }
        
        // kUTTypeGIF needs <MobileCoreServices/MobileCoreServices.h>; newer SDKs
        // offer UTTypeGIF from the UniformTypeIdentifiers framework instead.
        CGImageDestinationRef destination = CGImageDestinationCreateWithURL((__bridge CFURLRef)outputURL, kUTTypeGIF, imageCount, NULL);
        if (destination == NULL) {
            dispatch_async(dispatch_get_main_queue(), ^{
                !failure ? : failure(nil);
            });
            return;
        }
        CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)fileProperties);
        
        for (NSInteger index = imageCount - 1; index >= 0; index--) {
            if ([GDMediaFrameRetriever cancelByUser:shouldCancel]) { // cancelled by the user
                CFRelease(destination);
                [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
                dispatch_async(dispatch_get_main_queue(), ^{
                    !cancel ? : cancel();
                });
                return;
            }
            
            UIImage *image = [gifImage animatedImageFrameAtIndex:index];
            NSTimeInterval duration = [gifImage animatedImageDurationAtIndex:index];
            NSDictionary *frameProperties = @{(__bridge id)kCGImagePropertyGIFDictionary: @{(__bridge id)kCGImagePropertyGIFDelayTime: @(duration)}};
            CGImageDestinationAddImage(destination, image.CGImage, (__bridge CFDictionaryRef)frameProperties);
            CGFloat progress = (1 - index / (imageCount * 1.0)) * 0.9;
            dispatch_async(dispatch_get_main_queue(), ^{
                !progressHandler ? : progressHandler(progress);
            });
        }
         
        BOOL writtenImageSuccess = CGImageDestinationFinalize(destination);
        
        if (!writtenImageSuccess) {
            dispatch_async(dispatch_get_main_queue(), ^{
                NSError *error = [NSError errorWithDomain:@"GDMVideoEdit"
                                                     code:9999
                                                 userInfo:@{NSLocalizedDescriptionKey: @"GIF generation failed"}];
                !failure ? : failure(error);
            });
            CFRelease(destination);
            return;
        }
        
        CFRelease(destination);
        dispatch_async(dispatch_get_main_queue(), ^{
            !progressHandler ? : progressHandler(1.0);
            !success ? : success(outputURL);
        });
    });
}

12. Getting an image's pixel dimensions
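
This reads only the image metadata through ImageIO, so the full bitmap is never decoded, which keeps it cheap even for large files.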

+ (CGSize)imageSizeWithFile:(NSString *)filePath {
    NSURL *imageFileURL = [NSURL fileURLWithPath:filePath];
    CGImageSourceRef imageSource = CGImageSourceCreateWithURL((CFURLRef)imageFileURL, NULL);
    if (imageSource == NULL) {
        return CGSizeZero;
    }
    
    CGFloat width = 0.0f, height = 0.0f;
    CFDictionaryRef imageProperties = CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL);
    CFRelease(imageSource);
    
    if (imageProperties != NULL) {
        CFNumberRef widthNum  = CFDictionaryGetValue(imageProperties, kCGImagePropertyPixelWidth);
        if (widthNum != NULL) {
            CFNumberGetValue(widthNum, kCFNumberCGFloatType, &width);
        }
        
        CFNumberRef heightNum = CFDictionaryGetValue(imageProperties, kCGImagePropertyPixelHeight);
        if (heightNum != NULL) {
            CFNumberGetValue(heightNum, kCFNumberCGFloatType, &height);
        }
        
        CFNumberRef orientationNum = CFDictionaryGetValue(imageProperties, kCGImagePropertyOrientation);
        if (orientationNum != NULL) {
            int orientation;
            CFNumberGetValue(orientationNum, kCFNumberIntType, &orientation);
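            // EXIF orientations 5-8 are rotated 90 degrees, so the stored
            // width/height are swapped relative to the display orientation.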
            if (orientation > 4) {
                CGFloat temp = width;
                width = height;
                height = temp;
            }
        }
        
        CFRelease(imageProperties);
    }
    
    return CGSizeMake(width, height);
}
