// 获取视频每一帧
- (UIImage *)getVideoPreViewImageByTime:(float)t
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:_playUrl options:nil];
AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
gen.appliesPreferredTrackTransform = YES;
CMTime time = CMTimeMakeWithSeconds(t, 1); //0.0 600
NSError *error = nil;
CMTime actualTime;
CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
UIImage *img = [[UIImage alloc] initWithCGImage:image];
CGImageRelease(image);
// 创建一个亮度的滤镜
GPUImageBrightnessFilter *passthroughFilter = [[GPUImageBrightnessFilter alloc] init];
passthroughFilter.brightness = 0.5f;
// 设置要渲染的区域
[passthroughFilter forceProcessingAtSize:img.size];
// ???
[passthroughFilter useNextFrameForImageCapture];
// 获取数据源
GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:img];
// 添加上滤镜
[stillImageSource addTarget:passthroughFilter];
// 开始渲染
[stillImageSource processImage];
// 获取渲染后的图片
UIImage *nearestNeighborImage = [passthroughFilter imageFromCurrentFramebuffer];
// 加载出来处理后的
UIImageView *imageView = [[UIImageView alloc] initWithImage:nearestNeighborImage];
imageView.backgroundColor = [UIColor redColor];
imageView.image = nearestNeighborImage;
imageView.frame = CGRectMake(0, pointY, 100, 60);
[self.view addSubview:imageView];
pointY += 70;
NSLog(@"� %g", t);
[self.imageArr addObject:nearestNeighborImage];
return img;
}
// 在需要时调用:(此处是每0.1秒调用一次)
AVAsset *movieAsset = [AVAsset assetWithURL:fileUrl]; // fileUrl:文件路径
int second = (int)movieAsset.duration.value / movieAsset.duration.timescale; // 获取视频总时长,单位秒
NSLog(@"movie duration : %d", second);
for (float i = 0.0; i < second;) {
[self getVideoPreViewImageByTime:i];
i += 0.1;
}