Learning FFmpeg by Building a Mac Player (6): Mixing FFmpeg with the Mac Codecs

Mac and iOS can hardware-encode and hardware-decode H.264 and H.265 video streams through VideoToolbox. This installment combines the two stacks in both directions: FFmpeg demuxes a file and VideoToolbox decodes it, and then data captured on the Mac is encoded and muxed with FFmpeg.

FFmpeg demuxing + VideoToolbox decoding
An FFmpeg AVPacket maps to a CMBlockBufferRef on the Mac side: the packet's compressed data backs the block buffer, which is then wrapped in a CMSampleBufferRef and handed to the decoder.

// Parse the SPS and PPS out of AVCodecContext->extradata (avcC format).
// This simplified parser assumes exactly one SPS and one PPS.
static void parseH264SequenceHeader(uint8_t * extra_data, uint8_t ** sps, size_t * sps_size, uint8_t ** pps, size_t * pps_size) {
    int spsSize = (extra_data[6] << 8) + extra_data[7]; // 16-bit big-endian SPS length
    *sps_size = spsSize;
    *sps = &extra_data[8];
    int ppsSize = (extra_data[8 + spsSize + 1] << 8) + extra_data[8 + spsSize + 2]; // PPS length follows the PPS count byte
    *pps = &extra_data[8 + spsSize + 3];
    *pps_size = ppsSize;
}
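For reference, the offsets the parser walks come straight from the avcC (AVCDecoderConfigurationRecord) layout; a sketch of the common single-SPS/single-PPS case:

// codec_ctx->extradata in avcC format, one SPS and one PPS:
// [0]               configurationVersion (always 1)
// [1]               AVCProfileIndication
// [2]               profile_compatibility
// [3]               AVCLevelIndication
// [4]               6 reserved bits + 2-bit lengthSizeMinusOne (3 => 4-byte NALU length prefixes)
// [5]               3 reserved bits + 5-bit numOfSequenceParameterSets
// [6..7]            sequenceParameterSetLength, big-endian
// [8..8+spsSize-1]  SPS NAL unit
// [8+spsSize]       numOfPictureParameterSets
// [8+spsSize+1..+2] pictureParameterSetLength, big-endian
// [8+spsSize+3..]   PPS NAL unit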

- (void)main {
    ...
    // Initialize the VideoToolbox decoder
    parseH264SequenceHeader(codec_ctx->extradata, &sps, &sps_size, &pps, &pps_size);
    uint8_t * parameterSetPointers[2] = {sps, pps};
    size_t parameterSetSizes[2] = {sps_size, pps_size};
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, (const uint8_t * const *)parameterSetPointers, parameterSetSizes, 4, &formatDescRef);
    if (status != noErr) {
        NSLog(@"Create video description failed...");
    }
    VTDecompressionOutputCallbackRecord callback;
    callback.decompressionOutputCallback = didDecompress;
    callback.decompressionOutputRefCon = (__bridge void *)self;
    NSDictionary * destinationImageBufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8Planar)};
    status = VTDecompressionSessionCreate(kCFAllocatorDefault, formatDescRef, NULL, (__bridge CFDictionaryRef)destinationImageBufferAttributes, &callback, &sessionRef);
    if (status != noErr) {
        NSLog(@"Create decompression session failed status = %@", @(status));
    }
    ...
    AVPacket * pkt = av_packet_alloc();
    while (av_read_frame(format_ctx, pkt) >= 0) {
        if (pkt->stream_index == video_index) {
            CMBlockBufferRef blockBuffer = NULL;
            // Create a BlockBuffer directly from the AVPacket's data. The NALUs must be
            // in AVCC (length-prefixed) format; Annex B streams have to be converted
            // first (see the sketch after this method).
            OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, pkt->data, pkt->size, kCFAllocatorNull, NULL, 0, pkt->size, 0, &blockBuffer);
            if (status != kCMBlockBufferNoErr) {
                NSLog(@"Create BlockBuffer failed status = %@", @(status));
            }
            const size_t sampleSize = pkt->size;
            CMSampleBufferRef sampleBuffer = NULL;
            status = CMSampleBufferCreate(kCFAllocatorDefault, blockBuffer, true, NULL, NULL, formatDescRef, 1, 0, NULL, 1, &sampleSize, &sampleBuffer);
            if (status != noErr) {
                NSLog(@"SampleBuffer create failed");
            }
            
            VTDecodeFrameFlags flags = kVTDecodeFrame_EnableAsynchronousDecompression;
            VTDecodeInfoFlags flagOut;
            // sourceFrameRefCon is unused by the callback, so pass NULL
            status = VTDecompressionSessionDecodeFrame(sessionRef, sampleBuffer, flags, NULL, &flagOut);
            if (status == noErr) {
                VTDecompressionSessionWaitForAsynchronousFrames(sessionRef);
            }
            CFRelease(blockBuffer);
            CFRelease(sampleBuffer);
        }
        av_packet_unref(pkt); // release the packet's payload before the next av_read_frame
    }
}
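Packets read from an MP4/MOV file are already in AVCC format, so they can be wrapped directly as above. Raw .h264 or MPEG-TS input uses Annex B start codes instead; a minimal in-place conversion sketch (not from the original post, and assuming every NALU is prefixed by a 4-byte 00 00 00 01 start code) rewrites each start code as a big-endian NALU length:

static void annexbToAvcc(uint8_t * data, int size) {
    int pos = 0;
    while (pos + 4 < size) {
        // Scan for the start of the next NALU (or the end of the packet)
        int next = pos + 4;
        while (next + 4 <= size &&
               !(data[next] == 0 && data[next + 1] == 0 &&
                 data[next + 2] == 0 && data[next + 3] == 1)) {
            next++;
        }
        if (next + 4 > size) next = size;
        uint32_t naluLength = (uint32_t)(next - pos - 4);
        // Overwrite the 4-byte start code with the 4-byte big-endian length
        data[pos]     = (naluLength >> 24) & 0xFF;
        data[pos + 1] = (naluLength >> 16) & 0xFF;
        data[pos + 2] = (naluLength >> 8) & 0xFF;
        data[pos + 3] = naluLength & 0xFF;
        pos = next;
    }
}

Streams that mix 3-byte and 4-byte start codes need a copying variant instead, since a 4-byte length prefix no longer fits in place of a 3-byte start code.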
// Decompression output callback
void didDecompress(void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef imageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
  if (status == noErr && imageBuffer) {
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly); // the base address must be locked before reading plane data
    for (int i = 0; i < CVPixelBufferGetPlaneCount(imageBuffer); i++) {
      void * baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i);
      size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i);
      size_t height = CVPixelBufferGetHeightOfPlane(imageBuffer, i);
      fwrite(baseAddress, bytesPerRow * height, 1, file);
      // Writing frames directly here produces a scrambled YUV file: VideoToolbox
      // outputs frames in decode (DTS) order, so they must be reordered by PTS
      // before being written (see the sketch after this function). Note also that
      // bytesPerRow may include CoreVideo row padding beyond the visible width.
    }
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
  }
}
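One way to handle that reordering is to hold decoded frames in a small buffer sorted by presentationTimeStamp and only write out the oldest once the buffer is deeper than the stream's maximum reordering (two or three entries covers a single B-frame). A sketch, not from the original post; writeFrameToYUVFile() is a hypothetical helper standing in for the plane-writing loop above:

static NSMutableArray * pendingFrames; // entries of @{@"pts": NSValue(CMTime), @"buf": CVPixelBufferRef}

static void enqueueDecodedFrame(CVPixelBufferRef imageBuffer, CMTime pts) {
    if (!pendingFrames) pendingFrames = [NSMutableArray array];
    // Storing the pixel buffer as an id retains it under ARC
    [pendingFrames addObject:@{@"pts": [NSValue valueWithCMTime:pts],
                               @"buf": (__bridge id)imageBuffer}];
    [pendingFrames sortUsingComparator:^NSComparisonResult(NSDictionary * a, NSDictionary * b) {
        return (NSComparisonResult)CMTimeCompare([a[@"pts"] CMTimeValue], [b[@"pts"] CMTimeValue]);
    }];
    if (pendingFrames.count > 3) {
        writeFrameToYUVFile((__bridge CVPixelBufferRef)pendingFrames[0][@"buf"]); // hypothetical helper
        [pendingFrames removeObjectAtIndex:0];
    }
}

Whatever is still buffered when decoding finishes has to be flushed in the same sorted order.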
Mac capture + FFmpeg encoding

- (void)initEncoder {
  // avformat_alloc_output_context2() allocates the context itself;
  // a separate avformat_alloc_context() call would just leak.
  if (avformat_alloc_output_context2(&format_ctx, NULL, NULL, outputString.UTF8String) < 0) {
    NSLog(@"Open output path failed");
  }
  codec = avcodec_find_encoder(AV_CODEC_ID_H264);
  codec_ctx = avcodec_alloc_context3(codec);
  codec_ctx->bit_rate = 5000000;
  codec_ctx->width = 1280;  // Match the width/height AVFoundation captures from the camera
  codec_ctx->height = 720;
  codec_ctx->time_base = (AVRational){1, 24};  // 24 fps video
  codec_ctx->framerate = (AVRational){24, 1};
  codec_ctx->gop_size = 10;
  codec_ctx->max_b_frames = 1;
  codec_ctx->pix_fmt = AV_PIX_FMT_NV12;  // Mac and iPhone cameras capture NV12
  codec_ctx->color_range = AVCOL_RANGE_JPEG;
  av_opt_set(codec_ctx->priv_data, "preset", "slow", 0);

  if (format_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
    codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; // let the encoder fill codec_ctx->extradata with SPS/PPS
  }

  ret = avcodec_open2(codec_ctx, codec, NULL); // the encoder must be opened before its parameters are copied
  if (ret < 0) {
    NSLog(@"Could not open encoder");
  }

  AVStream * stream = avformat_new_stream(format_ctx, NULL); // create the video stream
  ret = avcodec_parameters_from_context(stream->codecpar, codec_ctx);
  if (ret < 0) {
    NSLog(@"Failed to copy encoder parameters to output stream 0");
  }
  stream->time_base = codec_ctx->time_base;

  av_dump_format(format_ctx, 0, outputString.UTF8String, 1);
  if (!(format_ctx->oformat->flags & AVFMT_NOFILE)) {
    ret = avio_open(&format_ctx->pb, outputString.UTF8String, AVIO_FLAG_WRITE);
    if (ret < 0) {
      NSLog(@"Could not open output file");
    }
  }
    
  ret = avformat_write_header(format_ctx, NULL);
  if (ret < 0) {
    NSLog(@"Error occurred when opening output file");
  }
}
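For the NV12 assumption above to hold, the capture output must be configured to deliver bi-planar 4:2:0. A minimal sketch, assuming an AVCaptureVideoDataOutput ivar named videoOutput that has already been added to _captureSession:

// Request NV12 from AVFoundation; the FullRange variant matches
// the AVCOL_RANGE_JPEG color range set on the encoder context above.
videoOutput.videoSettings = @{
    (id)kCVPixelBufferPixelFormatTypeKey:
        @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};
[videoOutput setSampleBufferDelegate:self
                               queue:dispatch_queue_create("capture.queue", DISPATCH_QUEUE_SERIAL)];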
// Camera capture callback
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
  CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

  AVFrame * frame = av_frame_alloc(); // AVFrame that will hold the captured pixel data
  frame->height = (int)CVPixelBufferGetHeight(imageBuffer);
  frame->width = (int)CVPixelBufferGetWidth(imageBuffer);
  frame->format = AV_PIX_FMT_NV12;
  frame->color_range = AVCOL_RANGE_JPEG;
  av_frame_get_buffer(frame, 0); // Allocates frame->data; the pixel format (video) or sample format (audio), width/height (video), or nb_samples/channel_layout (audio) must be set before this call

  CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
  for (int plane = 0; plane < 2; plane++) { // NV12: one Y plane, one interleaved CbCr plane
    uint8_t * baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, plane);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, plane);
    size_t height = CVPixelBufferGetHeightOfPlane(imageBuffer, plane);
    // Copy row by row: CoreVideo may pad each row, so its stride can differ
    // from the linesize that av_frame_get_buffer() allocated.
    size_t copyBytes = MIN(bytesPerRow, (size_t)frame->linesize[plane]);
    for (size_t row = 0; row < height; row++) {
      memcpy(frame->data[plane] + row * frame->linesize[plane], baseAddress + row * bytesPerRow, copyBytes);
    }
  }
  CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

  frame->pts = _pts++; // in codec_ctx->time_base units: one tick per frame at 24 fps
  int ret = avcodec_send_frame(codec_ctx, frame);
  AVPacket * pkt = av_packet_alloc();
  while (ret >= 0) {
    ret = avcodec_receive_packet(codec_ctx, pkt);
    if (ret == AVERROR(EAGAIN)) {
      NSLog(@"Output is not available in the current state");
      break;
    } else if (ret == AVERROR_EOF) {
      NSLog(@"The encoder has been fully flushed, and there will be no more output packets");
      break;
    } else if (ret < 0) {
      NSLog(@"Error during encoding");
      break;
    }
        
    pkt->stream_index = 0;
    // frame->pts was set before avcodec_send_frame(), so the encoder already
    // produced correct pts/dts (with B-frames enabled, dts lags pts and the
    // two must not be forced equal here).
    pkt->duration = 1; // one frame in codec_ctx->time_base (1/24 s)

    av_packet_rescale_ts(pkt, codec_ctx->time_base, format_ctx->streams[0]->time_base);
    if (av_write_frame(format_ctx, pkt) >= 0) {
      NSLog(@"Write success");
    }
  }
  av_packet_free(&pkt);
  av_frame_free(&frame);
}
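Incrementing _pts once per callback assumes the camera really delivers a constant 24 fps. A more robust variant, a sketch assuming a CMTime ivar _firstPTS initialized to kCMTimeInvalid, derives frame->pts from the sample buffer's presentation timestamp rescaled into the encoder's time base:

  CMTime ts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
  if (CMTIME_IS_INVALID(_firstPTS)) {
    _firstPTS = ts; // remember when capture started
  }
  CMTime elapsed = CMTimeSubtract(ts, _firstPTS);
  // Rescale from the capture clock (1/elapsed.timescale seconds per tick)
  // into the encoder's 1/24 time base
  frame->pts = av_rescale_q(elapsed.value,
                            (AVRational){1, (int)elapsed.timescale},
                            codec_ctx->time_base);

With this approach, dropped camera frames no longer compress the timeline.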

- (void)endRecord {
  [_captureSession stopRunning];

  int ret = avcodec_send_frame(codec_ctx, NULL); // sending NULL drains (flushes) the encoder
  AVPacket * pkt = av_packet_alloc();
  while (ret >= 0) {
    ret = avcodec_receive_packet(codec_ctx, pkt);
    if (ret == AVERROR(EAGAIN)) {
      NSLog(@"Output is not available in the current state");
      break;
    } else if (ret == AVERROR_EOF) {
      NSLog(@"The encoder has been fully flushed, and there will be no more output packets");
      break;
    } else if (ret < 0) {
      NSLog(@"Error during encoding");
      break;
    }
    pkt->stream_index = 0;
    // The duration local from the capture callback is not in scope here; use
    // one frame in the encoder's time base and keep the encoder's pts/dts.
    pkt->duration = 1;
    av_packet_rescale_ts(pkt, codec_ctx->time_base, format_ctx->streams[0]->time_base);
    if (av_write_frame(format_ctx, pkt) >= 0) {
      NSLog(@"Write success");
    }
  }
  av_packet_free(&pkt);
  av_write_trailer(format_ctx);
  avcodec_free_context(&codec_ctx); // avcodec_close() is deprecated; free the context instead
  avio_closep(&format_ctx->pb);
  avformat_free_context(format_ctx);
}
