FFmpeg Learning: Developing a Mac Player (5): Hardware Video Decoding with FFmpeg

macOS and iOS support hardware decoding of H.264 video through VideoToolbox, and FFmpeg supports VideoToolbox as well. The implementation below follows FFmpeg's official hw_decode example.
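
Before hard-coding "videotoolbox", it can be worth confirming that the local FFmpeg build actually includes VideoToolbox support. A minimal sketch (the helper name list_hw_device_types is my own) that enumerates the available device types with av_hwdevice_iterate_types:

    #include <libavutil/hwcontext.h>
    #include <stdio.h>

    // Print every hardware device type compiled into this FFmpeg build;
    // "videotoolbox" should appear on macOS/iOS when support is enabled.
    static void list_hw_device_types(void) {
        enum AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
        while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE) {
            printf("%s\n", av_hwdevice_get_type_name(type));
        }
    }

With the device type confirmed, the main decode path follows the official example: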

    enum AVHWDeviceType type = av_hwdevice_find_type_by_name("videotoolbox"); // on macOS and iOS this can always be "videotoolbox"
    if (avformat_open_input(&input_ctx, [inputString UTF8String], NULL, NULL) != 0) {
        fprintf(stderr, "Can not open input file '%s'\n", [inputString UTF8String]);
        return -1;
    }
    if (avformat_find_stream_info(input_ctx, NULL) < 0) {
        fprintf(stderr, "Can not find stream information\n");
        return -1;
    }
    ret = av_find_best_stream(input_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &decoder, 0);
    if (ret < 0) {
        fprintf(stderr, "Can not find a video stream in the input file\n");
        return -1;
    }
    video_stream = ret;
    for (i = 0; ; i++) {
        const AVCodecHWConfig * config = avcodec_get_hw_config(decoder, i);
        if (!config) {
            fprintf(stderr, "Decoder %s does not support device type %s.\n", decoder->name, av_hwdevice_get_type_name(type));
            return -1;
        }
        if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == type) {
            hw_pix_fmt = config->pix_fmt; // save the pixel format the hardware supports
            break;
        }
    }
    if (!(decoder_ctx = avcodec_alloc_context3(decoder))) {
        return AVERROR(ENOMEM);
    }
    video = input_ctx->streams[video_stream];
    if (avcodec_parameters_to_context(decoder_ctx, video->codecpar) < 0) {
        return -1;
    }
    decoder_ctx->get_format = get_hw_format;   // callback that passes the hardware-supported pixel format to the decoder
    if (hw_decoder_init(decoder_ctx, type) < 0) {
        return -1;
    }
    if ((ret = avcodec_open2(decoder_ctx, decoder, NULL)) < 0) {
        fprintf(stderr, "Failed to open codec for stream #%u\n", video_stream);
        return -1;
    }
    output_file = fopen([outputString UTF8String], "wb");
    while (ret >= 0) {
        if ((ret = av_read_frame(input_ctx, &packet)) < 0) {
            break;
        }
        if (video_stream == packet.stream_index) {
            ret = decode_write(decoder_ctx, &packet);
        }
        av_packet_unref(&packet);
    }
    packet.data = NULL;   // send an empty packet to flush the remaining frames out of the decoder
    packet.size = 0;
    ret = decode_write(decoder_ctx, &packet);
    av_packet_unref(&packet);

    if (output_file) {
        fclose(output_file);
    }
    avcodec_free_context(&decoder_ctx);
    avformat_close_input(&input_ctx);
    av_buffer_unref(&hw_device_ctx);
    return 0;

    static int hw_decoder_init(AVCodecContext * ctx, const enum AVHWDeviceType type) {
        int err = 0;
        if ((err = av_hwdevice_ctx_create(&hw_device_ctx, type, NULL, NULL, 0)) < 0) {
            fprintf(stderr, "Failed to create specified HW device.\n");
            return err;
        }
        ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx); // hand hw_device_ctx to the decoder context; this must happen before avcodec_open2 and must not be modified afterwards
        return err;
    }

    static enum AVPixelFormat get_hw_format(AVCodecContext * ctx, const enum AVPixelFormat * pix_fmts) {
        const enum AVPixelFormat * p;
        for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) {
            if (*p == hw_pix_fmt) {
                return *p;
            }
        }
        fprintf(stderr, "Failed to get HW surface format.\n");
        return AV_PIX_FMT_NONE;
    }

    static int decode_write(AVCodecContext * avctx, AVPacket * packet) {  // decode a packet and write out the raw frames
        AVFrame * frame = NULL, * sw_frame = NULL;
        AVFrame * tmp_frame = NULL;
        uint8_t * buffer = NULL;
        int size;
        int ret = 0;

        ret = avcodec_send_packet(avctx, packet);
        if (ret < 0) {
            fprintf(stderr, "Error during decoding\n");
            return ret;
        }
        while (1) {
            if (!(frame = av_frame_alloc()) || !(sw_frame = av_frame_alloc())) {
                fprintf(stderr, "Can not alloc frame\n");
                ret = AVERROR(ENOMEM);
                goto fail;
            }
            ret = avcodec_receive_frame(avctx, frame);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                av_frame_free(&frame);
                av_frame_free(&sw_frame);
                return 0;
            } else if (ret < 0) {
                fprintf(stderr, "Error while decoding\n");
                goto fail;
            }
            if (frame->format == hw_pix_fmt) {
                if ((ret = av_hwframe_transfer_data(sw_frame, frame, 0)) < 0) { // retrieve the decoded data from the GPU into system memory
                    fprintf(stderr, "Error transferring the data to system memory\n");
                    goto fail;
                }
                tmp_frame = sw_frame;
            } else {
                tmp_frame = frame;
            }
            // The hardware-decoded sw_frame comes back with format 23, i.e. AV_PIX_FMT_NV12: data[0] holds the Y plane and data[1] holds the interleaved UV plane. This is the same layout CVPixelBuffer uses for YUV data, so converting to a CVPixelBuffer is straightforward.
            size = avctx->width * avctx->height;   // gotcha: the height stored in sw_frame did not match the video's original height, so frames written with it would not play back; the decoder context's width and height are used instead
            fwrite(tmp_frame->data[0], 1, size, output_file);
            fwrite(tmp_frame->data[1], 1, size / 2, output_file);   // dump raw NV12 to a .yuv file, playable with: ffplay -video_size 1920x1080 -pix_fmt nv12 xxx.yuv
        fail:
            av_frame_free(&frame);
            av_frame_free(&sw_frame);
            if (ret < 0) {
                return ret;
            }
        }
    }
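
The comment in decode_write points out that NV12 matches the bi-planar YUV layout of CVPixelBuffer. As a rough illustration of that conversion (the helper name avframe_to_pixelbuffer is my own, and kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange is assumed to match the decoded range), copying plane by plane and row by row also sidesteps any linesize padding in the frame:

    #include <CoreVideo/CoreVideo.h>
    #include <libavutil/frame.h>
    #include <string.h>

    // Hypothetical helper: deep-copy an NV12 AVFrame into a freshly created CVPixelBuffer.
    static CVPixelBufferRef avframe_to_pixelbuffer(const AVFrame * frame) {
        CVPixelBufferRef pb = NULL;
        if (CVPixelBufferCreate(kCFAllocatorDefault, frame->width, frame->height,
                                kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                NULL, &pb) != kCVReturnSuccess) {
            return NULL;
        }
        CVPixelBufferLockBaseAddress(pb, 0);
        // Copy row by row: both AVFrame linesize and CVPixelBuffer bytes-per-row may include padding.
        uint8_t * dstY = CVPixelBufferGetBaseAddressOfPlane(pb, 0);
        size_t strideY = CVPixelBufferGetBytesPerRowOfPlane(pb, 0);
        for (int r = 0; r < frame->height; r++) {
            memcpy(dstY + r * strideY, frame->data[0] + r * frame->linesize[0], frame->width);
        }
        uint8_t * dstUV = CVPixelBufferGetBaseAddressOfPlane(pb, 1);
        size_t strideUV = CVPixelBufferGetBytesPerRowOfPlane(pb, 1);
        for (int r = 0; r < frame->height / 2; r++) {
            memcpy(dstUV + r * strideUV, frame->data[1] + r * frame->linesize[1], frame->width);
        }
        CVPixelBufferUnlockBaseAddress(pb, 0);
        return pb; // caller releases with CVPixelBufferRelease
    }

Note also that when frame->format is AV_PIX_FMT_VIDEOTOOLBOX, frame->data[3] already holds a CVPixelBufferRef, so a renderer can often use it directly without going through av_hwframe_transfer_data at all.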
