YUV原始数据缩放和格式转换

使用的ffmpeg里的libswscale模块做了个demo,其实libyuv也可以做图像缩放,libswscale效率更高,支持的格式更多。

swscale_demo.c

/* 
* 需設定 src_filename 及 dst_filename, 長寬等資訊 
* 需 link libswscale 
* 主要有三個 function 
* sws_getContext() 是 initial 用, sws_freeContext() 是結束用 
* sws_scale() 是主要運作的 function 
* 下面的 while 迴圈會轉換整個檔案的每一張 YUV; 如果只要轉換第一張, 把迴圈改成單次讀取即可
*/
 
#include <stdio.h>
#include <libavutil/imgutils.h>
#include <libavutil/parseutils.h>
#include <libswscale/swscale.h>
 
/*
 * Read raw YUV420P frames from src_filename, rescale each frame to
 * dst_size with libswscale, and write the result to dst_filename.
 * Returns 0 on success, non-zero on any setup/conversion failure.
 */
int main(int argc, char **argv)
{
    enum AVPixelFormat src_pix_fmt = AV_PIX_FMT_YUV420P; /* also tested: AV_PIX_FMT_YUYV422, AV_PIX_FMT_NV12 */
    enum AVPixelFormat dst_pix_fmt = AV_PIX_FMT_YUV420P; /* also tested: AV_PIX_FMT_RGB24 */
    struct SwsContext *sws_ctx = NULL;

    const char *src_filename = "in_640x360_yuv420p.yuv";
    const char *src_size     = "640x360";
    const char *dst_filename = "out_320x240_yuv420p.yuv";
    const char *dst_size     = "320x240";

    /* zero-init so the cleanup path may av_freep()/fclose() safely even
     * when we bail out before the buffers/files exist */
    uint8_t *src_data[4] = { NULL };
    uint8_t *dst_data[4] = { NULL };
    int src_linesize[4];
    int dst_linesize[4];
    int src_bufsize = 0;
    int dst_bufsize = 0;
    int src_w, src_h;
    int dst_w, dst_h;
    int ret = 0;

    FILE *src_file = NULL;
    FILE *dst_file = NULL;

    /* parse the "WxH" source geometry string */
    if (av_parse_video_size(&src_w, &src_h, src_size) < 0) {
        fprintf(stderr,
                "Invalid size '%s', must be in the form WxH or a valid size abbreviation\n",
                src_size); /* was dst_size: reported the wrong string */
        return -1;
    }
    src_file = fopen(src_filename, "rb");
    if (!src_file) {
        fprintf(stderr, "Could not open source file %s\n",
                src_filename); /* was dst_filename */
        return -1;
    }

    /* parse the "WxH" destination geometry string */
    if (av_parse_video_size(&dst_w, &dst_h, dst_size) < 0) {
        fprintf(stderr,
                "Invalid size '%s', must be in the form WxH or a valid size abbreviation\n",
                dst_size);
        ret = -1;
        goto end;
    }
    dst_file = fopen(dst_filename, "wb");
    if (!dst_file) {
        fprintf(stderr, "Could not open destination file %s\n", dst_filename);
        ret = -1;
        goto end;
    }

    /* create scaling context */
    sws_ctx = sws_getContext(src_w, src_h, src_pix_fmt, dst_w, dst_h, dst_pix_fmt,
                             SWS_BILINEAR, NULL, NULL, NULL);
    if (!sws_ctx) {
        fprintf(stderr,
                "Impossible to create scale context for the conversion "
                "fmt:%s s:%dx%d -> fmt:%s s:%dx%d\n",
                av_get_pix_fmt_name(src_pix_fmt), src_w, src_h,
                av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h);
        ret = AVERROR(EINVAL);
        goto end;
    }

    /* allocate source image buffer (16-byte aligned, as sws_scale prefers) */
    if ((ret = av_image_alloc(src_data, src_linesize, src_w, src_h, src_pix_fmt, 16)) < 0) {
        fprintf(stderr, "Could not allocate source image\n");
        goto end;
    }
    src_bufsize = ret;

    /* destination buffer is written straight to a rawvideo file, so no
     * alignment padding (align = 1) */
    if ((ret = av_image_alloc(dst_data, dst_linesize, dst_w, dst_h, dst_pix_fmt, 1)) < 0) {
        fprintf(stderr, "Could not allocate destination image\n");
        goto end;
    }
    dst_bufsize = ret;

    /* Convert every complete frame.  fread's return count is the reliable
     * end-of-data test: feof() only becomes true AFTER a short read, so the
     * old `while (!feof(...))` loop re-processed the final stale frame. */
    while (fread(src_data[0], 1, src_bufsize, src_file) == (size_t)src_bufsize) {
        sws_scale(sws_ctx, (const uint8_t * const *)src_data, src_linesize,
                  0, src_h, dst_data, dst_linesize);

        /* write scaled image to file */
        fwrite(dst_data[0], 1, dst_bufsize, dst_file);
    }

    /* print the hint once, not once per frame */
    fprintf(stderr, "Scaling succeeded. Play the output file with the command:\n"
            "ffplay -f rawvideo -pix_fmt %s -video_size %dx%d %s\n",
            av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h, dst_filename);
    ret = 0;

end:
    if (dst_file)
        fclose(dst_file); /* fclose(NULL) is undefined behavior, so guard */
    if (src_file)
        fclose(src_file);
    av_freep(&src_data[0]);
    av_freep(&dst_data[0]);
    sws_freeContext(sws_ctx); /* NULL-safe */
    return ret < 0;
}

 

gcc -o swscale_demo swscale_demo.c -lavformat -lavutil -lavdevice -lavcodec -lswresample -lavfilter -lswscale -lz -lm  -lpthread

./swscale_demo

ffplay -f rawvideo -pix_fmt yuv420p -video_size 320x240 out_320x240_yuv420p.yuv

 

整理一下

/* 
* 需設定 src_filename 及 dst_filename, 長寬等資訊 
* 需 link libswscale 
* 主要有三個 function 
* sws_getContext() 是 initial 用, sws_freeContext() 是結束用 
* sws_scale() 是主要運作的 function 
* raw_data_test 開啟時, main 的 while 迴圈會轉換整個檔案的每一張 YUV
*/
 
#include 
#include 

#define raw_data_test (1)

#if raw_data_test
const char *src_filename = "in_1920x1080_yuv420p.yuv";
const char *dst_filename = "out_1080x720_yuv420p.yuv";
FILE *src_file;
FILE *dst_file;
#endif

enum AVPixelFormat src_pix_fmt = AV_PIX_FMT_YUV420P;//AV_PIX_FMT_YUYV422;//AV_PIX_FMT_NV12;
enum AVPixelFormat dst_pix_fmt = AV_PIX_FMT_YUV420P;
struct SwsContext *sws_ctx;

static uint8_t *src_data[4]; 
static uint8_t *dst_data[4];
static int src_linesize[4];
static int dst_linesize[4];
static int src_bufsize;
static int dst_bufsize;

/*
 * Create the swscale context and allocate the source/destination image
 * buffers for a src_w x src_h -> dst_w x dst_h conversion.
 * Returns 0 on success, -1 on failure (nothing is left allocated on
 * failure — the original leaked sws_ctx and the source image when the
 * second av_image_alloc() failed).
 */
int en_swscale_init(int src_w, int src_h, int dst_w, int dst_h)
{
    int ret;

    /* create scaling context */
    sws_ctx = sws_getContext(src_w, src_h, src_pix_fmt, dst_w, dst_h, dst_pix_fmt,
                             SWS_BILINEAR, NULL, NULL, NULL);
    if (!sws_ctx) {
        fprintf(stderr,
                "Impossible to create scale context for the conversion "
                "fmt:%s s:%dx%d -> fmt:%s s:%dx%d\n",
                av_get_pix_fmt_name(src_pix_fmt), src_w, src_h,
                av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h);
        return -1; /* dead store `ret = AVERROR(EINVAL)` removed */
    }

    /* allocate source image buffer (16-byte aligned for sws_scale) */
    if ((ret = av_image_alloc(src_data, src_linesize, src_w, src_h, src_pix_fmt, 16)) < 0) {
        fprintf(stderr, "Could not allocate source image\n");
        goto fail_ctx;
    }
    src_bufsize = ret;

    /* destination buffer is written to a rawvideo file, no alignment */
    if ((ret = av_image_alloc(dst_data, dst_linesize, dst_w, dst_h, dst_pix_fmt, 1)) < 0) {
        fprintf(stderr, "Could not allocate destination image\n");
        goto fail_src;
    }
    dst_bufsize = ret;

    return 0;

fail_src:
    av_freep(&src_data[0]);
fail_ctx:
    sws_freeContext(sws_ctx);
    sws_ctx = NULL;
    return -1;
}

/*
 * Scale one YUV420P frame given as three separate planes (y/u/v of
 * src_w x src_h).  In the raw_data_test build the scaled frame is also
 * appended to dst_file.  Returns 0.
 */
int en_swscale_run(char *y, char *u, char *v, int src_w, int src_h, int dst_w, int dst_h)
{
    int row;
    const int c_w = src_w / 2; /* chroma plane width  (4:2:0 subsampling) */
    const int c_h = src_h / 2; /* chroma plane height (4:2:0 subsampling) */

    /* Copy plane by plane, row by row.  The source image was allocated with
     * 16-byte alignment, so src_linesize[] may exceed the plane width; the
     * previous single contiguous memcpy into src_data[0] silently corrupted
     * the picture whenever padding was present. */
    for (row = 0; row < src_h; row++)
        memcpy(src_data[0] + row * src_linesize[0], y + row * src_w, src_w);
    for (row = 0; row < c_h; row++) {
        memcpy(src_data[1] + row * src_linesize[1], u + row * c_w, c_w);
        memcpy(src_data[2] + row * src_linesize[2], v + row * c_w, c_w);
    }

    sws_scale(sws_ctx, (const uint8_t * const *)src_data, src_linesize,
              0, src_h, dst_data, dst_linesize);

#if raw_data_test
    /* dst_file only exists in the raw_data_test build; the fwrite used to
     * sit outside the #if and broke the raw_data_test==0 build */
    fwrite(dst_data[0], 1, dst_bufsize, dst_file);

    fprintf(stderr, "Scaling succeeded. Play the output file with the command:\n"
           "ffplay -f rawvideo -pix_fmt %s -video_size %dx%d %s\n",
           av_get_pix_fmt_name(dst_pix_fmt), dst_w, dst_h, dst_filename);
#endif

    return 0;
}

/*
 * Tear down everything en_swscale_init() (and, in the raw_data_test build,
 * main()) set up: the image buffers, the scaler context and the file
 * handles.  Always returns 0.
 *
 * NOTE(review): fclose() here assumes both files were opened; callers must
 * not invoke this in the raw_data_test build before the fopen() calls
 * succeed (fclose(NULL) is undefined behavior).
 */
int en_swscale_release()
{
    av_freep(&src_data[0]);   /* NULL-safe, also resets the pointer */
    av_freep(&dst_data[0]);
    sws_freeContext(sws_ctx); /* NULL-safe */
#if raw_data_test
    fclose(dst_file);
    fclose(src_file);
#endif
    return 0;
}

/*
 * Demo driver: scale every 1920x1080 YUV420P frame of src_filename down to
 * 1080x720 (raw_data_test build), or perform a single conversion of the
 * in-memory planes (library-style build).
 */
int main(int argc, char **argv)
{
    /* enum constants (true compile-time constants) so the plane buffers
     * below can have static storage duration */
    enum {
        SRC_W = 1920, SRC_H = 1080,
        DST_W = 1080, DST_H = 720
    };

    /* One YUV420P frame: Y is W*H bytes, U and V are W*H/4 each — ~3 MB
     * total.  The original VLAs risked overflowing a typical thread stack;
     * static buffers avoid that and are zero-initialized (the #else branch
     * previously scaled uninitialized stack memory). */
    static char y[SRC_W * SRC_H];
    static char u[SRC_W * SRC_H / 4];
    static char v[SRC_W * SRC_H / 4];

    /* return value was previously ignored */
    if (en_swscale_init(SRC_W, SRC_H, DST_W, DST_H) < 0)
        return -1;

#if raw_data_test
    src_file = fopen(src_filename, "rb");
    if (!src_file) {
        fprintf(stderr, "Could not open source file %s\n",
                src_filename); /* was dst_filename */
        return -1; /* scaler buffers reclaimed at process exit */
    }

    dst_file = fopen(dst_filename, "wb");
    if (!dst_file) {
        fprintf(stderr, "Could not open destination file %s\n", dst_filename);
        fclose(src_file);
        return -1;
    }

    /* fread's return count is the reliable end-of-data test; the old
     * `while (!feof(...))` loop converted a stale frame after the final
     * short read */
    while (fread(y, 1, sizeof y, src_file) == sizeof y &&   /* Y plane */
           fread(u, 1, sizeof u, src_file) == sizeof u &&   /* U plane */
           fread(v, 1, sizeof v, src_file) == sizeof v) {   /* V plane */
        en_swscale_run(y, u, v, SRC_W, SRC_H, DST_W, DST_H);
    }
#else
    en_swscale_run(y, u, v, SRC_W, SRC_H, DST_W, DST_H);
#endif

    /* previously only reached in the #else branch, so the test build never
     * closed dst_file or freed the scaler resources */
    en_swscale_release();

    return 0;
}

./swscale_demo 

ffplay -f rawvideo -pix_fmt yuv420p -video_size 1080x720 out_1080x720_yuv420p.yuv

 

这还有有个demo

https://blog.csdn.net/hsq1596753614/article/details/82229215

 

转载于:https://www.cnblogs.com/dong1/p/11051708.html

你可能感兴趣的:(YUV原始数据缩放和格式转换)