Using libyuv

Convert BGRA to I420 with ARGBToI420, scale the I420 planes with I420Scale, then convert the result to NV12 into the destination CVPixelBuffer

#include "libyuv.h"                      //libyuv conversion / scaling APIs
#import <CoreVideo/CoreVideo.h>          //CVPixelBufferRef
#import <CoreGraphics/CoreGraphics.h>    //CGSize

int transfer_32bgra_to_I420_ScaleToSize(CVPixelBufferRef source_pixelBuffer, CGSize targetSize, CVPixelBufferRef dst_pixelBuffer) {
    
    CVPixelBufferLockBaseAddress(source_pixelBuffer, 0);
    CVPixelBufferLockBaseAddress(dst_pixelBuffer, 0);
    
    //source-size
    int width = (int)CVPixelBufferGetWidth(source_pixelBuffer);    //image width in pixels
    int height = (int)CVPixelBufferGetHeight(source_pixelBuffer);  //image height in pixels
    uint8_t *rgbaBuffer = (uint8_t *)CVPixelBufferGetBaseAddress(source_pixelBuffer);
    int rgbaStride = (int)CVPixelBufferGetBytesPerRow(source_pixelBuffer);  //may be larger than width * 4 if rows are padded
    
    int yuvBufSize = width * height * 3 / 2;    //I420: full-size Y plane + quarter-size U and V planes
    uint8_t* yuvBuf = new uint8_t[yuvBufSize];
    
    //source-stride
    int Dst_Stride_Y = width;
    const int uv_stride = (width + 1) / 2;
    
    //source-length
    const int y_length = width * height;
    int uv_length = uv_stride * ((height+1) / 2);
    
    //source-data
    unsigned char *Y_data_Dst = yuvBuf;
    unsigned char *U_data_Dst = yuvBuf + y_length;
    unsigned char *V_data_Dst = U_data_Dst + uv_length;
    
    //The pixels are BGRA in memory; libyuv names formats by word order, so BGRA byte order maps to the ARGB* functions
    libyuv::ARGBToI420(rgbaBuffer,
                       rgbaStride,
                       Y_data_Dst, Dst_Stride_Y,
                       U_data_Dst, uv_stride,
                       V_data_Dst, uv_stride,
                       width, height);

    //scale-size
    int scale_yuvBufSize = (int)targetSize.width * (int)targetSize.height * 3 / 2;
    uint8_t* scale_yuvBuf = new uint8_t[scale_yuvBufSize];
    
    //scale-stride
    int scale_Dst_Stride_Y = (int)targetSize.width;
    const int scale_uv_stride = ((int)targetSize.width + 1) / 2;
    
    //scale-length (do this math in integers; CGFloat arithmetic here over-counts the U/V plane size)
    const int scale_y_length = (int)targetSize.width * (int)targetSize.height;
    int scale_uv_length = scale_uv_stride * (((int)targetSize.height + 1) / 2);
    
    //scale-data
    unsigned char *scale_Y_data_Dst = scale_yuvBuf;
    unsigned char *scale_U_data_Dst = scale_yuvBuf + scale_y_length;
    unsigned char *scale_V_data_Dst = scale_U_data_Dst + scale_uv_length;
    
    libyuv::I420Scale(Y_data_Dst, Dst_Stride_Y,
                      U_data_Dst, uv_stride,
                      V_data_Dst, uv_stride,
                      width, height,
                      scale_Y_data_Dst, scale_Dst_Stride_Y,
                      scale_U_data_Dst, scale_uv_stride,
                      scale_V_data_Dst, scale_uv_stride,
                      targetSize.width, targetSize.height,
                      libyuv::kFilterNone);
    
    //final-data: take the destination plane pointers and strides from the pixel buffer itself (rows may be padded)
    uint8_t *final_y_buffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dst_pixelBuffer, 0);
    int final_y_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(dst_pixelBuffer, 0);
    uint8_t *final_uv_buffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dst_pixelBuffer, 1);
    int final_uv_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(dst_pixelBuffer, 1);
    
    libyuv::I420ToNV12(scale_Y_data_Dst, scale_Dst_Stride_Y,
                       scale_U_data_Dst, scale_uv_stride,
                       scale_V_data_Dst, scale_uv_stride,
                       final_y_buffer, final_y_stride,
                       final_uv_buffer, final_uv_stride,  //NV12 interleaves U and V in one plane, so this stride is roughly 2 * uv_stride
                       (int)targetSize.width, (int)targetSize.height);
    
    CVPixelBufferUnlockBaseAddress(source_pixelBuffer, 0);
    CVPixelBufferUnlockBaseAddress(dst_pixelBuffer, 0);
    
    delete [] yuvBuf;
    delete [] scale_yuvBuf;
    
    return yuvBufSize;
}
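
A minimal calling sketch follows, assuming the source CVPixelBuffer already holds kCVPixelFormatType_32BGRA pixels; the sourceBuffer variable and the 640x360 target size are placeholders. The destination must be a bi-planar (NV12) pixel buffer, created here with CVPixelBufferCreate.

//Usage sketch (assumption: sourceBuffer is a 32BGRA CVPixelBufferRef obtained elsewhere)
CGSize targetSize = CGSizeMake(640, 360);
CVPixelBufferRef nv12Buffer = NULL;
CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault,
                                   (size_t)targetSize.width,
                                   (size_t)targetSize.height,
                                   kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,  //NV12, video range
                                   NULL,  //pass pixel buffer attributes (e.g. IOSurface properties) here if needed
                                   &nv12Buffer);
if (ret == kCVReturnSuccess) {
    transfer_32bgra_to_I420_ScaleToSize(sourceBuffer, targetSize, nv12Buffer);
    //...hand nv12Buffer to the encoder...
    CVPixelBufferRelease(nv12Buffer);
}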

Scale the BGRA data directly with ARGBScale, then convert straight to NV12 with ARGBToNV12

int transfer_32bgra_to_NV12_ScaleToSize(CVPixelBufferRef source_pixelBuffer,CGSize targetSize,CVPixelBufferRef dst_pixelBuffer) {
    
    CVPixelBufferLockBaseAddress(source_pixelBuffer, 0);
    CVPixelBufferLockBaseAddress(dst_pixelBuffer, 0);
    
    //source-size
    int width = (int)CVPixelBufferGetWidth(source_pixelBuffer);    //image width in pixels
    int height = (int)CVPixelBufferGetHeight(source_pixelBuffer);  //image height in pixels
    uint8_t *rgbaBuffer = (uint8_t *)CVPixelBufferGetBaseAddress(source_pixelBuffer);
    int rgbaStride = (int)CVPixelBufferGetBytesPerRow(source_pixelBuffer);  //may be larger than width * 4 if rows are padded
    
    //scale-size: the intermediate BGRA buffer must hold the *target* dimensions, not the source
    int scaled_rgb_length = (int)targetSize.width * (int)targetSize.height * 4;
    uint8_t* argb_scale_dst_buf = new uint8_t[scaled_rgb_length];
    
    //BGRA--Scale
    libyuv::ARGBScale(rgbaBuffer, rgbaStride,
                      width, height,
                      argb_scale_dst_buf, targetSize.width * 4,
                      targetSize.width, targetSize.height,
                      libyuv::kFilterNone);
    
    //yuv-stride: take the destination plane strides from the pixel buffer itself (rows may be padded)
    int Dst_Stride_Y = (int)CVPixelBufferGetBytesPerRowOfPlane(dst_pixelBuffer, 0);
    int Dst_Stride_UV = (int)CVPixelBufferGetBytesPerRowOfPlane(dst_pixelBuffer, 1);  //NV12 interleaves U and V, so this plane is roughly width bytes per row

    //yuv-data
    uint8_t *final_y_buffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dst_pixelBuffer, 0);
    uint8_t *final_uv_buffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dst_pixelBuffer, 1);
    
    //trans
    libyuv::ARGBToNV12(argb_scale_dst_buf, (int)targetSize.width * 4,
                       final_y_buffer, Dst_Stride_Y,
                       final_uv_buffer, Dst_Stride_UV,
                       targetSize.width, targetSize.height);
    
    CVPixelBufferUnlockBaseAddress(source_pixelBuffer, 0);
    CVPixelBufferUnlockBaseAddress(dst_pixelBuffer, 0);
    
    delete [] argb_scale_dst_buf;
    
    return 0;
}
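
Both helpers produce the same NV12 output; the second one skips the intermediate I420 buffers by scaling in BGRA first and converting straight to NV12 with ARGBToNV12, so it allocates less memory and makes one conversion pass instead of two. Both snippets use libyuv::kFilterNone (nearest-neighbor); if downscale quality matters, libyuv::kFilterBilinear or libyuv::kFilterBox can be passed instead.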
