Saving I420 data as an image

Premise: convert a webrtc::VideoFrame into an image and save it to disk.

#include <string>
#include <vector>
#include <QImage>
#include <QPixmap>
#include <QString>
#include "api/video/video_frame.h"
#include "api/video/video_frame_buffer.h"

// Convert a single YUV pixel to RGB (ITU-R BT.601 integer approximation).
void YUV2RGB(unsigned char Y, unsigned char U, unsigned char V,
    unsigned char* R, unsigned char* G, unsigned char* B)
{
    int C = Y - 16;
    int D = U - 128;
    int E = V - 128;

    int r = (298 * C + 409 * E + 128) >> 8;
    int g = (298 * C - 100 * D - 208 * E + 128) >> 8;
    int b = (298 * C + 516 * D + 128) >> 8;

    // Clamp to [0, 255]; the integer approximation can overshoot.
    *R = (unsigned char)(r < 0 ? 0 : (r > 255 ? 255 : r));
    *G = (unsigned char)(g < 0 ? 0 : (g > 255 ? 255 : g));
    *B = (unsigned char)(b < 0 ? 0 : (b > 255 ? 255 : b));
}

void OnFrame(const webrtc::VideoFrame& frame)
{
    // Convert to I420 once and keep the buffer alive for the whole function.
    rtc::scoped_refptr<webrtc::I420BufferInterface> i420 =
        frame.video_frame_buffer()->ToI420();

    const uint8_t* Y = i420->DataY();  // luma plane: height rows of StrideY() bytes
    const uint8_t* U = i420->DataU();  // chroma plane: (height / 2) rows of StrideU() bytes
    const uint8_t* V = i420->DataV();  // chroma plane: (height / 2) rows of StrideV() bytes

    int width = frame.width();
    int height = frame.height();

    std::vector<unsigned char> rgb_data(width * height * 3);

    for (int i = 0; i < height; i++)
    {
        for (int j = 0; j < width; j++)
        {
            // I420: each U/V sample covers a 2x2 block of luma pixels, and
            // rows are addressed by their stride, which may exceed the width.
            int Y_idx = i * i420->StrideY() + j;
            int U_idx = (i / 2) * i420->StrideU() + (j / 2);
            int V_idx = (i / 2) * i420->StrideV() + (j / 2);
            unsigned char R, G, B;

            YUV2RGB(Y[Y_idx], U[U_idx], V[V_idx], &R, &G, &B);

            int RGB_idx = (i * width + j) * 3;

            rgb_data[RGB_idx] = R;
            rgb_data[RGB_idx + 1] = G;
            rgb_data[RGB_idx + 2] = B;
        }
    }

    // Pass the exact bytes-per-line; otherwise QImage assumes 32-bit aligned rows.
    QImage image(rgb_data.data(), width, height, width * 3, QImage::Format_RGB888);

    // Use a counter separate from the loop variable for the output file name.
    static int frame_index = 0;
    std::string fileName = "D:/Picture/output" + std::to_string(frame_index) + ".jpg";
    QPixmap pixmap;
    pixmap.convertFromImage(image);
    pixmap.save(QString::fromStdString(fileName));
    ++frame_index;
}
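
For context, OnFrame here has the signature used by rtc::VideoSinkInterface<webrtc::VideoFrame>, so the usual way to receive frames is to put the code above into a sink class and attach it to a video track. A minimal sketch under that assumption (the class name FrameSaver and the video_track variable are illustrative, not from the original code):

#include "api/media_stream_interface.h"
#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"

// Hypothetical sink that saves each incoming frame using the code above.
class FrameSaver : public rtc::VideoSinkInterface<webrtc::VideoFrame>
{
public:
    void OnFrame(const webrtc::VideoFrame& frame) override
    {
        // ... I420 -> RGB conversion and QPixmap save, as shown above ...
    }
};

// Registration on an existing webrtc::VideoTrackInterface* video_track:
// FrameSaver saver;
// video_track->AddOrUpdateSink(&saver, rtc::VideoSinkWants());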
