I. About VCD/SVCD/DVD
1. VCD uses the MPEG-1 standard, with a horizontal resolution of about 250 lines, roughly the level of an ordinary VHS recorder. VCD offers a single stereo output; for karaoke the two channels can carry the original vocal and the pure accompaniment separately.
2. SVCD uses the MPEG-2 standard, with a resolution of about 350 lines. SVCD can provide 2 stereo channels or 4 mono channels.
3. DVD also uses the MPEG-2 standard, with a resolution of about 500 lines. DVD can provide 2 stereo channels plus a 5.1-channel Dolby AC-3 surround track, giving high-quality surround sound.
In addition, VCD, SVCD and DVD all support extras such as multi-language subtitles. All three use 12 cm discs, but their capacities differ: a DVD disc holds the most, over 120 minutes of video; an SVCD disc holds the least, about 45 minutes; and a VCD disc sits in between, about 74 minutes. For example, storing the same 120-minute feature film takes one DVD disc, two VCD discs, or three to four SVCD discs.
II. About the PAL and NTSC standards
NTSC and PAL are the two main television broadcast standards in the world; they differ mainly in frame rate and line count. NTSC stands for National Television System Committee and is used mainly in Japan, the United States, Canada and Mexico; it runs at 29.97 frames per second (usually rounded to 30), with 525 lines per frame (60 Hz field rate). PAL stands for Phase Alternating Line and is used mainly in mainland China, Hong Kong, the Middle East and Europe; it runs at 25 frames per second, with 625 lines per frame (50 Hz field rate). The two standards are not compatible: playing NTSC material on a PAL-only television typically loses the color and the picture shows in black and white, and the same happens the other way around.
III. Writing MPEG-1/MPEG-2 with FFmpeg; the code is as follows:
1. This is part of the code I wrote in May 2006. It does not support audio yet, because FFmpeg's audio handling was still fairly limited at the time, and background sound is not supported either; I will add it later, so follow this post if you are interested.
2. Prebuilt FFmpeg DLLs can be downloaded directly from http://www.chinavideo.org/.
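As a point of reference, the ffmpeg command-line tool can produce the same disc formats directly through its -target option (for example, ffmpeg -i input.avi -target pal-dvd output.mpg); the class below implements a similar set of presets through the library API.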
//STMPEG.h
#pragma once
#include "avformat.h"
class CSTMPEG
{
public:
CSTMPEG();
~CSTMPEG(void);
// Open a file for writing MPEG
bool OpenMPEG(const char* strfile);
// Write one frame of image data to the MPEG file
int AddFrame(int width, int height, int bpp, uint8_t* pRGBBuffer);
// Release resources
void CloseMPEG();
enum MPEG_TYPE{
VCD_PAL,
VCD_NTSC,
SVCD_PAL,
SVCD_NTSC,
DVD_PAL,
DVD_NTSC,
CUSTOM_MPEG1,
CUSTOM_MPEG2
};
// Set the MPEG type
void SetMPEGFormat(MPEG_TYPE type);
// Set the picture width (only honored for the CUSTOM_* formats)
void SetMPEGWidth(int width);
// Set the picture height (only honored for the CUSTOM_* formats)
void SetMPEGHeight(int height);
// Set the frame rate in fps (only honored for the CUSTOM_* formats)
void SetFrameRate(float rate);
// Set the video bit rate (kbit/s), e.g. 3000, 4000, 6000, 8000
void SetMPEGVideoBitRate(float rate);
private:
void InitMPEGData(MPEG_TYPE type);
// Flip the image vertically (Windows DIBs are stored bottom-up) and copy the BGR bytes into the frame
void RGBBuffer2RGBFrame(AVFrame *pRGBFrame, int width, int height, int bpp, uint8_t* pRGBBuffer);
AVFrame *alloc_picture(int pix_fmt, int width, int height);
bool open_video(AVFormatContext *pFormatContext, AVStream *pVideoStream);
AVStream *add_video_stream(AVFormatContext *pFormatContext, int codec_id);
bool write_video_frame(AVFormatContext *pFormatcontext, AVStream *pVideoStream);
void close_video(AVFormatContext *pFormatcontext, AVStream *pVideoStream);
private:
MPEG_TYPE m_MPEGType;
int m_nWidth, m_nHeight;
float m_fFrameRate;
float m_fBitRate;
int m_nFrame;
AVFrame *m_pRGBFrame; // RGB frame data
AVFrame *m_pYUVFrame; // YUV frame data
uint8_t* m_pOutBuf; // one encoded frame is written into this buffer before being written to the file
const int m_nOutBufSize; // size of the encoding buffer
// format/stream objects used to write the file
AVOutputFormat *m_pOutputFormat;
AVFormatContext *m_pFormatContext;
AVStream *m_pVideoStream;
AVStream *m_pAudioStream;
};
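Before moving on to the implementation, here is a minimal usage sketch showing the intended call sequence of the class above. It is only a sketch: FillTestPattern() is a hypothetical helper that fills a bottom-up 24-bit BGR buffer, and error handling is kept to a minimum.

// Minimal usage sketch of CSTMPEG (FillTestPattern() is a hypothetical frame source).
void WriteSamplePALVCD()
{
	const int w = 352, h = 288, bpp = 24;
	const int stride = ((w * bpp + 31) / 32) * 4;   // DIB rows are padded to 4 bytes
	uint8_t* pBuffer = new uint8_t[stride * h];

	CSTMPEG mpeg;
	mpeg.SetMPEGFormat(CSTMPEG::VCD_PAL);           // 352x288 at 25 fps
	if (mpeg.OpenMPEG("test.mpg")) {
		for (int i = 0; i < 250; ++i) {             // 10 seconds at 25 fps
			FillTestPattern(pBuffer, w, h, bpp, i); // hypothetical: draw frame i into pBuffer
			mpeg.AddFrame(w, h, bpp, pBuffer);
		}
		mpeg.CloseMPEG();
	}
	delete [] pBuffer;
}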
//STMPEG.cpp
#include "StdAfx.h"
#include "stmpeg.h"
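// Row stride of a Windows DIB: every scan line is padded up to a multiple of 4 bytes.
// For example LineWidthBytes(353, 24) = (353*24 + 31) / 32 * 4 = 1060, whereas 353*3 = 1059.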
#define LineWidthBytes(biWidth, biBitCount) ((((biWidth) * (biBitCount) + 31) / 32) * 4)
CSTMPEG::CSTMPEG(): m_nOutBufSize(200000)
{
m_MPEGType = VCD_PAL;
m_nWidth = 352;
m_nHeight = 288;
m_fFrameRate = 25;
m_fBitRate = 800000;
m_nFrame = -1;
m_pRGBFrame = NULL;
m_pYUVFrame = NULL;
m_pFormatContext= NULL;
m_pVideoStream = NULL;
m_pAudioStream = NULL;
}
CSTMPEG::~CSTMPEG(void)
{
}
void CSTMPEG::InitMPEGData(MPEG_TYPE type)
{
switch(type) {
case VCD_PAL:
{
m_nWidth = 352;
m_nHeight = 288;
m_fFrameRate = 25;
}
break;
case VCD_NTSC:
{
m_nWidth = 352;
m_nHeight = 240;
m_fFrameRate = 29.97;
}
break;
case SVCD_PAL:
{
m_nWidth = 480;
m_nHeight = 576;
m_fFrameRate = 25;
}
break;
case SVCD_NTSC:
{
m_nWidth = 480;
m_nHeight = 480;
m_fFrameRate = 23.976;
}
break;
case DVD_PAL:
{
m_nWidth = 720;
m_nHeight = 576;
m_fFrameRate = 25;
}
break;
case DVD_NTSC:
{
m_nWidth = 720;
m_nHeight = 480;
m_fFrameRate = 29.97;
}
break;
default:
break;
}
}
void CSTMPEG::SetMPEGFormat(MPEG_TYPE type)
{
if (type != m_MPEGType) {
m_MPEGType = type;
InitMPEGData(type);
}
}
void CSTMPEG::SetMPEGWidth(int width)
{
if (m_MPEGType == CUSTOM_MPEG1 || m_MPEGType == CUSTOM_MPEG2 )
m_nWidth = width;
}
void CSTMPEG::SetMPEGHeight(int height)
{
if (m_MPEGType == CUSTOM_MPEG1 || m_MPEGType == CUSTOM_MPEG2 )
m_nHeight = height;
}
void CSTMPEG::SetFrameRate(float rate)
{
if (m_MPEGType == CUSTOM_MPEG1 || m_MPEGType == CUSTOM_MPEG2 )
m_fFrameRate = rate;
}
void CSTMPEG::SetMPEGVideoBitRate(float rate)
{
m_fBitRate = rate * 1000;
}
void CSTMPEG::RGBBuffer2RGBFrame(AVFrame *pRGBFrame, int width, int height, int bpp, uint8_t* pRGBBuffer)
{
ASSERT( pRGBFrame && pRGBBuffer);
int linebytes = LineWidthBytes(width, bpp);
int nPixels = bpp / 8;
int x, y;
for(y=0;y<height;y++) {
for(x=0;x<width;x++) {
pRGBFrame->data[0][y * pRGBFrame->linesize[0] + x * 3 ] =
pRGBBuffer[ (height - y - 1) * linebytes + x * nPixels ]; //B
pRGBFrame->data[0][y * pRGBFrame->linesize[0] + x * 3 + 1 ] =
pRGBBuffer[ (height - y - 1) * linebytes + x * nPixels + 1]; //G
pRGBFrame->data[0][y * pRGBFrame->linesize[0] + x * 3 + 2 ] =
pRGBBuffer[ (height - y - 1) * linebytes + x * nPixels + 2]; //R
}
}
}
AVFrame *CSTMPEG::alloc_picture(int pix_fmt, int width, int height)
{
AVFrame* picture = avcodec_alloc_frame();
if (!picture)
return NULL;
int size = avpicture_get_size(pix_fmt, width, height);
uint8_t *picture_buf = (uint8_t *)malloc(size);
if (!picture_buf) {
av_free(picture);
return NULL;
}
avpicture_fill((AVPicture *)picture, picture_buf,
pix_fmt, width, height);
return picture;
}
bool CSTMPEG::OpenMPEG(const char* strfile)
{
av_register_all();
switch(m_MPEGType) {
case VCD_PAL:
case VCD_NTSC:
m_pOutputFormat = guess_format("vcd", NULL, NULL);
break;
case SVCD_PAL:
case SVCD_NTSC:
m_pOutputFormat = guess_format("svcd", NULL, NULL);
break;
case DVD_PAL:
case DVD_NTSC:
m_pOutputFormat = guess_format("dvd", NULL, NULL);
break;
default:
m_pOutputFormat = guess_format(NULL, strfile, NULL);
break;
}
// Fall back to the generic MPEG muxer if no output format could be guessed
if ( NULL == m_pOutputFormat)
m_pOutputFormat = guess_format("mpeg", NULL, NULL);
if ( NULL == m_pOutputFormat)
return false;
// Allocate the format context
m_pFormatContext = av_alloc_format_context();
if ( NULL == m_pFormatContext )
return false;
m_pFormatContext->oformat = m_pOutputFormat;
// filename is a plain char buffer, so use the narrow-character version here
_snprintf(m_pFormatContext->filename, sizeof(m_pFormatContext->filename), "%s", strfile);
// Create the video stream and set up its codec context
if (m_pOutputFormat->video_codec != CODEC_ID_NONE)
m_pVideoStream = add_video_stream(m_pFormatContext, m_pOutputFormat->video_codec);
// Create the audio stream and its codec context (audio is not supported yet)
// if (m_pOutputFormat->audio_codec != CODEC_ID_NONE)
// m_pAudioStream = add_audio_stream(m_pFormatContext, m_pOutputFormat->audio_codec);
if (av_set_parameters(m_pFormatContext, NULL) < 0)
return false;
// Open the codec and allocate the frame buffers
if (m_pVideoStream){
if( !open_video(m_pFormatContext, m_pVideoStream))
return false;
}
// if (m_pAudioStream)
// open_audio(m_pFormatContext, m_pAudioStream);
if ( !(m_pOutputFormat->flags & AVFMT_NOFILE) ) {
if (url_fopen(&m_pFormatContext->pb, strfile, URL_WRONLY) < 0) {
return false;
}
}
// Write the file header
av_write_header(m_pFormatContext);
return true;
}
AVStream* CSTMPEG::add_video_stream(AVFormatContext *pFormatContext, int codec_id)
{
ASSERT(pFormatContext);
AVStream *pStream = av_new_stream(pFormatContext, 0);
if ( NULL == pStream )
return NULL;
AVCodecContext *pCodecContext = pStream->codec;
pCodecContext->codec_id = (CodecID)codec_id;
pCodecContext->codec_type = CODEC_TYPE_VIDEO;
pCodecContext->bit_rate = 1150000; // standard VCD video bit rate; TODO: use m_fBitRate for the custom formats
pCodecContext->width = m_nWidth;
pCodecContext->height = m_nHeight;
// time_base is the inverse of the frame rate; the fractional NTSC rate (29.97 fps)
// has to be expressed as the exact rational 1001/30000, everything else is rounded
if (m_fFrameRate > 29.0f && m_fFrameRate < 30.0f) {
pCodecContext->time_base.den = 30000;
pCodecContext->time_base.num = 1001;
} else {
pCodecContext->time_base.den = (int)(m_fFrameRate + 0.5f);
pCodecContext->time_base.num = 1;
}
pCodecContext->gop_size = 18;
pCodecContext->pix_fmt = PIX_FMT_YUV420P;
if (pCodecContext->codec_id == CODEC_ID_MPEG2VIDEO)
pCodecContext->max_b_frames = 2;
if (pCodecContext->codec_id == CODEC_ID_MPEG1VIDEO)
pCodecContext->mb_decision=2;
if(!strcmp(pFormatContext->oformat->name, "mp4") || !strcmp(pFormatContext->oformat->name, "mov") ||
!strcmp(pFormatContext->oformat->name, "3gp"))
pCodecContext->flags |= CODEC_FLAG_GLOBAL_HEADER;
return pStream;
}
bool CSTMPEG::open_video(AVFormatContext *pFormatContext, AVStream *pVideoStream)
{
AVCodec *pCodec = NULL;
AVCodecContext *pCodecContext = NULL;
pCodecContext = pVideoStream->codec;
// find the video encoder
pCodec = avcodec_find_encoder(pCodecContext->codec_id);
if (NULL == pCodec)
return false;
// open the codec
if (avcodec_open(pCodecContext, pCodec) < 0) return false;
m_pYUVFrame = alloc_picture(pCodecContext->pix_fmt, pCodecContext->width, pCodecContext->height);
if (!m_pYUVFrame) return false;
m_pRGBFrame = alloc_picture(PIX_FMT_BGR24, pCodecContext->width, pCodecContext->height);
if (!m_pRGBFrame) return false;
m_pOutBuf = (uint8_t*)malloc(m_nOutBufSize);
if (!m_pOutBuf) return false;
return true;
}
int CSTMPEG::AddFrame(int width, int height, int bpp, uint8_t* pRGBBuffer)
{
ASSERT(pRGBBuffer);
RGBBuffer2RGBFrame(m_pRGBFrame, width, height, bpp, pRGBBuffer);
if ( false == write_video_frame(m_pFormatContext, m_pVideoStream) ) {
return -1;
}
m_nFrame++;
return 0;
}
bool CSTMPEG::write_video_frame(AVFormatContext *pFormatcontext, AVStream *pVideoStream)
{
ASSERT(pFormatcontext && pVideoStream);
int ret;
AVCodecContext *pCodecContext = pVideoStream->codec;
img_convert( (AVPicture *)m_pYUVFrame, pCodecContext->pix_fmt, (AVPicture *)m_pRGBFrame,
PIX_FMT_BGR24, pCodecContext->width, pCodecContext->height);
int out_size = avcodec_encode_video(pCodecContext, m_pOutBuf, m_nOutBufSize, m_pYUVFrame);
if (out_size > 0){
AVPacket pkt;
av_init_packet(&pkt);
pkt.pts = av_rescale_q(pCodecContext->coded_frame->pts, pCodecContext->time_base,
pVideoStream->time_base);
if( pCodecContext->coded_frame->key_frame )
pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index = pVideoStream->index;
pkt.data= m_pOutBuf;
pkt.size= out_size;
ret = av_write_frame(pFormatcontext, &pkt);
}else{
ret = 0;
}
if (ret != 0) return false;
return true;
}
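// Note: img_convert() above was removed from later FFmpeg releases. On builds that
// ship libswscale instead, the conversion step in write_video_frame() could be
// replaced with something along these lines (a sketch only, untested here; it also
// needs #include "swscale.h" at the top of the file):
static struct SwsContext *g_pSwsContext = NULL;
static void ConvertBGRToYUV(AVFrame *pRGBFrame, AVFrame *pYUVFrame, int width, int height)
{
	if (NULL == g_pSwsContext)
		g_pSwsContext = sws_getContext(width, height, PIX_FMT_BGR24,
		                               width, height, PIX_FMT_YUV420P,
		                               SWS_BICUBIC, NULL, NULL, NULL);
	sws_scale(g_pSwsContext, pRGBFrame->data, pRGBFrame->linesize,
	          0, height, pYUVFrame->data, pYUVFrame->linesize);
}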
void CSTMPEG::close_video(AVFormatContext *pFormatcontext, AVStream *pVideoStream)
{
avcodec_close(pVideoStream->codec);
if (m_pYUVFrame) {
free(m_pYUVFrame->data[0]);
av_free(m_pYUVFrame);
}
if (m_pRGBFrame) {
free(m_pRGBFrame->data[0]);
av_free(m_pRGBFrame);
}
free(m_pOutBuf);
}
void CSTMPEG::CloseMPEG()
{
// Close the codec and free the frame buffers
close_video(m_pFormatContext, m_pVideoStream);
av_write_trailer(m_pFormatContext);
// Free the codec contexts and the stream buffers
for(int i = 0; i < m_pFormatContext->nb_streams; i++) {
av_freep(&m_pFormatContext->streams[i]->codec);
av_freep(&m_pFormatContext->streams[i]);
}
if ( !(m_pOutputFormat->flags & AVFMT_NOFILE) )
url_fclose( &m_pFormatContext->pb );
// Free the format context
av_free(m_pFormatContext);
}
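One limitation worth noting: when B-frames are enabled (the MPEG-2 cases above set max_b_frames to 2), the encoder keeps a few frames buffered internally, and CloseMPEG() as written never drains them, so the last frames of a clip can be lost. Below is a rough sketch of such a flush helper; it is hypothetical rather than part of the class above, so it would also need a declaration in STMPEG.h and a call at the top of CloseMPEG() before close_video().

// Sketch: drain frames still buffered in the encoder (B-frame delay) before closing.
void CSTMPEG::FlushDelayedFrames()
{
	AVCodecContext *pCodecContext = m_pVideoStream->codec;
	int out_size;
	// Passing NULL instead of a picture tells the encoder to emit its queued frames.
	while ((out_size = avcodec_encode_video(pCodecContext, m_pOutBuf, m_nOutBufSize, NULL)) > 0) {
		AVPacket pkt;
		av_init_packet(&pkt);
		pkt.pts = av_rescale_q(pCodecContext->coded_frame->pts,
		                       pCodecContext->time_base, m_pVideoStream->time_base);
		if (pCodecContext->coded_frame->key_frame)
			pkt.flags |= PKT_FLAG_KEY;
		pkt.stream_index = m_pVideoStream->index;
		pkt.data = m_pOutBuf;
		pkt.size = out_size;
		av_write_frame(m_pFormatContext, &pkt);
	}
}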
IV. Example calling code:
/* CSTMpegFile is a proxy class I wrote to wrap the implementation of CSTMpeg, separating the interface from the implementation so that CSTMpeg can be packaged as a standalone component. */
CSTMpegFile* m_pMpegFile;
// Start capturing video and initialize the MPEG parameters
bool SceneViewer::CaptureVideo(IVideoValueObject& value)
{
SAFE_DELETE(m_pMpegFile);
m_pMpegFile = new CSTMpegFile;
m_pMpegFile->SetMPEGFormat( CSTMpegFile::MPEG_TYPE(value.m_nMpegFormat) );
m_pMpegFile->SetMPEGWidth(viewerW);
m_pMpegFile->SetMPEGHeight(viewerH);
m_pMpegFile->SetFrameRate(value.m_fFrameRate );
m_pMpegFile->SetMPEGVideoBitRate(value.m_fBitRate);
char *pAnsi = new char[MAX_PATH];
// Convert the wide-character path to a multi-byte string
WideCharToMultiByte(CP_ACP, 0, (LPCTSTR)value.m_strVideoPath, -1, pAnsi, MAX_PATH, NULL,NULL);
BOOL bRet = m_pMpegFile->OpenMPEG(pAnsi);
bCaptureVideo = (FALSE != bRet);
delete [] pAnsi; // allocated with new[], so it needs delete[] rather than SAFE_DELETE
return bRet;
}
// Grab a single frame; called from the animation refresh function
void SceneViewer::CaptureFrame()
{
m_pMpegFile->AddFrame(viewerW, viewerH, 32, viewerBits);
}
// Capture finished, release resources; called when the animation refresh ends
void SceneViewer::EndCapture()
{
m_pMpegFile->CloseMPEG();
bCaptureVideo = false;
}
Reposted from http://blog.csdn.net/still05017/article/details/1487141