Raspberry Pi 4: H.264 encoding test program using a USB 3.0 HDMI video capture card
/*=============================================================================
# Raspberry Pi 4: H.264 encoding test using a USB 3.0 HDMI video capture card
# Captures YUYV422 frames from V4L2, converts each one to YUV420P in an
# AVFrame *frame, and encodes the result into a raw H.264 stream (test.264).
# Compile: gcc ./v4l2_encode_2.c -o v4l2_encode_2 -lavcodec -lavutil
=============================================================================*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <assert.h>
#include <inttypes.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <sys/select.h>
#include <linux/videodev2.h>
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
#define FILE_VIDEO "/dev/video0"
#define WIDTH 1920
#define HEIGHT 1080
#define LOOP 30*60*60*2 // two hours of frames at 30 fps
#define BIT_RATE 1024000*4
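/* Note: FILE_VIDEO, WIDTH/HEIGHT and BIT_RATE are hard-coded for this test; they may
   need to be adjusted if the capture card enumerates as a different /dev/videoN node
   or delivers a different resolution. */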
typedef struct{
void *start;
int length;
}BUFTYPE;
BUFTYPE *usr_buf;
static unsigned int n_buffer = 0;
struct timeval first_time, second_time; // used to measure the frame rate from the elapsed time
int j=0; // captured-frame counter; timing starts when the first frame arrives
const char *filename = "test.264";
const char *codec_name = "h264_omx"; // libx264 = software encoding, h264_omx = Raspberry Pi hardware encoding
const AVCodec *codec;
AVCodecContext *c= NULL;
FILE *f;
AVFrame *frame;
AVPacket *pkt;
uint8_t endcode[] = { 0, 0, 1, 0xb7 };
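/* 0x000001B7 is the MPEG end-of-sequence code used in FFmpeg's encoding example;
   this program never writes it to the output, so it is effectively unused here. */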
/* Convert packed YUYV422 to planar YUV420P.
   Assumes even width/height and a tightly packed input (width*2 bytes per line);
   chroma is downsampled by averaging U/V over each pair of lines. */
void YUYV422toYUV420P(uint8_t *yuyv422, int width, int height, uint8_t *yuv420p, int length) {
int i, j;
uint8_t *pY = yuv420p;
uint8_t *pU = yuv420p + width * height;
uint8_t *pV = pU + (width * height) / 4;
uint8_t *pYUVTemp = yuyv422;
uint8_t *pYUVTempNext = yuyv422+width * 2;
for (i = 0; i < height; i += 2) {
for (j = 0; j < width; j += 2) {
pY[j] = *pYUVTemp++;
pY[j + width] = *pYUVTempNext++;
pU[j / 2] =(*(pYUVTemp) + *(pYUVTempNext)) / 2;
pYUVTemp++;
pYUVTempNext++;
pY[j + 1] = *pYUVTemp++;
pY[j + 1 + width] = *pYUVTempNext++;
pV[j / 2] =(*(pYUVTemp) + *(pYUVTempNext)) / 2;
pYUVTemp++;
pYUVTempNext++;
}
pYUVTemp += width * 2;
pYUVTempNext += width * 2;
pY += width * 2;
pU += width / 2;
pV += width / 2;
}
}
static void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt, FILE *outfile)
{
int ret;
int key_frame=0;
ret = avcodec_send_frame(enc_ctx, frame);
if (ret < 0) {
fprintf(stderr, "Error sending a frame for encoding\n");
exit(1);
}
while (ret >= 0) {
ret = avcodec_receive_packet(enc_ctx, pkt);
if(ret == AVERROR_EOF){
return;
}else if (ret == AVERROR(EAGAIN)){
return;
}else if (ret < 0) {
fprintf(stderr, "Error during encoding\n");
exit(1);
}
if(pkt->flags & AV_PKT_FLAG_KEY){ // by aphero
key_frame=1;
printf("@-"); // this packet is a key frame
}
//if(!(pkt->pts % 30)){ // report statistics once every 30 encoded packets
gettimeofday(&second_time, NULL);
double time_val = (second_time.tv_sec - first_time.tv_sec) * 1000000 + second_time.tv_usec - first_time.tv_usec;
//printf("time_val(ms) = %lf\n", time_val / 1000.0);
printf("+encode packet %3"PRId64" (size=%7d) time(ms) = %lf fps=%lf\n", pkt->pts, pkt->size,time_val / 1000.0, (pkt->pts)*1000.0/(time_val/1000.0));
//}
fwrite(pkt->data, 1, pkt->size, outfile); // write the encoded H.264 data to the output file
av_packet_unref(pkt);
}
}
int openencoder()
{
int ret;
avcodec_register_all();
codec = avcodec_find_encoder_by_name(codec_name);
if (!codec) {
fprintf(stderr, "Codec '%s' not found\n", codec_name);
exit(1);
}
c = avcodec_alloc_context3(codec);
if (!c) {
fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
pkt = av_packet_alloc();
if (!pkt){
printf("av_packet_alloc error\n");
exit(1);
}
c->bit_rate = BIT_RATE; // target bit rate
c->width = WIDTH; // video width
c->height = HEIGHT; // video height
c->time_base.num = 1; // frame rate: 30 fps
c->time_base.den = 30; // (time base 1/30)
c->thread_count = 4; // number of encoder threads
c->gop_size = 30; // I-frame interval (GOP size)
c->max_b_frames = 0; // number of B-frames
c->pix_fmt = AV_PIX_FMT_YUV420P; // the Raspberry Pi encoder in ffmpeg does not accept AV_PIX_FMT_YUYV422 input
if (codec->id == AV_CODEC_ID_H264){
av_opt_set(c->priv_data, "profile", "baseline", 0); // H.264 profile: baseline/main/high
av_opt_set(c->priv_data, "zerocopy", "0", 0); // omx.c in ffmpeg-3.4.6 was patched so the Pi no longer forces s->input_zerocopy = 1
//c->me_range = 16;
//c->qmin = 10;
//c->qmax = 51;
//c->max_qdiff = 4;
//av_opt_set(c->priv_data, "preset", "ultrafast", 0); // ultrafast, superfast, veryfast, faster, fast, medium, slow, slower, veryslow
//av_opt_set(c->priv_data, "tune", "zerolatency", 0);
}
//c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; // if set, the first packet would not carry SPS/PPS; they would be stored in extradata instead
ret = avcodec_open2(c, codec, NULL);
if (ret < 0) {
fprintf(stderr, "Could not open codec: %s\n", av_err2str(ret));
exit(1);
}
f = fopen(filename, "wb");
if (!f) {
fprintf(stderr, "Could not open %s\n", filename);
exit(1);
}
frame = av_frame_alloc();
if (!frame) {
fprintf(stderr, "Could not allocate video frame\n");
exit(1);
}
frame->format = c->pix_fmt;
frame->width = c->width;
frame->height = c->height;
ret = av_frame_get_buffer(frame, 32); // must be called, otherwise encoding stalls
if (ret < 0) {
fprintf(stderr, "Could not allocate the video frame data\n");
exit(1);
}
return 0;
}
int stopencoder()
{
avcodec_free_context(&c);
av_frame_free(&frame);
av_packet_free(&pkt);
return 0;
}
/* set up the capture method (memory-mapped buffers) */
int init_mmap(int fd)
{
/* request frame buffers from the driver; reqbufs.count holds the requested number */
struct v4l2_requestbuffers reqbufs;
memset(&reqbufs, 0, sizeof(reqbufs));
reqbufs.count = 3; /*the number of buffer*/
reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbufs.memory = V4L2_MEMORY_MMAP;
if(-1 == ioctl(fd,VIDIOC_REQBUFS,&reqbufs)) {
perror("Fail to ioctl 'VIDIOC_REQBUFS'");
exit(EXIT_FAILURE);
}
n_buffer = reqbufs.count;
printf("n_buffer = %d\n", n_buffer);
//usr_buf = calloc(reqbufs.count, sizeof(usr_buf));
usr_buf = calloc(reqbufs.count, sizeof(BUFTYPE));
if(usr_buf == NULL) {
printf("Out of memory\n");
exit(-1);
}
/* map the kernel buffers into the user process */
for(n_buffer = 0; n_buffer < reqbufs.count; ++n_buffer) {
//stand for a frame
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffer;
/*check the information of the kernel cache requested*/
if(-1 == ioctl(fd,VIDIOC_QUERYBUF,&buf)){
perror("Fail to ioctl : VIDIOC_QUERYBUF");
exit(EXIT_FAILURE);
}
usr_buf[n_buffer].length = buf.length;
usr_buf[n_buffer].start = (char *)mmap(NULL,buf.length,PROT_READ | PROT_WRITE,MAP_SHARED, fd,buf.m.offset);
// changed from MAP_PRIVATE to MAP_SHARED
if(MAP_FAILED == usr_buf[n_buffer].start){
perror("Fail to mmap");
exit(EXIT_FAILURE);
}
}
return 0;
}
int open_camera(void)
{
int fd;
struct v4l2_input inp;
fd = open(FILE_VIDEO, O_RDWR | O_NONBLOCK, 0); // open the capture device in non-blocking mode (changed from blocking)
if(fd < 0){
fprintf(stderr, "%s open err \n", FILE_VIDEO);
exit(EXIT_FAILURE);
}
inp.index = 0;
if (-1 == ioctl (fd, VIDIOC_S_INPUT, &inp)) {
fprintf(stderr, "VIDIOC_S_INPUT \n");
}
return fd;
}
int init_camera(int fd)
{
struct v4l2_capability cap; /* device capabilities, such as video capture support */
struct v4l2_format tv_fmt; /* frame format */
struct v4l2_fmtdesc fmtdesc; /* format description */
struct v4l2_control ctrl;
int ret;
/* enumerate all supported formats */
memset(&fmtdesc, 0, sizeof(fmtdesc));
fmtdesc.index = 0 ; /* the number to check */
fmtdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
/* query the video device driver capabilities */
if((ret = ioctl(fd, VIDIOC_QUERYCAP, &cap)) < 0){
fprintf(stderr, "fail to ioctl VIDIOC_QUERYCAP \n");
exit(EXIT_FAILURE);
}
/* make sure this is a video capture device */
if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)){
fprintf(stderr, "The current device is not a video capture device \n");
exit(EXIT_FAILURE);
}
/* make sure the device supports streaming I/O */
if(!(cap.capabilities & V4L2_CAP_STREAMING)){
printf("The Current device does not support streaming i/o\n");
exit(EXIT_FAILURE);
}
printf("\ncamera driver name is : %s\n",cap.driver);
printf("camera device name is : %s\n",cap.card);
printf("camera bus information: %s\n",cap.bus_info);
/*display the format device support*/
printf("\n");
while(ioctl(fd,VIDIOC_ENUM_FMT,&fmtdesc)!=-1)
{
printf("support device %d.%s\n",fmtdesc.index+1,fmtdesc.description);
fmtdesc.index++;
}
printf("\n");
/*set the form of camera capture data*/
tv_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; /* a capture device must use V4L2_BUF_TYPE_VIDEO_CAPTURE */
tv_fmt.fmt.pix.width = WIDTH;
tv_fmt.fmt.pix.height = HEIGHT;
tv_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // V4L2_PIX_FMT_YUYV is what the HDMI capture card delivers (not V4L2_PIX_FMT_YUV420)
tv_fmt.fmt.pix.field = V4L2_FIELD_NONE; /*V4L2_FIELD_NONE*/
if (ioctl(fd, VIDIOC_S_FMT, &tv_fmt) < 0) {
fprintf(stderr,"VIDIOC_S_FMT set err\n");
close(fd);
exit(-1);
}
struct v4l2_streamparm *setfps;
setfps = (struct v4l2_streamparm *) calloc(1, sizeof(struct v4l2_streamparm)); // calloc already zeroes the struct
setfps->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps->parm.capture.timeperframe.numerator = 1;
setfps->parm.capture.timeperframe.denominator = 30;
if (-1 == ioctl(fd, VIDIOC_S_PARM, setfps)) {
fprintf(stderr,"v4l2: set fps fail\n");
close(fd);
exit(-1);
}
free(setfps);
init_mmap(fd);
return 0;
}
int start_capture(int fd)
{
unsigned int i;
enum v4l2_buf_type type;
/* queue all kernel buffers */
for(i = 0; i < n_buffer; i++){
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if(-1 == ioctl(fd, VIDIOC_QBUF, &buf)){
perror("Fail to ioctl 'VIDIOC_QBUF'");
exit(EXIT_FAILURE);
}
}
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(-1 == ioctl(fd, VIDIOC_STREAMON, &type)) {
printf("i=%d.\n", i);
perror("VIDIOC_STREAMON");
close(fd);
exit(EXIT_FAILURE);
}
return 0;
}
int process_image(void *addr, int length){
int y_size;
int ret;
int i,x,y;
unsigned char *yuv420_buf = (unsigned char *)malloc(3*WIDTH*HEIGHT/2*sizeof(unsigned char));
if(j==0) gettimeofday(&first_time, NULL); // start timing when the first frame arrives
y_size = WIDTH * HEIGHT;
ret = av_frame_make_writable(frame);
if (ret < 0){
printf("av_frame_make_writable error\n");
exit(1);
}
// convert the color space (YUYV422 -> YUV420P)
YUYV422toYUV420P(addr, WIDTH, HEIGHT, yuv420_buf, y_size*2);
frame->data[0] = yuv420_buf;
frame->data[1] = yuv420_buf+y_size;
frame->data[2] = yuv420_buf+5*y_size/4;
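/* The frame's plane pointers now reference the converted buffer: Y at offset 0,
   U at y_size, V at y_size + y_size/4 (planar YUV420P layout). A possible alternative,
   not used here, would be to convert directly into frame->data[] using frame->linesize[]
   and avoid the per-frame malloc/copy. */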
//if(!(j % 30)){
gettimeofday(&second_time, NULL);
double time_val = (second_time.tv_sec - first_time.tv_sec) * 1000000 + second_time.tv_usec - first_time.tv_usec;
//printf("time_val(ms) = %lf\n", time_val / 1000.0);
printf("-capture frame %3d (size=%5d) time(ms) = %lf fps=%lf\n", j, length,time_val / 1000.0, j*1000.0/(time_val/1000.0));
//}
frame->pts = j;
encode(c, frame, pkt, f);
j++;
free(yuv420_buf);
return 0;
}
int read_frame(int fd){
struct v4l2_buffer buf;
unsigned int i;
memset(&buf, 0, sizeof(buf));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
// dequeue a filled buffer from the kernel queue
if(-1 == ioctl(fd, VIDIOC_DQBUF,&buf)){
perror("Fail to ioctl 'VIDIOC_DQBUF'");
exit(EXIT_FAILURE);
}
assert(buf.index < n_buffer);
// hand the captured frame to the converter/encoder
process_image(usr_buf[buf.index].start, usr_buf[buf.index].length);
if(-1 == ioctl(fd, VIDIOC_QBUF,&buf)){
perror("Fail to ioctl 'VIDIOC_QBUF'");
exit(EXIT_FAILURE);
}
return 1;
}
int mainloop(int fd)
{
int count = LOOP;
while(count-- > 0){
for(;;) {
fd_set fds;
struct timeval tv;
int r;
FD_ZERO(&fds);
FD_SET(fd,&fds);
/*Timeout*/
tv.tv_sec = 1;
tv.tv_usec = 0;
r = select(fd + 1,&fds,NULL,NULL,&tv);
if(-1 == r){
printf("rrr\n");
if(EINTR == errno)
continue;
perror("Fail to select");
exit(EXIT_FAILURE);
}
if(0 == r){
fprintf(stderr,"select Timeout\n");
exit(-1);
}
if(read_frame(fd)){
break;
}else{
printf("error\n");
}
}
}
return 0;
}
void stop_capture(int fd)
{
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(-1 == ioctl(fd,VIDIOC_STREAMOFF,&type)){
perror("Fail to ioctl 'VIDIOC_STREAMOFF'");
exit(EXIT_FAILURE);
}
}
void close_camera_device(int fd)
{
unsigned int i;
for(i = 0;i < n_buffer; i++){
if(-1 == munmap(usr_buf[i].start,usr_buf[i].length)){
exit(-1);
}
}
free(usr_buf);
if(-1 == close(fd)){
perror("Fail to close fd");
exit(EXIT_FAILURE);
}
}
int main(void)
{
int fd;
fd = open_camera();
init_camera(fd);
openencoder();
start_capture(fd);
mainloop(fd);
stop_capture(fd);
close_camera_device(fd);
stopencoder();
return 0;
}
Test result (capturing 1080p at 30 fps, the encoder sustains roughly 26 fps):
./v4l2_encode_2
...............
+encode packet 683 (size= 3867) time(ms) = 26053.245000 fps=26.215544
-capture frame 685 (size=4147200) time(ms) = 26082.934000 fps=26.262383
+encode packet 684 (size= 4031) time(ms) = 26091.628000 fps=26.215306
-capture frame 686 (size=4147200) time(ms) = 26120.953000 fps=26.262441
+encode packet 685 (size= 202) time(ms) = 26130.025000 fps=26.215053
-capture frame 687 (size=4147200) time(ms) = 26159.614000 fps=26.261855
+encode packet 686 (size= 27643) time(ms) = 26168.684000 fps=26.214539
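test.264 is a raw H.264 elementary stream with no container. As a rough check (example commands, not part of the test program) it can be played back with ffplay -f h264 test.264, or wrapped into an MP4 with ffmpeg -framerate 30 -i test.264 -c copy test.mp4.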