~/code/MediaPlayer$ tree
.
├── main.cpp
├── mediaplayer.cpp
├── mediaplayer.h
├── MediaPlayer.pro
└── MediaPlayer.pro.user
MediaPlayer.pro
#-------------------------------------------------
#
# Project created by QtCreator 2020-08-17T13:43:30
#
#-------------------------------------------------
QT += core gui
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
TARGET = MediaPlayer
TEMPLATE = app
# The following define makes your compiler emit warnings if you use
# any feature of Qt which has been marked as deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if you use deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
SOURCES += main.cpp\
mediaplayer.cpp
HEADERS += mediaplayer.h
#LIBS += -L/usr/lib/x86_64-linux-gnu
LIBS += -ljpeg
win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -ljpeg
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -ljpeg
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -ljpeg
INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -lx264
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -lx264
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -lx264
INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
win32:CONFIG(release, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/release/ -lasound
else:win32:CONFIG(debug, debug|release): LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/debug/ -lasound
else:unix: LIBS += -L$$PWD/../../../../usr/lib/x86_64-linux-gnu/ -lasound
INCLUDEPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
DEPENDPATH += $$PWD/../../../../usr/lib/x86_64-linux-gnu
main.cpp
#include "mediaplayer.h"
#include <QApplication>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
MediaPlayer w;
w.show();
return a.exec();
}
mediaplayer.h
#ifndef MEDIAPLAYER_H
#define MEDIAPLAYER_H
#include <QMainWindow>
#include <QPainter>
#include <QMutex>
#include <QImage>
/*
* capturing from UVC cam
* requires: libjpeg-dev
* build: gcc -std=c99 capture.c -ljpeg -o capture
*/
extern "C" {
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <linux/videodev2.h>
#include <jpeglib.h>
#include <x264.h>
#include <alsa/asoundlib.h>
#include <pthread.h>
}
#include <fstream>
using namespace std;
#include <QDebug>
class MediaPlayer : public QMainWindow
{
Q_OBJECT
private:
QMutex m_mutex;
QImage m_image;
public:
MediaPlayer(QWidget *parent = 0);
void onRecvFrame( char *data, int width, int height);
void paintEvent(QPaintEvent *);
void PaintImage(QPainter &painter);
~MediaPlayer();
};
#endif // MEDIAPLAYER_H
mediaplayer.cpp
#include "mediaplayer.h"
void quit(const char * msg)
{
fprintf(stderr, "[%s] %d: %s\n", msg, errno, strerror(errno));
exit(EXIT_FAILURE);
}
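// wrap ioctl in a bounded retry loop so transient EINTR interruptions do not fail the call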
int xioctl(int fd, int request, void* arg)
{
for (int i = 0; i < 100; i++) {
int r = ioctl(fd, request, arg);
if (r != -1 || errno != EINTR) return r;
}
return -1;
}
typedef struct {
uint8_t* start;
size_t length;
} buffer_t;
typedef struct { // added by zyk
x264_param_t *param;
x264_t *handle;
x264_picture_t *picture; // per-frame picture state for the video sequence
x264_nal_t *nal;
} X264Encoder;
typedef struct {
X264Encoder encoder; /* added by zyk */
unsigned char *h264_buf; //encoded buffer
unsigned int encodedLength;
int fd;
uint32_t width;
uint32_t height;
size_t buffer_count;
buffer_t* buffers;
buffer_t head;
} camera_t;
camera_t* camera_open(const char * device, uint32_t width, uint32_t height)
{
int fd = open(device, O_RDWR | O_NONBLOCK, 0);
if (fd == -1) quit("camera_open ");
camera_t* camera = (camera_t*)malloc(sizeof (camera_t));
camera->fd = fd;
camera->width = width;
camera->height = height;
camera->buffer_count = 0;
camera->buffers = NULL;
camera->head.length = 0;
camera->head.start = NULL;
return camera;
}
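// negotiate with the V4L2 driver: verify capture/streaming support, reset the
// crop rectangle, set the requested YUYV format, then request and mmap 4 kernel buffers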
void camera_init(camera_t* camera) {
struct v4l2_capability cap;
if (xioctl(camera->fd, VIDIOC_QUERYCAP, &cap) == -1) quit("VIDIOC_QUERYCAP");
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) quit("no capture");
if (!(cap.capabilities & V4L2_CAP_STREAMING)) quit("no streaming");
struct v4l2_cropcap cropcap;
memset(&cropcap, 0, sizeof cropcap);
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_CROPCAP, &cropcap) == 0) {
struct v4l2_crop crop;
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect;
if (xioctl(camera->fd, VIDIOC_S_CROP, &crop) == -1) {
// cropping not supported
}
}
struct v4l2_format format;
memset(&format, 0, sizeof format);
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
format.fmt.pix.width = camera->width;
format.fmt.pix.height = camera->height;
format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
format.fmt.pix.field = V4L2_FIELD_NONE;
if (xioctl(camera->fd, VIDIOC_S_FMT, &format) == -1) quit("VIDIOC_S_FMT");
struct v4l2_requestbuffers req;
memset(&req, 0, sizeof req);
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->fd, VIDIOC_REQBUFS, &req) == -1) quit("VIDIOC_REQBUFS");
camera->buffer_count = req.count;
camera->buffers = (buffer_t*)calloc(req.count, sizeof (buffer_t));
size_t buf_max = 0;
for (size_t i = 0; i < camera->buffer_count; i++) {
struct v4l2_buffer buf;
memset(&buf, 0, sizeof buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(camera->fd, VIDIOC_QUERYBUF, &buf) == -1)
quit("VIDIOC_QUERYBUF");
if (buf.length > buf_max) buf_max = buf.length;
camera->buffers[i].length = buf.length;
camera->buffers[i].start =
( uint8_t*)mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
camera->fd, buf.m.offset);
if (camera->buffers[i].start == MAP_FAILED) quit("mmap");
}
camera->head.start = ( uint8_t*)malloc(buf_max);
}
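// queue every mmap'ed buffer and switch the device into streaming mode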
void camera_start(camera_t* camera)
{
for (size_t i = 0; i < camera->buffer_count; i++) {
struct v4l2_buffer buf;
memset(&buf, 0, sizeof buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) quit("VIDIOC_QBUF");
}
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_STREAMON, &type) == -1)
quit("VIDIOC_STREAMON");
}
void camera_stop(camera_t* camera)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_STREAMOFF, &type) == -1)
quit("VIDIOC_STREAMOFF");
}
void camera_finish(camera_t* camera)
{
for (size_t i = 0; i < camera->buffer_count; i++) {
munmap(camera->buffers[i].start, camera->buffers[i].length);
}
free(camera->buffers);
camera->buffer_count = 0;
camera->buffers = NULL;
free(camera->head.start);
camera->head.length = 0;
camera->head.start = NULL;
}
void camera_close(camera_t* camera)
{
if (close(camera->fd) == -1) quit("close");
free(camera);
}
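// configure x264 for live capture: zerolatency preset, 15 fps, Annex-B output
// with repeated headers, then open the encoder and allocate an I420 picture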
void h264_encoder_init(X264Encoder *encoder,int width,int height)
{
encoder->param = (x264_param_t *) malloc(sizeof(x264_param_t));
encoder->picture = (x264_picture_t *) malloc(sizeof(x264_picture_t));
x264_param_default(encoder->param);
x264_param_default_preset(encoder->param, "veryfast", "zerolatency");
encoder->param->i_width = width;
encoder->param->i_height = height;
encoder->param->rc.i_lookahead = 0; // no frame lookahead buffer (lower latency)
encoder->param->i_fps_num = 15;
encoder->param->i_fps_den = 1;
encoder->param->b_annexb = 1;
encoder->param->i_keyint_max=100;
encoder->param->i_keyint_min=90;
encoder->param->i_bframe=3;
encoder->param->b_repeat_headers=1;
x264_param_apply_profile(encoder->param, "baseline"); // use the baseline profile
if ((encoder->handle = x264_encoder_open(encoder->param)) == 0) {
printf("x264_encoder_open error!\n");
return;
}
x264_picture_alloc(encoder->picture, X264_CSP_I420, encoder->param->i_width,encoder->param->i_height);
encoder->picture->img.i_csp = X264_CSP_I420;
encoder->picture->img.i_plane = 3;
return;
}
static int pts_time=0;
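// repack one YUYV (packed 4:2:2) frame into the encoder's I420 (planar 4:2:0)
// picture, encode it, and copy all NAL units to `out`; returns the encoded byte count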
int h264_compress_frame(X264Encoder * encoder, int type, uint8_t * in, uint8_t * out)
{
x264_picture_t pic_out;
int nNal = -1;
int result = 0;
//int i = 0;
uint8_t *p_out = out;
unsigned int i,j;
unsigned int base_h;
char *y = (char *)encoder->picture->img.plane[0];
char *u = (char *)encoder->picture->img.plane[1];
char *v = (char *)encoder->picture->img.plane[2];
int is_y = 1, is_u = 1;
int y_index = 0, u_index = 0, v_index = 0;
int yuv422_length = 2 * encoder->param->i_width * encoder->param->i_height;
printf("%s %d\n",__FUNCTION__,__LINE__);
for(i=0; i<yuv422_length; i+=2){
*(y+y_index) = *(in+i);
y_index++;
}
printf("%s %d\n",__FUNCTION__,__LINE__);
for(i=0; i<encoder->param->i_height; i+=2){
base_h = i*encoder->param->i_width*2;
for(j=base_h+1; j<base_h+encoder->param->i_width*2; j+=2){
if(is_u){
*(u+u_index) = *(in+j);
u_index++;
is_u = 0;
}
else{
*(v+v_index) = *(in+j);
v_index++;
is_u = 1;
}
}
}
printf("%s %d\n",__FUNCTION__,__LINE__);
switch (type) {
case 0:
encoder->picture->i_type = X264_TYPE_P;
break;
case 1:
encoder->picture->i_type = X264_TYPE_IDR;
break;
case 2:
encoder->picture->i_type = X264_TYPE_I;
break;
default:
encoder->picture->i_type = X264_TYPE_AUTO;
break;
}
printf("%s %d\n",__FUNCTION__,__LINE__);
encoder->picture->i_pts=pts_time;
if (x264_encoder_encode(encoder->handle, &(encoder->nal), &nNal, encoder->picture,&pic_out) < 0) {
printf("x264_encoder_encode error,type:%08x!\n",encoder->picture->img.i_csp);
return -1;
}
printf("%s %d\n",__FUNCTION__,__LINE__);
for (i = 0; i < nNal; i++) {
printf("%s %d\n",__FUNCTION__,__LINE__);
memcpy(p_out, encoder->nal[i].p_payload, encoder->nal[i].i_payload);
printf("%s %d\n",__FUNCTION__,__LINE__);
p_out += encoder->nal[i].i_payload;
result += encoder->nal[i].i_payload;
}
printf("%s %d\n",__FUNCTION__,__LINE__);
pts_time=pts_time+1;
printf("%s %d\n",__FUNCTION__,__LINE__);
return result;
}
ofstream f("binary.h264",ios::binary); // raw Annex-B H.264 dump of every encoded frame
static void h264_encode_frame(camera_t* cam, uint8_t * yuv_frame,
size_t yuv_length)
{
int encLength = 0;
printf("%s %d\n",__FUNCTION__,__LINE__);
encLength = h264_compress_frame(&cam->encoder, -1, yuv_frame, cam->h264_buf);
cam->encodedLength=encLength;
f.write((char*)cam->h264_buf,cam->encodedLength);
return;
}
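// ALSA capture state for the audio path; the PCM read loop itself is not part of this listing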
snd_pcm_t *captureHandle;
int retValue;
char *readBuffer;
snd_pcm_uframes_t frames=1024;
ofstream a("binary.pcm",ios::binary); // raw PCM dump for the audio path
int camera_capture(camera_t* camera)
{
struct v4l2_buffer buf;
memset(&buf, 0, sizeof buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
printf("%s %d\n",__FUNCTION__,__LINE__);
if (xioctl(camera->fd, VIDIOC_DQBUF, &buf) == -1) return FALSE;
printf("%s %d\n",__FUNCTION__,__LINE__);
memcpy(camera->head.start, camera->buffers[buf.index].start, buf.bytesused);
camera->head.length = buf.bytesused;
printf("%s %d\n",__FUNCTION__,__LINE__);
if (xioctl(camera->fd, VIDIOC_QBUF, &buf) == -1) return FALSE;
printf("%s %d\n",__FUNCTION__,__LINE__);
// h264_encode_frame(camera,camera->head.start,camera->head.length);
return TRUE;
}
int camera_record(camera_t* camera)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_STREAMON, &type) < 0) {
printf("VIDIOC_STREAMON\n");
return FALSE; // report failure, matching camera_capture's convention
}
return TRUE;
}
int camera_stop_record(camera_t* camera)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(camera->fd, VIDIOC_STREAMOFF, &type) < 0) {
printf("VIDIOC_STREAMOFF\n");
return FALSE;
}
return TRUE;
}
int camera_frame(camera_t* camera, struct timeval timeout) {
fd_set fds;
FD_ZERO(&fds);
FD_SET(camera->fd, &fds);
int r = select(camera->fd + 1, &fds, 0, 0, &timeout);
if (r == -1) quit("select");
if (r == 0) return FALSE;
return camera_capture(camera);
}
void jpeg(FILE* dest, uint8_t* rgb, uint32_t width, uint32_t height, int quality)
{
JSAMPARRAY image;
image = (JSAMPARRAY)calloc(height, sizeof (JSAMPROW));
for (size_t i = 0; i < height; i++) {
image[i] = (JSAMPROW)calloc(width * 3, sizeof (JSAMPLE));
for (size_t j = 0; j < width; j++) {
image[i][j * 3 + 0] = rgb[(i * width + j) * 3 + 0];
image[i][j * 3 + 1] = rgb[(i * width + j) * 3 + 1];
image[i][j * 3 + 2] = rgb[(i * width + j) * 3 + 2];
}
}
struct jpeg_compress_struct compress;
struct jpeg_error_mgr error;
compress.err = jpeg_std_error(&error);
jpeg_create_compress(&compress);
jpeg_stdio_dest(&compress, dest);
compress.image_width = width;
compress.image_height = height;
compress.input_components = 3;
compress.in_color_space = JCS_RGB;
jpeg_set_defaults(&compress);
jpeg_set_quality(&compress, quality, TRUE);
jpeg_start_compress(&compress, TRUE);
jpeg_write_scanlines(&compress, image, height);
jpeg_finish_compress(&compress);
jpeg_destroy_compress(&compress);
for (size_t i = 0; i < height; i++) {
free(image[i]);
}
free(image);
}
int minmax(int min, int v, int max)
{
return (v < min) ? min : (max < v) ? max : v;
}
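// packed YUYV -> RGB24 using integer arithmetic, two pixels per step; the
// multipliers are the usual BT.601 constants scaled by 256 (e.g. 359/256 ~= 1.402)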
uint8_t* yuyv2rgb(uint8_t* yuyv, uint32_t width, uint32_t height)
{
uint8_t* rgb = (uint8_t*)calloc(width * height * 3, sizeof (uint8_t));
for (size_t i = 0; i < height; i++) {
for (size_t j = 0; j < width; j += 2) {
size_t index = i * width + j;
int y0 = yuyv[index * 2 + 0] << 8;
int u = yuyv[index * 2 + 1] - 128;
int y1 = yuyv[index * 2 + 2] << 8;
int v = yuyv[index * 2 + 3] - 128;
rgb[index * 3 + 0] = minmax(0, (y0 + 359 * v) >> 8, 255);
rgb[index * 3 + 1] = minmax(0, (y0 + 88 * v - 183 * u) >> 8, 255);
rgb[index * 3 + 2] = minmax(0, (y0 + 454 * u) >> 8, 255);
rgb[index * 3 + 3] = minmax(0, (y1 + 359 * v) >> 8, 255);
rgb[index * 3 + 4] = minmax(0, (y1 + 88 * v - 183 * u) >> 8, 255);
rgb[index * 3 + 5] = minmax(0, (y1 + 454 * u) >> 8, 255);
}
}
return rgb;
}
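// create a pthread in the detached state; the caller may pass NULL for `thread`
// if it does not need the thread ID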
int detachThreadCreate(pthread_t *thread, void * start_routine, void *arg)
{
pthread_attr_t attr;
pthread_t thread_t;
int ret = 0;
if(thread==NULL){
thread = &thread_t;
}
// initialize the thread attribute object
if(pthread_attr_init(&attr))
{
printf("pthread_attr_init fail!\n");
return -1;
}
// set the detach state attribute: the new thread runs detached from the others
if(pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED))
{// a detached thread cannot be joined with pthread_join() and releases its own resources on exit
printf("pthread_attr_setdetachstate fail!\n");
goto error;
}
ret = pthread_create(thread, &attr, (void *(*)(void *))start_routine, arg);
if(ret != 0){// pthread_create returns an error number on failure, not -1
printf("pthread_create fail!\n");
goto error;
}
// the attribute already created the thread detached, so no pthread_detach is
// needed here (detaching the uninitialized thread_t was a bug when thread was non-NULL)
error:
pthread_attr_destroy(&attr);
return ret;
}
void MediaPlayer::onRecvFrame( char *data, int width, int height)
{
QMutexLocker locker(&m_mutex);
// QImage::Format_RGB888 is 24 bits per pixel and QImage pads each scanline to
// a 4-byte boundary; at 640x480 the padded stride equals width * 3 exactly
const int imageSize = (((width * 24 + 31) >> 5) << 2) * height;
uint8_t *dataTmp = new uint8_t[imageSize];
// copy into temporary storage first: onRecvFrame can run asynchronously, so the
// caller's buffer may already be invalid by the time the image is painted
try
{
memcpy(dataTmp, data, imageSize);
}
catch (...)
{
delete[] dataTmp;
return;
}
// wrap the RGB data in a displayable image object
QImage image = QImage(dataTmp, width, height, QImage::Format_RGB888);
if (image.isNull())
{
qDebug() << "Receive frame error, width:" << width << "height:" << height;
delete[] dataTmp; // don't leak the temporary buffer on the error path
return;
}
m_image = image.copy(0, 0, width, height);
delete[] dataTmp;
update();
}
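// trampoline called from the capture thread; userData carries the MediaPlayer pointer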
void updateVideo2GUI(void* rgbData,unsigned long userData,int width,int height)
{
MediaPlayer* window=(MediaPlayer*)userData;
if(window != NULL){
window->onRecvFrame((char*)rgbData, width, height);
}
}
void MediaPlayer::PaintImage(QPainter &painter)
{
QMutexLocker locker(&m_mutex);
int imageWidth = m_image.width();
int imageHeight = m_image.height();
QRect imageRect = QRect(0, 0, imageWidth, imageHeight);
painter.drawImage(imageRect, m_image);
}
void MediaPlayer::paintEvent(QPaintEvent *)
{
QPainter painter(this);
if (!m_image.isNull())
{
PaintImage(painter);
}
}
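// capture thread body: open /dev/video0 at 640x480, stream YUYV frames, convert
// each to RGB and push it to the GUI (the H.264 encode call in camera_capture is
// currently commented out, but the encoder is still initialized below)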
void* doRecordH264Thread(void* arg)
{
MediaPlayer* pMediaPlayer=(MediaPlayer*)arg;
if(pMediaPlayer!=NULL){
// camClient->doLoginAuthentication();
qDebug()<<"doRecordH264Thread";
camera_t* camera = camera_open("/dev/video0", 640, 480);
qDebug()<<"camera_open\n";
camera_init(camera);
qDebug()<<"camera_init\n";
camera_start(camera);
h264_encoder_init(&camera->encoder,camera->width,camera->height);
camera->h264_buf = (unsigned char*)malloc(sizeof(uint8_t) * camera->width * camera->height * 4); // generous worst-case output buffer; encoding without it corrupts memory
memset(camera->h264_buf,0,sizeof(uint8_t) * camera->width * camera->height * 4);
struct timeval timeout;
timeout.tv_sec = 1;
timeout.tv_usec = 0;
printf("%s %d\n",__FUNCTION__,__LINE__);
/* skip 5 frames for booting a cam */
for (int i = 0; i < 5; i++) {
printf("%s %d\n",__FUNCTION__,__LINE__);
camera_frame(camera, timeout);
printf("%s %d\n",__FUNCTION__,__LINE__);
}
#if 1
while (1)
{
camera_frame(camera, timeout);
unsigned char* rgb =
yuyv2rgb(camera->head.start, camera->width, camera->height);
updateVideo2GUI((void*)rgb,(unsigned long)pMediaPlayer,camera->width,camera->height);
free(rgb); // onRecvFrame copies the pixels synchronously, so release the RGB buffer each iteration
printf("%s %d\n",__FUNCTION__,__LINE__);
}
#endif
#if 0
unsigned char* rgb =
yuyv2rgb(camera->head.start, camera->width, camera->height);
FILE* out = fopen("result.jpg", "w");
jpeg(out, rgb, camera->width, camera->height, 100);
updateVideo2GUI((void*)rgb,(unsigned long)pMediaPlayer,camera->width,camera->height);
fclose(out);
free(rgb);
#endif
printf("%s %d\n",__FUNCTION__,__LINE__);
camera_stop(camera);
camera_finish(camera);
camera_close(camera);
}
return NULL;
}
MediaPlayer::MediaPlayer(QWidget *parent)
: QMainWindow(parent)
{
detachThreadCreate(NULL,(void*)doRecordH264Thread,(void *)this);
// return 0;
}
MediaPlayer::~MediaPlayer()
{
}
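For reference, a minimal build-and-run session, assuming Qt 5 with qmake plus the libjpeg, libx264, and ALSA development packages are installed (exact package names vary by distribution):

~/code/MediaPlayer$ qmake MediaPlayer.pro
~/code/MediaPlayer$ make
~/code/MediaPlayer$ ./MediaPlayer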