Because of this damned epidemic I have been stuck at home for three months. The project calls for a network design that can stream live video, and a lot of time went into the plan up front; now there is finally some progress. However, UDP packet loss is severe and still causes plenty of problems, so I plan to try again with TCP soon. In addition, signals and slots still behave badly on the embedded platform, so for now it seems only one-way data transfer is possible.
NetCamera.pro
#-------------------------------------------------
#
# Project created by QtCreator 2020-03-01T18:07:39
#
#-------------------------------------------------
QT += core network
QT -= gui
TARGET = NetCamera
CONFIG += console
CONFIG -= app_bundle
TEMPLATE = app
#ARM
INCLUDEPATH +=/home/swann/s5p6818/opencv-2.4.9-6818/include/opencv
INCLUDEPATH +=/home/swann/s5p6818/opencv-2.4.9-6818/include/opencv2
INCLUDEPATH +=/home/swann/s5p6818/opencv-2.4.9-6818/include
LIBS += /usr/lib/libopencv_highgui.so \
/usr/lib/libopencv_core.so \
/usr/lib/libopencv_objdetect.so \
/usr/lib/libopencv_imgproc.so
SOURCES += main.cpp \
Camera/Camera.cpp \
Camera/Udpcam.cpp
HEADERS += \
Camera/camera.h \
Camera/videodev2_samsung.h \
Camera/Udpcam.h
USB camera capture
camera.h
#ifndef CAMERA_H
#define CAMERA_H
#include <stdio.h>   /* FILE */
#include <string.h>  /* memset() used by CLEAR() */
#define CLEAR(x) memset (&(x), 0, sizeof (x))
#define CAPTURE_BUFFER_NUMBER 1
typedef enum {
IO_METHOD_READ, IO_METHOD_MMAP, IO_METHOD_USERPTR,
} io_method;
class Camera
{
public:
Camera(char *DEV_NAME,int w,int h);//constructor
~Camera();
bool OpenDevice();//open the device
void CloseDevice();//close the device
bool GetBuffer(unsigned char *image);//grab one frame
unsigned int getImageSize(); //image size in bytes
void DecodeYUV420SP(unsigned int* rgbBuf, unsigned char* yuv420sp, int width, int height);
void video_yuyv_to_rgb24(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
private:
char dev_name[50];
io_method io;
int fd;
bool sign3;
FILE * yuv_fd; //save captured frames to a file
int width;
int height;
struct { void * start; int length; } buffers[CAPTURE_BUFFER_NUMBER];
struct buffer
{
void * start;
size_t length;
};
unsigned int cap_image_size ;
bool init_device(void);
bool init_mmap(void);
void uninit_device(void);
bool start_capturing(void);//start capture
void stop_capturing(void);//stop capture
void mainloop(unsigned char *image);
int read_frame(unsigned char *image);//read one frame
void close_device(void);
bool open_device(void);
bool process_image(unsigned char *image,const void * p);
int yuvtorgb( int y, int u, int v);
void errno_exit(const char * s);
int xioctl(int fd, int request, void * arg);
};
#endif // CAMERA_H
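Taken together, the interface above implies a simple capture flow: construct the object with the device node and resolution, call OpenDevice(), call GetBuffer() in a loop, and finish with CloseDevice(). A minimal sketch of that flow (the device path, resolution, and frame count here are placeholders for illustration; the real usage is in main.cpp further down):
#include "Camera/camera.h"

int main()
{
    // placeholder device node and resolution, for illustration only
    Camera cam((char*)"/dev/video5", 640, 480);
    if (!cam.OpenDevice())
        return -1;

    unsigned char frame[640 * 480 * 2]; // YUYV uses 2 bytes per pixel
    for (int i = 0; i < 100; ++i)       // grab a fixed number of frames
        cam.GetBuffer(frame);

    cam.CloseDevice();
    return 0;
}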
To reduce platform dependencies, add one header file:
videodev2_samsung.h
/*
* Video for Linux Two header file for samsung
*
* Copyright (C) 2009, Samsung Electronics
*
* This header file contains several v4l2 APIs to be proposed to v4l2
* community and until bein accepted, will be used restrictly in Samsung's
* camera interface driver FIMC.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Alternatively, Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __LINUX_VIDEODEV2_SAMSUNG_H
#define __LINUX_VIDEODEV2_SAMSUNG_H
/* Values for 'capabilities' field */
/* Object detection device */
#define V4L2_CAP_OBJ_RECOGNITION 0x10000000
/* strobe control */
#define V4L2_CAP_STROBE 0x20000000
#define V4L2_CID_FOCUS_MODE (V4L2_CID_CAMERA_CLASS_BASE + 17)
/* Focus Methods */
enum v4l2_focus_mode {
V4L2_FOCUS_MODE_AUTO = 0,
V4L2_FOCUS_MODE_MACRO = 1,
V4L2_FOCUS_MODE_MANUAL = 2,
V4L2_FOCUS_MODE_LASTP = 2,
};
#define V4L2_CID_ZOOM_MODE (V4L2_CID_CAMERA_CLASS_BASE + 18)
/* Zoom Methods */
enum v4l2_zoom_mode {
V4L2_ZOOM_MODE_CONTINUOUS = 0,
V4L2_ZOOM_MODE_OPTICAL = 1,
V4L2_ZOOM_MODE_DIGITAL = 2,
V4L2_ZOOM_MODE_LASTP = 2,
};
/* Exposure Methods */
#define V4L2_CID_PHOTOMETRY (V4L2_CID_CAMERA_CLASS_BASE + 19)
enum v4l2_photometry_mode {
V4L2_PHOTOMETRY_MULTISEG = 0, /*Multi Segment */
V4L2_PHOTOMETRY_CWA = 1, /*Centre Weighted Average */
V4L2_PHOTOMETRY_SPOT = 2,
V4L2_PHOTOMETRY_AFSPOT = 3, /*Spot metering on focused point */
V4L2_PHOTOMETRY_LASTP = V4L2_PHOTOMETRY_AFSPOT,
};
/* Manual exposure control items menu type: iris, shutter, iso */
#define V4L2_CID_CAM_APERTURE (V4L2_CID_CAMERA_CLASS_BASE + 20)
#define V4L2_CID_CAM_SHUTTER (V4L2_CID_CAMERA_CLASS_BASE + 21)
#define V4L2_CID_CAM_ISO (V4L2_CID_CAMERA_CLASS_BASE + 22)
/* Following CIDs are menu type */
#define V4L2_CID_SCENEMODE (V4L2_CID_CAMERA_CLASS_BASE + 23)
#define V4L2_CID_CAM_STABILIZE (V4L2_CID_CAMERA_CLASS_BASE + 24)
#define V4L2_CID_CAM_MULTISHOT (V4L2_CID_CAMERA_CLASS_BASE + 25)
/* Control dynamic range */
#define V4L2_CID_CAM_DR (V4L2_CID_CAMERA_CLASS_BASE + 26)
/* White balance preset control */
#define V4L2_CID_WHITE_BALANCE_PRESET (V4L2_CID_CAMERA_CLASS_BASE + 27)
/* CID extensions */
#define V4L2_CID_ROTATION (V4L2_CID_PRIVATE_BASE + 0)
#define V4L2_CID_PADDR_Y (V4L2_CID_PRIVATE_BASE + 1)
#define V4L2_CID_PADDR_CB (V4L2_CID_PRIVATE_BASE + 2)
#define V4L2_CID_PADDR_CR (V4L2_CID_PRIVATE_BASE + 3)
#define V4L2_CID_PADDR_CBCR (V4L2_CID_PRIVATE_BASE + 4)
#define V4L2_CID_OVERLAY_AUTO (V4L2_CID_PRIVATE_BASE + 5)
#define V4L2_CID_OVERLAY_VADDR0 (V4L2_CID_PRIVATE_BASE + 6)
#define V4L2_CID_OVERLAY_VADDR1 (V4L2_CID_PRIVATE_BASE + 7)
#define V4L2_CID_OVERLAY_VADDR2 (V4L2_CID_PRIVATE_BASE + 8)
#define V4L2_CID_OVLY_MODE (V4L2_CID_PRIVATE_BASE + 9)
#define V4L2_CID_DST_INFO (V4L2_CID_PRIVATE_BASE + 10)
#define V4L2_CID_IMAGE_EFFECT_FN (V4L2_CID_PRIVATE_BASE + 16)
#define V4L2_CID_IMAGE_EFFECT_APPLY (V4L2_CID_PRIVATE_BASE + 17)
#define V4L2_CID_IMAGE_EFFECT_CB (V4L2_CID_PRIVATE_BASE + 18)
#define V4L2_CID_IMAGE_EFFECT_CR (V4L2_CID_PRIVATE_BASE + 19)
#define V4L2_CID_RESERVED_MEM_BASE_ADDR (V4L2_CID_PRIVATE_BASE + 20)
#define V4L2_CID_FIMC_VERSION (V4L2_CID_PRIVATE_BASE + 21)
#define V4L2_CID_STREAM_PAUSE (V4L2_CID_PRIVATE_BASE + 53)
/* CID Extensions for camera sensor operations */
#define V4L2_CID_CAM_PREVIEW_ONOFF (V4L2_CID_PRIVATE_BASE + 64)
#define V4L2_CID_CAM_CAPTURE (V4L2_CID_PRIVATE_BASE + 65)
#define V4L2_CID_CAM_JPEG_MEMSIZE (V4L2_CID_PRIVATE_BASE + 66)
#define V4L2_CID_CAM_DATE_INFO_YEAR (V4L2_CID_PRIVATE_BASE + 14)
#define V4L2_CID_CAM_DATE_INFO_MONTH (V4L2_CID_PRIVATE_BASE + 15)
#define V4L2_CID_CAM_DATE_INFO_DATE (V4L2_CID_PRIVATE_BASE + 22)
#define V4L2_CID_CAM_SENSOR_VER (V4L2_CID_PRIVATE_BASE + 23)
#define V4L2_CID_CAM_FW_MINOR_VER (V4L2_CID_PRIVATE_BASE + 24)
#define V4L2_CID_CAM_FW_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 25)
#define V4L2_CID_CAM_PRM_MINOR_VER (V4L2_CID_PRIVATE_BASE + 26)
#define V4L2_CID_CAM_PRM_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 27)
#define V4L2_CID_CAM_FW_VER (V4L2_CID_PRIVATE_BASE + 28)
#define V4L2_CID_CAM_SET_FW_ADDR (V4L2_CID_PRIVATE_BASE + 29)
#define V4L2_CID_CAM_SET_FW_SIZE (V4L2_CID_PRIVATE_BASE + 30)
#define V4L2_CID_CAM_UPDATE_FW (V4L2_CID_PRIVATE_BASE + 31)
#define V4L2_CID_CAM_JPEG_MAIN_SIZE (V4L2_CID_PRIVATE_BASE + 32)
#define V4L2_CID_CAM_JPEG_MAIN_OFFSET (V4L2_CID_PRIVATE_BASE + 33)
#define V4L2_CID_CAM_JPEG_THUMB_SIZE (V4L2_CID_PRIVATE_BASE + 34)
#define V4L2_CID_CAM_JPEG_THUMB_OFFSET (V4L2_CID_PRIVATE_BASE + 35)
#define V4L2_CID_CAM_JPEG_POSTVIEW_OFFSET (V4L2_CID_PRIVATE_BASE + 36)
#define V4L2_CID_CAM_JPEG_QUALITY (V4L2_CID_PRIVATE_BASE + 37)
#define V4L2_CID_CAM_SENSOR_MAKER (V4L2_CID_PRIVATE_BASE + 38)
#define V4L2_CID_CAM_SENSOR_OPTICAL (V4L2_CID_PRIVATE_BASE + 39)
#define V4L2_CID_CAM_AF_VER_LOW (V4L2_CID_PRIVATE_BASE + 40)
#define V4L2_CID_CAM_AF_VER_HIGH (V4L2_CID_PRIVATE_BASE + 41)
#define V4L2_CID_CAM_GAMMA_RG_LOW (V4L2_CID_PRIVATE_BASE + 42)
#define V4L2_CID_CAM_GAMMA_RG_HIGH (V4L2_CID_PRIVATE_BASE + 43)
#define V4L2_CID_CAM_GAMMA_BG_LOW (V4L2_CID_PRIVATE_BASE + 44)
#define V4L2_CID_CAM_GAMMA_BG_HIGH (V4L2_CID_PRIVATE_BASE + 45)
#define V4L2_CID_CAM_DUMP_FW (V4L2_CID_PRIVATE_BASE + 46)
#define V4L2_CID_CAM_GET_DUMP_SIZE (V4L2_CID_PRIVATE_BASE + 47)
#define V4L2_CID_CAMERA_VT_MODE (V4L2_CID_PRIVATE_BASE + 48)
#define V4L2_CID_CAMERA_VGA_BLUR (V4L2_CID_PRIVATE_BASE + 49)
#define V4L2_CID_CAMERA_CAPTURE (V4L2_CID_PRIVATE_BASE + 50)
#define V4L2_CID_MAIN_SW_DATE_INFO_YEAR (V4L2_CID_PRIVATE_BASE + 54)
#define V4L2_CID_MAIN_SW_DATE_INFO_MONTH (V4L2_CID_PRIVATE_BASE + 55)
#define V4L2_CID_MAIN_SW_DATE_INFO_DATE (V4L2_CID_PRIVATE_BASE + 56)
#define V4L2_CID_MAIN_SW_FW_MINOR_VER (V4L2_CID_PRIVATE_BASE + 57)
#define V4L2_CID_MAIN_SW_FW_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 58)
#define V4L2_CID_MAIN_SW_PRM_MINOR_VER (V4L2_CID_PRIVATE_BASE + 59)
#define V4L2_CID_MAIN_SW_PRM_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 60)
enum v4l2_blur {
BLUR_LEVEL_0 = 0,
BLUR_LEVEL_1,
BLUR_LEVEL_2,
BLUR_LEVEL_3,
BLUR_LEVEL_MAX,
};
#define V4L2_CID_CAMERA_SCENE_MODE (V4L2_CID_PRIVATE_BASE + 70)
enum v4l2_scene_mode {
SCENE_MODE_BASE,
SCENE_MODE_NONE,
SCENE_MODE_PORTRAIT,
SCENE_MODE_NIGHTSHOT,
SCENE_MODE_BACK_LIGHT,
SCENE_MODE_LANDSCAPE,
SCENE_MODE_SPORTS,
SCENE_MODE_PARTY_INDOOR,
SCENE_MODE_BEACH_SNOW,
SCENE_MODE_SUNSET,
SCENE_MODE_DUST_DAWN,
SCENE_MODE_FALL_COLOR,
SCENE_MODE_FIREWORKS,
SCENE_MODE_TEXT,
SCENE_MODE_CANDLE_LIGHT,
SCENE_MODE_MAX,
};
#define V4L2_CID_CAMERA_FLASH_MODE (V4L2_CID_PRIVATE_BASE + 71)
enum v4l2_flash_mode {
FLASH_MODE_BASE,
FLASH_MODE_OFF,
FLASH_MODE_AUTO,
FLASH_MODE_ON,
FLASH_MODE_TORCH,
FLASH_MODE_MAX,
};
#define V4L2_CID_CAMERA_BRIGHTNESS (V4L2_CID_PRIVATE_BASE + 72)
enum v4l2_ev_mode {
EV_MINUS_4 = 0,
EV_MINUS_3,
EV_MINUS_2,
EV_MINUS_1,
EV_DEFAULT,
EV_PLUS_1,
EV_PLUS_2,
EV_PLUS_3,
EV_PLUS_4,
EV_MAX,
};
#define V4L2_CID_CAMERA_WHITE_BALANCE (V4L2_CID_PRIVATE_BASE + 73)
enum v4l2_wb_mode {
WHITE_BALANCE_BASE = 0,
WHITE_BALANCE_AUTO,
WHITE_BALANCE_SUNNY,
WHITE_BALANCE_CLOUDY,
WHITE_BALANCE_TUNGSTEN,
WHITE_BALANCE_FLUORESCENT,
WHITE_BALANCE_MAX,
};
#define V4L2_CID_CAMERA_EFFECT (V4L2_CID_PRIVATE_BASE + 74)
enum v4l2_effect_mode {
IMAGE_EFFECT_BASE = 0,
IMAGE_EFFECT_NONE,
IMAGE_EFFECT_BNW,
IMAGE_EFFECT_SEPIA,
IMAGE_EFFECT_AQUA,
IMAGE_EFFECT_ANTIQUE,
IMAGE_EFFECT_NEGATIVE,
IMAGE_EFFECT_SHARPEN,
IMAGE_EFFECT_MAX,
};
#define V4L2_CID_CAMERA_ISO (V4L2_CID_PRIVATE_BASE + 75)
enum v4l2_iso_mode {
ISO_AUTO = 0,
ISO_50,
ISO_100,
ISO_200,
ISO_400,
ISO_800,
ISO_1600,
ISO_SPORTS,
ISO_NIGHT,
ISO_MOVIE,
ISO_MAX,
};
#define V4L2_CID_CAMERA_METERING (V4L2_CID_PRIVATE_BASE + 76)
enum v4l2_metering_mode {
METERING_BASE = 0,
METERING_MATRIX,
METERING_CENTER,
METERING_SPOT,
METERING_MAX,
};
#define V4L2_CID_CAMERA_CONTRAST (V4L2_CID_PRIVATE_BASE + 77)
enum v4l2_contrast_mode {
CONTRAST_MINUS_2 = 0,
CONTRAST_MINUS_1,
CONTRAST_DEFAULT,
CONTRAST_PLUS_1,
CONTRAST_PLUS_2,
CONTRAST_MAX,
};
#define V4L2_CID_CAMERA_SATURATION (V4L2_CID_PRIVATE_BASE + 78)
enum v4l2_saturation_mode {
SATURATION_MINUS_2 = 0,
SATURATION_MINUS_1,
SATURATION_DEFAULT,
SATURATION_PLUS_1,
SATURATION_PLUS_2,
SATURATION_MAX,
};
#define V4L2_CID_CAMERA_SHARPNESS (V4L2_CID_PRIVATE_BASE + 79)
enum v4l2_sharpness_mode {
SHARPNESS_MINUS_2 = 0,
SHARPNESS_MINUS_1,
SHARPNESS_DEFAULT,
SHARPNESS_PLUS_1,
SHARPNESS_PLUS_2,
SHARPNESS_MAX,
};
#define V4L2_CID_CAMERA_WDR (V4L2_CID_PRIVATE_BASE + 80)
enum v4l2_wdr_mode {
WDR_OFF,
WDR_ON,
WDR_MAX,
};
#define V4L2_CID_CAMERA_ANTI_SHAKE (V4L2_CID_PRIVATE_BASE + 81)
enum v4l2_anti_shake_mode {
ANTI_SHAKE_OFF,
ANTI_SHAKE_STILL_ON,
ANTI_SHAKE_MOVIE_ON,
ANTI_SHAKE_MAX,
};
#define V4L2_CID_CAMERA_TOUCH_AF_START_STOP (V4L2_CID_PRIVATE_BASE + 82)
enum v4l2_touch_af {
TOUCH_AF_STOP = 0,
TOUCH_AF_START,
TOUCH_AF_MAX,
};
#define V4L2_CID_CAMERA_SMART_AUTO (V4L2_CID_PRIVATE_BASE + 83)
enum v4l2_smart_auto {
SMART_AUTO_OFF = 0,
SMART_AUTO_ON,
SMART_AUTO_MAX,
};
#define V4L2_CID_CAMERA_VINTAGE_MODE (V4L2_CID_PRIVATE_BASE + 84)
enum v4l2_vintage_mode {
VINTAGE_MODE_BASE,
VINTAGE_MODE_OFF,
VINTAGE_MODE_NORMAL,
VINTAGE_MODE_WARM,
VINTAGE_MODE_COOL,
VINTAGE_MODE_BNW,
VINTAGE_MODE_MAX,
};
#define V4L2_CID_CAMERA_JPEG_QUALITY (V4L2_CID_PRIVATE_BASE + 85)
#define V4L2_CID_CAMERA_GPS_LATITUDE (V4L2_CID_CAMERA_CLASS_BASE + 30)
#define V4L2_CID_CAMERA_GPS_LONGITUDE (V4L2_CID_CAMERA_CLASS_BASE + 31)
#define V4L2_CID_CAMERA_GPS_TIMESTAMP (V4L2_CID_CAMERA_CLASS_BASE + 32)
#define V4L2_CID_CAMERA_GPS_ALTITUDE (V4L2_CID_CAMERA_CLASS_BASE + 33)
#define V4L2_CID_CAMERA_EXIF_TIME_INFO (V4L2_CID_CAMERA_CLASS_BASE + 34)
#define V4L2_CID_CAMERA_ZOOM (V4L2_CID_PRIVATE_BASE + 90)
enum v4l2_zoom_level {
ZOOM_LEVEL_0 = 0,
ZOOM_LEVEL_1,
ZOOM_LEVEL_2,
ZOOM_LEVEL_3,
ZOOM_LEVEL_4,
ZOOM_LEVEL_5,
ZOOM_LEVEL_6,
ZOOM_LEVEL_7,
ZOOM_LEVEL_8,
ZOOM_LEVEL_9,
ZOOM_LEVEL_10,
ZOOM_LEVEL_11,
ZOOM_LEVEL_12,
ZOOM_LEVEL_MAX,
};
#define V4L2_CID_CAMERA_FACE_DETECTION (V4L2_CID_PRIVATE_BASE + 91)
enum v4l2_face_detection {
FACE_DETECTION_OFF = 0,
FACE_DETECTION_ON,
FACE_DETECTION_NOLINE,
FACE_DETECTION_ON_BEAUTY,
FACE_DETECTION_MAX,
};
#define V4L2_CID_CAMERA_SMART_AUTO_STATUS (V4L2_CID_PRIVATE_BASE + 92)
enum v4l2_smart_auto_status {
SMART_AUTO_STATUS_AUTO = 0,
SMART_AUTO_STATUS_LANDSCAPE,
SMART_AUTO_STATUS_PORTRAIT,
SMART_AUTO_STATUS_MACRO,
SMART_AUTO_STATUS_NIGHT,
SMART_AUTO_STATUS_PORTRAIT_NIGHT,
SMART_AUTO_STATUS_BACKLIT,
SMART_AUTO_STATUS_PORTRAIT_BACKLIT,
SMART_AUTO_STATUS_ANTISHAKE,
SMART_AUTO_STATUS_PORTRAIT_ANTISHAKE,
SMART_AUTO_STATUS_MAX,
};
#define V4L2_CID_CAMERA_SET_AUTO_FOCUS (V4L2_CID_PRIVATE_BASE + 93)
enum v4l2_auto_focus {
AUTO_FOCUS_OFF = 0,
AUTO_FOCUS_ON,
AUTO_FOCUS_MAX,
};
#define V4L2_CID_CAMERA_BEAUTY_SHOT (V4L2_CID_PRIVATE_BASE + 94)
enum v4l2_beauty_shot {
BEAUTY_SHOT_OFF = 0,
BEAUTY_SHOT_ON,
BEAUTY_SHOT_MAX,
};
#define V4L2_CID_CAMERA_AEAWB_LOCK_UNLOCK (V4L2_CID_PRIVATE_BASE + 95)
enum v4l2_ae_awb_lockunlock {
AE_UNLOCK_AWB_UNLOCK = 0,
AE_LOCK_AWB_UNLOCK,
AE_UNLOCK_AWB_LOCK,
AE_LOCK_AWB_LOCK,
AE_AWB_MAX
};
#define V4L2_CID_CAMERA_FACEDETECT_LOCKUNLOCK (V4L2_CID_PRIVATE_BASE + 96)
enum v4l2_face_lock {
FACE_LOCK_OFF = 0,
FACE_LOCK_ON,
FIRST_FACE_TRACKING,
FACE_LOCK_MAX,
};
#define V4L2_CID_CAMERA_OBJECT_POSITION_X (V4L2_CID_PRIVATE_BASE + 97)
#define V4L2_CID_CAMERA_OBJECT_POSITION_Y (V4L2_CID_PRIVATE_BASE + 98)
#define V4L2_CID_CAMERA_FOCUS_MODE (V4L2_CID_PRIVATE_BASE + 99)
enum v4l2_focusmode {
FOCUS_MODE_AUTO = 0,
FOCUS_MODE_MACRO,
FOCUS_MODE_FACEDETECT,
FOCUS_MODE_AUTO_DEFAULT,
FOCUS_MODE_MACRO_DEFAULT,
FOCUS_MODE_FACEDETECT_DEFAULT,
FOCUS_MODE_INFINITY,
FOCUS_MODE_MAX,
};
#define V4L2_CID_CAMERA_OBJ_TRACKING_STATUS (V4L2_CID_PRIVATE_BASE + 100)
enum v4l2_obj_tracking_status {
OBJECT_TRACKING_STATUS_BASE,
OBJECT_TRACKING_STATUS_PROGRESSING,
OBJECT_TRACKING_STATUS_SUCCESS,
OBJECT_TRACKING_STATUS_FAIL,
OBJECT_TRACKING_STATUS_MISSING,
OBJECT_TRACKING_STATUS_MAX,
};
#define V4L2_CID_CAMERA_OBJ_TRACKING_START_STOP (V4L2_CID_PRIVATE_BASE + 101)
enum v4l2_ot_start_stop {
OT_STOP = 0,
OT_START,
OT_MAX,
};
#define V4L2_CID_CAMERA_CAF_START_STOP (V4L2_CID_PRIVATE_BASE + 102)
enum v4l2_caf_start_stop {
CAF_STOP = 0,
CAF_START,
CAF_MAX,
};
#define V4L2_CID_CAMERA_AUTO_FOCUS_RESULT (V4L2_CID_PRIVATE_BASE + 103)
#define V4L2_CID_CAMERA_FRAME_RATE (V4L2_CID_PRIVATE_BASE + 104)
enum v4l2_frame_rate {
FRAME_RATE_AUTO = 0,
FRAME_RATE_7 = 7,
FRAME_RATE_15 = 15,
FRAME_RATE_30 = 30,
FRAME_RATE_60 = 60,
FRAME_RATE_120 = 120,
FRAME_RATE_MAX
};
#define V4L2_CID_CAMERA_ANTI_BANDING (V4L2_CID_PRIVATE_BASE + 105)
enum v4l2_anti_banding {
ANTI_BANDING_AUTO = 0,
ANTI_BANDING_50HZ = 1,
ANTI_BANDING_60HZ = 2,
ANTI_BANDING_OFF = 3,
};
#define V4L2_CID_CAMERA_SET_GAMMA (V4L2_CID_PRIVATE_BASE + 106)
enum v4l2_gamma_mode {
GAMMA_OFF = 0,
GAMMA_ON = 1,
GAMMA_MAX,
};
#define V4L2_CID_CAMERA_SET_SLOW_AE (V4L2_CID_PRIVATE_BASE + 107)
enum v4l2_slow_ae_mode {
SLOW_AE_OFF,
SLOW_AE_ON,
SLOW_AE_MAX,
};
#define V4L2_CID_CAMERA_BATCH_REFLECTION (V4L2_CID_PRIVATE_BASE + 108)
#define V4L2_CID_CAMERA_EXIF_ORIENTATION (V4L2_CID_PRIVATE_BASE + 109)
#define V4L2_CID_CAMERA_RESET (V4L2_CID_PRIVATE_BASE + 111)
#define V4L2_CID_CAMERA_CHECK_DATALINE (V4L2_CID_PRIVATE_BASE + 112)
#define V4L2_CID_CAMERA_CHECK_DATALINE_STOP (V4L2_CID_PRIVATE_BASE + 113)
#define V4L2_CID_CAMERA_GET_ISO (V4L2_CID_PRIVATE_BASE + 114)
#define V4L2_CID_CAMERA_GET_SHT_TIME (V4L2_CID_PRIVATE_BASE + 115)
#define V4L2_CID_CAMERA_SENSOR_MODE (V4L2_CID_PRIVATE_BASE + 116)
#define V4L2_CID_ESD_INT (V4L2_CID_PRIVATE_BASE + 117)
#define V4L2_CID_CAMERA_GET_FLASH_ONOFF (V4L2_CID_PRIVATE_BASE + 118)
#define V4L2_CID_CAMERA_RETURN_FOCUS (V4L2_CID_PRIVATE_BASE + 119)
/* Pixel format FOURCC depth Description */
/* 12 Y/CbCr 4:2:0 64x32 macroblocks */
#define V4L2_PIX_FMT_NV12T v4l2_fourcc('T', 'V', '1', '2')
/*
* * V4L2 extention for digital camera
* */
/* Strobe flash light */
enum v4l2_strobe_control {
/* turn off the flash light */
V4L2_STROBE_CONTROL_OFF = 0,
/* turn on the flash light */
V4L2_STROBE_CONTROL_ON = 1,
/* act guide light before splash */
V4L2_STROBE_CONTROL_AFGUIDE = 2,
/* charge the flash light */
V4L2_STROBE_CONTROL_CHARGE = 3,
};
enum v4l2_strobe_conf {
V4L2_STROBE_OFF = 0, /* Always off */
V4L2_STROBE_ON = 1, /* Always splashes */
/* Auto control presets */
V4L2_STROBE_AUTO = 2,
V4L2_STROBE_REDEYE_REDUCTION = 3,
V4L2_STROBE_SLOW_SYNC = 4,
V4L2_STROBE_FRONT_CURTAIN = 5,
V4L2_STROBE_REAR_CURTAIN = 6,
/* Extra manual control presets */
/* keep turned on until turning off */
V4L2_STROBE_PERMANENT = 7,
V4L2_STROBE_EXTERNAL = 8,
};
enum v4l2_strobe_status {
V4L2_STROBE_STATUS_OFF = 0,
/* while processing configurations */
V4L2_STROBE_STATUS_BUSY = 1,
V4L2_STROBE_STATUS_ERR = 2,
V4L2_STROBE_STATUS_CHARGING = 3,
V4L2_STROBE_STATUS_CHARGED = 4,
};
/* capabilities field */
/* No strobe supported */
#define V4L2_STROBE_CAP_NONE 0x0000
/* Always flash off mode */
#define V4L2_STROBE_CAP_OFF 0x0001
/* Always use flash light mode */
#define V4L2_STROBE_CAP_ON 0x0002
/* Flashlight works automatic */
#define V4L2_STROBE_CAP_AUTO 0x0004
/* Red-eye reduction */
#define V4L2_STROBE_CAP_REDEYE 0x0008
/* Slow sync */
#define V4L2_STROBE_CAP_SLOWSYNC 0x0010
/* Front curtain */
#define V4L2_STROBE_CAP_FRONT_CURTAIN 0x0020
/* Rear curtain */
#define V4L2_STROBE_CAP_REAR_CURTAIN 0x0040
/* keep turned on until turning off */
#define V4L2_STROBE_CAP_PERMANENT 0x0080
/* use external strobe */
#define V4L2_STROBE_CAP_EXTERNAL 0x0100
/* Set mode and Get status */
struct v4l2_strobe {
/* off/on/charge:0/1/2 */
enum v4l2_strobe_control control;
/* supported strobe capabilities */
__u32 capabilities;
enum v4l2_strobe_conf mode;
enum v4l2_strobe_status status; /* read only */
/* default is 0 and range of value varies from each models */
__u32 flash_ev;
__u32 reserved[4];
};
#define VIDIOC_S_STROBE _IOWR('V', 83, struct v4l2_strobe)
#define VIDIOC_G_STROBE _IOR('V', 84, struct v4l2_strobe)
/* Object recognition and collateral actions */
enum v4l2_recog_mode {
V4L2_RECOGNITION_MODE_OFF = 0,
V4L2_RECOGNITION_MODE_ON = 1,
V4L2_RECOGNITION_MODE_LOCK = 2,
};
enum v4l2_recog_action {
V4L2_RECOGNITION_ACTION_NONE = 0, /* only recognition */
V4L2_RECOGNITION_ACTION_BLINK = 1, /* Capture on blinking */
V4L2_RECOGNITION_ACTION_SMILE = 2, /* Capture on smiling */
};
enum v4l2_recog_pattern {
V4L2_RECOG_PATTERN_FACE = 0, /* Face */
V4L2_RECOG_PATTERN_HUMAN = 1, /* Human */
V4L2_RECOG_PATTERN_CHAR = 2, /* Character */
};
struct v4l2_recog_rect {
enum v4l2_recog_pattern p; /* detected pattern */
struct v4l2_rect o; /* detected area */
__u32 reserved[4];
};
struct v4l2_recog_data {
__u8 detect_cnt; /* detected object counter */
struct v4l2_rect o; /* detected area */
__u32 reserved[4];
};
struct v4l2_recognition {
enum v4l2_recog_mode mode;
/* Which pattern to detect */
enum v4l2_recog_pattern pattern;
/* How many object to detect */
__u8 obj_num;
/* select detected object */
__u32 detect_idx;
/* read only :Get object coordination */
struct v4l2_recog_data data;
enum v4l2_recog_action action;
__u32 reserved[4];
};
#define VIDIOC_S_RECOGNITION _IOWR('V', 85, struct v4l2_recognition)
#define VIDIOC_G_RECOGNITION _IOR('V', 86, struct v4l2_recognition)
/* We use this struct as the v4l2_streamparm raw_data for
* VIDIOC_G_PARM and VIDIOC_S_PARM
*/
struct sec_cam_parm {
struct v4l2_captureparm capture;
int contrast;
int effects;
int brightness;
int flash_mode;
int focus_mode;
int iso;
int metering;
int saturation;
int scene_mode;
int sharpness;
int white_balance;
};
/* ------------- Input ------------------*/
// type
#define V4L2_INPUT_TYPE_MSDMA 3
#define V4L2_INPUT_TYPE_FIFO 4
// fmt
/* ------------- Output -----------------*/
// type
#define V4L2_OUTPUT_TYPE_MSDMA 4
#define V4L2_OUTPUT_TYPE_COMPOSITE 5
#define V4L2_OUTPUT_TYPE_SVIDEO 6
#define V4L2_OUTPUT_TYPE_YPBPR_INERLACED 7
#define V4L2_OUTPUT_TYPE_YPBPR_PROGRESSIVE 8
#define V4L2_OUTPUT_TYPE_RGB_PROGRESSIVE 9
#define V4L2_OUTPUT_TYPE_DIGITAL 10
#define V4L2_OUTPUT_TYPE_HDMI V4L2_OUTPUT_TYPE_DIGITAL
#define V4L2_OUTPUT_TYPE_HDMI_RGB 11
#define V4L2_OUTPUT_TYPE_DVI 12
// fmt
/* ------------- STD -------------------*/
#define V4L2_STD_PAL_BDGHI V4L2_STD_PAL_B|V4L2_STD_PAL_D|V4L2_STD_PAL_G|V4L2_STD_PAL_H|V4L2_STD_PAL_I
#define V4L2_STD_480P_60_16_9 ((v4l2_std_id)0x04000000)
#define V4L2_STD_480P_60_4_3 ((v4l2_std_id)0x05000000)
#define V4L2_STD_576P_50_16_9 ((v4l2_std_id)0x06000000)
#define V4L2_STD_576P_50_4_3 ((v4l2_std_id)0x07000000)
#define V4L2_STD_720P_60 ((v4l2_std_id)0x08000000)
#define V4L2_STD_720P_50 ((v4l2_std_id)0x09000000)
#define V4L2_STD_1080P_60 ((v4l2_std_id)0x0a000000)
#define V4L2_STD_1080P_50 ((v4l2_std_id)0x0b000000)
#define V4L2_STD_1080P_30 ((v4l2_std_id)0x12000000)
#define V4L2_STD_1080I_60 ((v4l2_std_id)0x0c000000)
#define V4L2_STD_1080I_50 ((v4l2_std_id)0x0d000000)
#define V4L2_STD_480P_59 ((v4l2_std_id)0x0e000000)
#define V4L2_STD_720P_59 ((v4l2_std_id)0x0f000000)
#define V4L2_STD_1080I_59 ((v4l2_std_id)0x10000000)
#define V4L2_STD_1080P_59 ((v4l2_std_id)0x11000000)
#define V4L2_STD_1080P_30 ((v4l2_std_id)0x12000000)
/* ------------- ext. param -------------------*/
#define V4L2_FBUF_FLAG_PRE_MULTIPLY 0x0040
#define V4L2_FBUF_CAP_PRE_MULTIPLY 0x0080
#define VIDIOC_HDCP_ENABLE _IOWR('V', 100, unsigned int) //Control HDCP flag 1: enable, 0: disable
#define VIDIOC_HDCP_STATUS _IOR('V', 101, unsigned int)
#define VIDIOC_HDCP_PROT_STATUS _IOR('V', 102, unsigned int)
#define VIDIOC_INIT_AUDIO _IOR('V', 103, unsigned int)
#define VIDIOC_AV_MUTE _IOR('V', 104, unsigned int)
#define VIDIOC_G_AVMUTE _IOR('V', 105, unsigned int)
#define VIDIOC_TV_OUT_BASE_ADDR _IOR('V', 106, unsigned int)
struct v4l2_window_s5p_tvout
{
__u32 capability;
__u32 flags;
unsigned int priority;
struct v4l2_window win;
};
struct v4l2_pix_format_s5p_tvout
{
void *base_y;
void *base_c;
unsigned short src_img_endian;
struct v4l2_pix_format pix_fmt;
};
#endif /* __LINUX_VIDEODEV2_SAMSUNG_H */
camera.cpp
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <getopt.h> /* getopt_long() */
#include <fcntl.h> /* low-level i/o */
#include <unistd.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h> /* for videodev2.h */
#include <linux/videodev2.h>
#include "camera.h"
#include "videodev2_samsung.h"
void Camera::DecodeYUV420SP(unsigned int* rgbBuf, unsigned char* yuv420sp, int width, int height) {
int frameSize = width * height;
int i = 0, y = 0;
int uvp = 0, u = 0, v = 0;
int y1192 = 0, r = 0, g = 0, b = 0;
unsigned int xrgb8888;
int xrgb8888Index = 0;
for (int j = 0, yp = 0; j < height; j++) {
uvp = frameSize + (j >> 1) * width;
u = 0;
v = 0;
for (i = 0; i < width; i++, yp++) {
y = (0xff & ((int) yuv420sp[yp])) - 16;
if (y < 0) y = 0;
if ((i & 1) == 0) {
v = (0xff & yuv420sp[uvp++]) - 128;
u = (0xff & yuv420sp[uvp++]) - 128;
}
y1192 = 1192 * y;
r = (y1192 + 1634 * u);
g = (y1192 - 833 * u - 400 * v);
b = (y1192 + 2066 * v);
if (r < 0) r = 0; else if (r > 262143) r = 262143;
if (g < 0) g = 0; else if (g > 262143) g = 262143;
if (b < 0) b = 0; else if (b > 262143) b = 262143;
r = (unsigned char)(r >> 10);
g = (unsigned char)(g >> 10);
b = (unsigned char)(b >> 10);
xrgb8888 = (unsigned int)((r << 16) | (g << 8) | b);
rgbBuf[xrgb8888Index++] = xrgb8888;
}
}
}
//YUYV (YUV422) to RGB
void Camera:: video_yuyv_to_rgb24(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
unsigned int in, out;
int y0, u, y1, v;
unsigned int pixel24;
unsigned char *pixel = (unsigned char *)&pixel24;
unsigned int size = width*height*2;
for (in = 0, out = 0; in < size; in += 4, out += 8)
{
y0 = yuv[in+0];
u = yuv[in+1];
y1 = yuv[in+2];
v = yuv[in+3];
sign3 = true;
pixel24 = yuvtorgb(y0, u, v);
rgb[out+0] = pixel[0]; //for QT
rgb[out+1] = pixel[1];
rgb[out+2] = pixel[2];
//rgb[out+0] = pixel[2]; //for iplimage
//rgb[out+1] = pixel[1];
//rgb[out+2] = pixel[0];
//sign3 = true;
pixel24 = yuvtorgb(y1, u, v);
rgb[out+4] = pixel[0];
rgb[out+5] = pixel[1];
rgb[out+6] = pixel[2];
//rgb[out+3] = pixel[2];
//rgb[out+4] = pixel[1];
//rgb[out+5] = pixel[0];
}
//return 0;
}
int Camera:: yuvtorgb(int y, int u, int v)
{
unsigned int pixel24 = 0;
unsigned char *pixel = (unsigned char *)&pixel24;
int r, g, b;
static long int ruv, guv, buv;
if (sign3)
{
sign3 = false;
ruv = 1159*(v-128);
guv = 380*(u-128) + 813*(v-128);
buv = 2018*(u-128);
}
r = (1164*(y-16) + ruv) / 1000;
g = (1164*(y-16) - guv) / 1000;
b = (1164*(y-16) + buv) / 1000;
if (r > 255) r = 255;
if (g > 255) g = 255;
if (b > 255) b = 255;
if (r < 0) r = 0;
if (g < 0) g = 0;
if (b < 0) b = 0;
pixel[0] = r;
pixel[1] = g;
pixel[2] = b;
return pixel24;
}
Camera::Camera(char *DEV_NAME, int w, int h)//constructor
{
memcpy(dev_name,DEV_NAME,strlen(DEV_NAME)+1);//copy the device name including the terminating '\0'
io = IO_METHOD_MMAP;//IO_METHOD_READ;//IO_METHOD_MMAP;
cap_image_size=0;
width=w;
height=h;
}
Camera::~Camera(){
}
unsigned int Camera::getImageSize(){
return cap_image_size;
}
void Camera::CloseDevice() {
stop_capturing();
uninit_device();
close_device();
}
void Camera::errno_exit(const char * s) {
fprintf(stderr, "%s error %d, %s\n", s, errno, strerror(errno));
exit(EXIT_FAILURE);
}
int Camera::xioctl(int fd, int request, void * arg) {
int r;
do
r = ioctl(fd, request, arg);
while (-1 == r && EINTR == errno);
return r;
}
int Camera::read_frame(unsigned char *image) {
struct v4l2_buffer buf;
CLEAR (buf); //zero the buffer descriptor before use
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (-1 == xioctl(fd, VIDIOC_DQBUF, &buf)) //dequeue a filled buffer from the driver
{
switch (errno) {
case EAGAIN:
return 0;
case EIO:
/* Could ignore EIO, see spec. */
/* fall through */
default:
errno_exit("VIDIOC_DQBUF");
}
}
assert(buf.index < CAPTURE_BUFFER_NUMBER);
memcpy((void*)image,(void*)buffers[0].start,buffers[0].length);
//fwrite(buffers[0].start, buffers[0].length, 1, yuv_fd);//write the frame to a file
//DecodeYUV420SP((unsigned int*)image, (unsigned char*)buffers[0].start, width, height);
//video_yuyv_to_rgb24((unsigned char*)buffers[0].start,(unsigned char*)image,width,height);
if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))//re-queue the buffer so the driver can refill it
errno_exit("VIDIOC_QBUF");
return 1;
}
void Camera::stop_capturing(void) {
enum v4l2_buf_type type;
switch (io) {
case IO_METHOD_READ:
/* Nothing to do. */
break;
case IO_METHOD_MMAP:
case IO_METHOD_USERPTR:
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/*stop the video capture stream*/
if (-1 == xioctl(fd, VIDIOC_STREAMOFF, &type))
errno_exit("VIDIOC_STREAMOFF");
break;
}
}
bool Camera::start_capturing(void) {
unsigned int i;
enum v4l2_buf_type type;
for (i = 0; i < CAPTURE_BUFFER_NUMBER; ++i) {
struct v4l2_buffer buf;
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == xioctl(fd, VIDIOC_QBUF, &buf))//queue the buffer so the driver can fill it
return false;
}
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
//start the video capture stream
/*select() is used later to tell whether a frame has been captured: when the video driver finishes capturing a frame
into the buffer, select() returns and the application can read the data; otherwise select() blocks until the capture completes*/
if (-1 == xioctl(fd, VIDIOC_STREAMON, &type))
return false;
return true;
}
void Camera::uninit_device(void) {
unsigned int i;
switch (io) {
case IO_METHOD_READ:
free(buffers[0].start);
break;
case IO_METHOD_MMAP:
for (i = 0; i < CAPTURE_BUFFER_NUMBER; ++i)//unmap the requested buffers
if (-1 == munmap(buffers[i].start, buffers[i].length))
errno_exit("munmap");
break;
case IO_METHOD_USERPTR:
for (i = 0; i < CAPTURE_BUFFER_NUMBER; ++i)
free(buffers[i].start);
break;
}
}
/*********************request and map the capture buffers*******************************/
bool Camera::init_mmap(void) {
bool CouldSetFrameRate = false;
struct v4l2_streamparm StreamParam;
memset(&StreamParam, 0, sizeof StreamParam);
StreamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(fd, VIDIOC_G_PARM, &StreamParam) < 0) {
fprintf(stderr, "could not set frame rate\n");
} else {
CouldSetFrameRate = StreamParam.parm.capture.capability & V4L2_CAP_TIMEPERFRAME;
}
// map the capture buffer...
struct v4l2_requestbuffers req;
CLEAR (req);
req.count = CAPTURE_BUFFER_NUMBER; // number of frame buffers to request
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;// buffer type
req.memory = V4L2_MEMORY_MMAP;//memory-mapped I/O
if(ioctl(fd, VIDIOC_REQBUFS, &req) < 0) //request the buffers; count is how many we ask for
{
fprintf(stderr, "request capture buffer failed\n");
return false;
}
if (int(req.count) != CAPTURE_BUFFER_NUMBER) {
fprintf(stderr, "capture buffer number is wrong\n");
return false;
}
for (unsigned int i = 0; i < req.count; i++) {
struct v4l2_buffer buf; //one frame buffer in the driver
CLEAR (buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;//buffer type
buf.memory = V4L2_MEMORY_MMAP;//I/O method: memory mapping
buf.index = i;//buffer index
//query the buffer allocated by VIDIOC_REQBUFS to obtain its length and offset
if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
errno_exit("VIDIOC_QUERYBUF");
buffers[i].length = buf.length;
//map the buffer into user space with mmap
buffers[i].start = mmap(NULL /* start anywhere */, buf.length,
PROT_READ | PROT_WRITE /* required */,
MAP_SHARED /* recommended */, fd, buf.m.offset);
if (MAP_FAILED == buffers[i].start)
return false;
}
struct v4l2_control ctrl;
ctrl.id = V4L2_CID_CAMERA_CHECK_DATALINE;
ctrl.value = 0;
/* apply the new control value */
if(ioctl(fd, VIDIOC_S_CTRL,&ctrl)) {
fprintf(stderr, "VIDIOC_S_CTRL V4L2_CID_CAMERA_CHECK_DATALINE failed\n");
return false;
}
return true;
}
bool Camera::init_device(void) {
v4l2_input input;
memset(&input, 0, sizeof(struct v4l2_input));
input.index = 0;
if (ioctl(fd, VIDIOC_ENUMINPUT, &input) != 0) {
fprintf(stderr, "No matching index found\n");
return false;
}
if (!input.name) {
fprintf(stderr, "No matching index found\n");
return false;
}
if (ioctl(fd, VIDIOC_S_INPUT, &input) < 0) {
fprintf(stderr, "VIDIOC_S_INPUT failed\n");
return false;
}
struct v4l2_fmtdesc fmt1; //v4l2_fmtdesc: frame format descriptor
int ret;
memset(&fmt1, 0, sizeof(fmt1));//zero the structure
fmt1.index = 0; //index of the format to query, starting from 0
fmt1.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // stream type, must always be V4L2_BUF_TYPE_VIDEO_CAPTURE
while ((ret = ioctl(fd, VIDIOC_ENUM_FMT, &fmt1)) == 0) //list every supported format
{
fmt1.index++;
printf("{ pixelformat = '%c%c%c%c', description = '%s' }\n",fmt1.pixelformat & 0xFF,
(fmt1.pixelformat >> 8) & 0xFF,(fmt1.pixelformat >> 16) & 0xFF,
(fmt1.pixelformat >> 24) & 0xFF,fmt1.description); // pixelformat: 32-bit FOURCC, description: format name
}
/**************************set the current frame format************************/
struct v4l2_format fmt; //structure used to set the current format (v4l2_format holds the per-type formats in a union)
CLEAR (fmt);
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;//stream type
fmt.fmt.pix.width = width;
fmt.fmt.pix.height = height;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;//V4L2_PIX_FMT_NV12;//pixel storage format
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
// fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;//V4L2_PIX_FMT_NV12;//pixel storage format
// fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (-1 == xioctl(fd, VIDIOC_S_FMT, &fmt))//set the image format
return false;
/**************************read back the current frame format*************************/
if (-1 == xioctl(fd, VIDIOC_G_FMT, &fmt))
{
return false;
}
else
printf("\nCurrent data format information:\n twidth:%d \n theight:%d \n",
fmt.fmt.pix.width,fmt.fmt.pix.height);
printf(" pixelformat = '%c%c%c%c'\n",fmt.fmt.pix.pixelformat & 0xFF,
(fmt.fmt.pix.pixelformat >> 8) & 0xFF,(fmt.fmt.pix.pixelformat >> 16) & 0xFF,
(fmt.fmt.pix.pixelformat >> 24) & 0xFF); // pixelformat;格式32位
//原始摄像头数据每帧的大小
cap_image_size = fmt.fmt.pix.sizeimage;
init_mmap();
return true;
}
void Camera::close_device(void) {
if (-1 == close(fd))
errno_exit("close");
fd = -1;
}
bool Camera::open_device(void) {
struct stat st;
if (-1 == stat(dev_name, &st)) //get status of the device node and store it in st
{
fprintf(stderr, "Cannot identify '%s': %d, %s\n", dev_name, errno,
strerror(errno));
return false;
}
if (!S_ISCHR(st.st_mode)) {
fprintf(stderr, "%s is no device\n", dev_name);
return false;
}
fd = ::open(dev_name, O_RDWR | O_NONBLOCK);
if (-1 == fd) {
fprintf(stderr, "Cannot open '%s': %d, %s\n", dev_name, errno,
strerror(errno));
return false;
}
// yuv_fd = fopen("out.mpg","wb");//open the output file
return true;
}
bool Camera::OpenDevice(){
if (open_device()) {
printf("Open video success\n");
if (init_device())
{
if (start_capturing())
return true;
}
} else
printf("open video failed\n");
return false;
}
bool Camera::GetBuffer(unsigned char *image){
fd_set fds;
struct timeval tv;
int r;
FD_ZERO(&fds);
FD_SET(fd, &fds);
/* Timeout. */
tv.tv_sec = 2;
tv.tv_usec = 0;
r = select(fd + 1, &fds, NULL, NULL, &tv);
if (-1 == r) {
errno_exit("select");
}
if (0 == r) {
fprintf(stderr, "select timeout\n");
exit(EXIT_FAILURE);
}
read_frame(image);
return true;
}
Network transmission
Udpcam.h
#ifndef UDPCAM_H
#define UDPCAM_H
#include <QUdpSocket>
#include <QString>
class UdpCam
{
public:
UdpCam(char* ip,qint16 port);
int width,height,size;
void dealMsg();//slot
void Send(char* data,QByteArray buf);
void frameStartcheck();
void frameEndcheck();
int convert_yuv_to_rgb_pixel(int y, int u, int v);
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
void Udpcheck(char* dst,int len);
private:
QString udpip;
qint16 udpport;
QUdpSocket *udpSocket;
};
#endif // UDPCAM_H
Udpcam.cpp
#include "Udpcam.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <iostream>
using namespace std ;
//YUYV (YUV422) to RGB
int UdpCam::convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
unsigned int in, out = 0;
unsigned int pixel_16;
unsigned char pixel_24[3];
unsigned int pixel32;
int y0, u, y1, v;
for(in = 0; in < width * height * 2; in += 4) {
pixel_16 =
yuv[in + 3] << 24 |
yuv[in + 2] << 16 |
yuv[in + 1] << 8 |
yuv[in + 0];
y0 = (pixel_16 & 0x000000ff);
u = (pixel_16 & 0x0000ff00) >> 8;
y1 = (pixel_16 & 0x00ff0000) >> 16;
v = (pixel_16 & 0xff000000) >> 24;
pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];
pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];
}
return 0;
}
int UdpCam::convert_yuv_to_rgb_pixel(int y, int u, int v)
{
unsigned int pixel32 = 0;
unsigned char *pixel = (unsigned char *)&pixel32;
int r, g, b;
r = y + (1.370705 * (v-128));
g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
b = y + (1.732446 * (u-128));
if(r > 255) r = 255;
if(g > 255) g = 255;
if(b > 255) b = 255;
if(r < 0) r = 0;
if(g < 0) g = 0;
if(b < 0) b = 0;
pixel[0] = r * 220 / 256;
pixel[1] = g * 220 / 256;
pixel[2] = b * 220 / 256;
return pixel32;
}
UdpCam::UdpCam(char* ip,qint16 port)
{
udpip = QString (ip);
udpport=port;
udpSocket =new QUdpSocket();
udpSocket->bind(9999);
}
void UdpCam::Send(char* data, QByteArray buf)
{
for(int i=0;i<size/2000;i++){
memcpy(buf.data(),data+2000*i,2000);
udpSocket->writeDatagram(buf,QHostAddress(udpip),udpport);
}
}
void UdpCam::frameStartcheck()
{
udpSocket->writeDatagram("start",QHostAddress(udpip),udpport);
}
void UdpCam::frameEndcheck()
{
udpSocket->writeDatagram("end",QHostAddress(udpip),udpport);
}
void UdpCam::Udpcheck(char* dst,int len)
{
char tbuf[6]; //one byte larger than the 5-byte "start" marker so strcmp sees a terminator
QHostAddress cliaddr;
quint16 port;
while(1)
{
if(!udpSocket->hasPendingDatagrams())
continue;
memset(tbuf,0,sizeof(tbuf));
//keep reading datagrams until the expected marker arrives
udpSocket->readDatagram(tbuf,len,&cliaddr,&port);
int res=strcmp(tbuf,dst);
printf("%s,%s,%d\n",tbuf,dst,res);
if(res==0)break;
}
}
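Send() above pushes each raw YUYV frame as a sequence of fixed 2000-byte datagrams, and the receiver has to collect size/2000 of them before it can repaint. A quick check of that arithmetic for the 1280x800 resolution hard-coded on the host side below:
#include <stdio.h>

int main()
{
    int width = 1280, height = 800;       // resolution hard-coded in widget.cpp
    int frame_bytes = width * height * 2; // YUYV: 2 bytes per pixel = 2048000
    int chunk = 2000;                     // datagram payload used by Send()
    printf("%d bytes/frame -> %d datagrams of %d bytes, remainder %d\n",
           frame_bytes, frame_bytes / chunk, chunk, frame_bytes % chunk);
    // prints: 2048000 bytes/frame -> 1024 datagrams of 2000 bytes, remainder 0
    return 0;
}
Note that for a resolution whose frame size is not a multiple of 2000, the integer division in Send() silently drops the final partial chunk.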
Main file
main.cpp
/*
* main.cpp
*
* Created on: March 1, 2020
* Author: Westlor
*/
#include <QCoreApplication>
#include <QByteArray>
#include <stdio.h>
#include <stdlib.h>
#include <signal.h>
#include <iostream>
#include "Camera/camera.h"
#include "Camera/Udpcam.h"
#define CAM_DEV "/dev/video5"
#define FB_DEV "/dev/fb0"
Camera *camera;
using namespace std;
void sign_func(int sign_num)
{
switch(sign_num)
{
case SIGINT:
printf("I have get SIGINT, I'm going now..\n" );
camera->CloseDevice();
exit(0);
break;
}
}
//IP PORT WIDTH HEIGHT
int main(int argc,char** argv) {
QCoreApplication a(argc, argv);
printf("[IP,PORT,WIDTH,HEIGHT]:%s,%s,%s,%s\n",argv[1],argv[2],argv[3],argv[4]);
if(argc!=5)
{
printf("USE THIS MODE :./APP IP PORT WIDTH HEIGHT");
return a.exec();
}
UdpCam udpcamera(argv[1],atoi(argv[2]));
udpcamera.width=atoi((const char*)argv[3]);
udpcamera.height=atoi((const char*)argv[4]);
udpcamera.size=udpcamera.width*udpcamera.height*2;
unsigned char image[udpcamera.width*udpcamera.height*2];
camera=new Camera(CAM_DEV, udpcamera.width, udpcamera.height);
if(!camera->OpenDevice()){
printf("Cam Open error\n");
return -1;
}
printf("Waiting for signal SIGINT..\n");
signal(SIGINT, sign_func);
while(1)
{
if(camera->GetBuffer(image)){
QByteArray b;
b.resize(2000);
udpcamera.Send((char*)image,b);
}
else{
break;
}
}
return a.exec();
}
Host (PC-side) code design
NetWin.pro
#-------------------------------------------------
#
# Project created by QtCreator 2020-03-01T16:51:20
#
#-------------------------------------------------
QT += core gui network
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
TARGET = NetWIn
TEMPLATE = app
#X86
INCLUDEPATH +=/usr/local/include
INCLUDEPATH +=/usr/local/include/opencv
LIBS +=/usr/local/lib/libopencv*
#ARM
#INCLUDEPATH +=/home/swann/s5p6818/opencv-2.4.9-6818/include/opencv
#INCLUDEPATH +=/home/swann/s5p6818/opencv-2.4.9-6818/include/opencv2
#INCLUDEPATH +=/home/swann/s5p6818/opencv-2.4.9-6818/include
#LIBS += /usr/lib/libopencv_highgui.so \
# /usr/lib/libopencv_core.so \
# /usr/lib/libopencv_objdetect.so \
# /usr/lib/libopencv_imgproc.so
SOURCES += main.cpp\
widget.cpp
HEADERS += widget.h
FORMS += widget.ui
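SOURCES lists a host-side main.cpp that is not shown in this post; presumably it is just the standard QtCreator widget entry point, roughly like this sketch:
#include <QApplication>
#include "widget.h"

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    Widget w;   // the receiver/display widget defined below
    w.show();
    return a.exec();
}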
widget.h
#ifndef WIDGET_H
#define WIDGET_H
#include <QWidget>
#include <QUdpSocket>
#include <QLabel>
#include <QPainter>
#include <QImage>
#include <QTimer>
#include <stdio.h> // FILE
namespace Ui {
class Widget;
}
class Widget : public QWidget
{
Q_OBJECT
public:
explicit Widget(QWidget *parent = 0);
int width,height;
~Widget();
int convert_yuv_to_rgb_pixel(int y, int u, int v);
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
void dealMsg();//slot
void getframe();
bool Udpcheck(char* src,char* dst);
private:
Ui::Widget *ui;
char* buf;
char* rgbbuf;
int getnum;
int size;
uchar *pp;
uchar * p;
QPainter *painter;
QLabel *label;
QImage *frame;
QTimer *timer;
QUdpSocket *udpSocket;
FILE *fd;
QString udpip;
qint16 udpport;
private slots:
void paint();
};
#endif // WIDGET_H
widget.cpp
#include "widget.h"
#include "ui_widget.h"
#include <QPainter>
#include <QTimer>
#include <QHostAddress>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
//YUYV (YUV422) to RGB
int Widget::convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
unsigned int in, out = 0;
unsigned int pixel_16;
unsigned char pixel_24[3];
unsigned int pixel32;
int y0, u, y1, v;
for(in = 0; in < width * height * 2; in += 4) {
pixel_16 =
yuv[in + 3] << 24 |
yuv[in + 2] << 16 |
yuv[in + 1] << 8 |
yuv[in + 0];
y0 = (pixel_16 & 0x000000ff);
u = (pixel_16 & 0x0000ff00) >> 8;
y1 = (pixel_16 & 0x00ff0000) >> 16;
v = (pixel_16 & 0xff000000) >> 24;
pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];
pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
pixel_24[0] = (pixel32 & 0x000000ff);
pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
rgb[out++] = pixel_24[0];
rgb[out++] = pixel_24[1];
rgb[out++] = pixel_24[2];
}
return 0;
}
int Widget::convert_yuv_to_rgb_pixel(int y, int u, int v)
{
unsigned int pixel32 = 0;
unsigned char *pixel = (unsigned char *)&pixel32;
int r, g, b;
r = y + (1.370705 * (v-128));
g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
b = y + (1.732446 * (u-128));
if(r > 255) r = 255;
if(g > 255) g = 255;
if(b > 255) b = 255;
if(r < 0) r = 0;
if(g < 0) g = 0;
if(b < 0) b = 0;
pixel[0] = r * 220 / 256;
pixel[1] = g * 220 / 256;
pixel[2] = b * 220 / 256;
return pixel32;
}
Widget::Widget(QWidget *parent) :
QWidget(parent),
ui(new Ui::Widget)
{
ui->setupUi(this);
udpSocket =new QUdpSocket(this);
width=1280;
height=800;
size=width*height*2;
getnum=0;
buf=(char*)malloc(size*sizeof(char));
rgbbuf=(char*)malloc(width*height*3);
//read the port from the UI line edit
qint16 port =ui->lineEditPort->text().toInt();
udpSocket->bind(port);
udpip = QString ("192.168.0.18");
udpport=9999;
setWindowTitle("PORT:9999");
connect(udpSocket,&QUdpSocket::readyRead,this,&Widget::dealMsg);
frame = new QImage(pp,width,height,QImage::Format_RGB888);
}
void Widget::paint()
{
convert_yuv_to_rgb_buffer((unsigned char*)buf,(unsigned char*)rgbbuf, width, height);
QImage img = QImage((const unsigned char*)(rgbbuf),
width, height, QImage::Format_RGB888);
//scale the image to fit the label
img = img.scaled(ui->label->size(), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
ui->label->setPixmap(QPixmap::fromImage(img));
}
void Widget::getframe()
{
QHostAddress cliaddr;
quint16 port;
udpSocket->readDatagram(buf+2000*(getnum-1),2000,&cliaddr,&port);
}
void Widget::dealMsg()
{
getnum++;
getframe();
if(getnum>=size/2000)
{
paint();
getnum=0;
}
}
Widget::~Widget()
{
free(buf);
free(rgbbuf);
delete ui;
}
bool Widget::Udpcheck(char* src,char* dst)
{
while(1)
{
int res=strcmp(src,dst);
printf("%s,%s,%d\n",src,dst,res);
if(res==0)break;
}
return true; //report that the expected marker matched
}
You can very clearly see that packet loss makes the picture tear apart; next I plan to try again using TCP under Linux.
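As a starting point for that TCP attempt, here is a minimal sketch of a blocking QTcpSocket sender that writes one whole frame at a time; the function name, timeouts, and error handling are my own placeholders, not the final design:
#include <QTcpSocket>

// Minimal sketch: send one raw frame over TCP. ip/port/frame/size are supplied by the caller.
bool sendFrameTcp(const char *ip, quint16 port, const char *frame, qint64 size)
{
    QTcpSocket sock;
    sock.connectToHost(QString(ip), port);
    if (!sock.waitForConnected(3000))
        return false;
    // TCP preserves ordering and retransmits, so no per-chunk markers are needed
    qint64 written = sock.write(frame, size);
    bool ok = (written == size) && sock.waitForBytesWritten(3000);
    sock.disconnectFromHost();
    return ok;
}
With a stream socket the per-frame "start"/"end" markers and the fixed 2000-byte chunks become unnecessary; the receiver only needs to know the frame size in order to cut the stream back into frames.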
Source code:
NetCam