//HKIPcamera.cpp
#include <cstdio>
#include <cstring>
#include <iostream>
#include <list>
#include <ctime>
#include <windows.h>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc_c.h>  // legacy C API: IplImage, cvCreateImage, cvCvtColor
#include "HCNetSDK.h"
//#include "PlayM4.h"
#include "plaympeg4.h"
//#include "global.h"
//#include "readCamera.h"
#define USECOLOR 1
using namespace cv;
using namespace std;
//--------------------------------------------
int iPicNum = 0;//Set channel NO.
LONG nPort = -1;
HWND hWnd = NULL;
CRITICAL_SECTION g_cs_frameList;
list<Mat> g_frameList;  // decoded frames shared between the callback thread and getframe()
LONG lUserID;
NET_DVR_DEVICEINFO_V30 struDeviceInfo;
HANDLE hThread;
LONG lRealPlayHandle = -1;
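// Repack a planar YV12 frame (as delivered by the decode callback) into an interleaved
// 3-bytes-per-pixel buffer with the given widthStep, so it can be handed to cvCvtColor.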
void yv12toYUV(char *outYuv, char *inYv12, int width, int height, int widthStep)
{
int col, row;
unsigned int Y, U, V;
int tmp;
int idx;
//printf("widthStep=%d.\n",widthStep);
for (row = 0; row < height; row++)
{
idx = row * widthStep;
int rowptr = row * width;
for (col = 0; col < width; col++)
{
//int colhalf = col >> 1;
tmp = (row / 2)*(width / 2) + (col / 2);
// if((row==1)&&( col>=1400 &&col<=1600))
// {
// printf("col=%d,row=%d,width=%d,tmp=%d.\n",col,row,width,tmp);
// printf("row*width+col=%d,width*height+width*height/4+tmp=%d,width*height+tmp=%d.\n",row*width+col,width*height+width*height/4+tmp,width*height+tmp);
// }
Y = (unsigned int)inYv12[row*width + col];
U = (unsigned int)inYv12[width*height + width*height / 4 + tmp];
V = (unsigned int)inYv12[width*height + tmp];
// if ((col==200))
// {
// printf("col=%d,row=%d,width=%d,tmp=%d.\n",col,row,width,tmp);
// printf("width*height+width*height/4+tmp=%d.\n",width*height+width*height/4+tmp);
// return ;
// }
if ((idx + col * 3 + 2)> (1200 * widthStep))
{
//printf("row * widthStep=%d,idx+col*3+2=%d.\n",1200 * widthStep,idx+col*3+2);
}
outYuv[idx + col * 3] = Y;
outYuv[idx + col * 3 + 1] = U;
outYuv[idx + col * 3 + 2] = V;
}
}
//printf("col=%d,row=%d.\n",col,row);
}
//Decode callback: video arrives as YUV data (YV12), audio as PCM data
void CALLBACK DecCBFun(long nPort, char * pBuf, long nSize, FRAME_INFO * pFrameInfo, long nReserved1, long nReserved2)
{
long lFrameType = pFrameInfo->nType;
if (lFrameType == T_YV12)
{
#if USECOLOR
//int start = clock();
static IplImage* pImgYCrCb = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);//3-channel image that receives the repacked YCrCb data
yv12toYUV(pImgYCrCb->imageData, pBuf, pFrameInfo->nWidth, pFrameInfo->nHeight, pImgYCrCb->widthStep);//repack the planar YV12 buffer into the interleaved image
static IplImage* pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 3);
cvCvtColor(pImgYCrCb, pImg, CV_YCrCb2RGB);
//int end = clock();
#else
static IplImage* pImg = cvCreateImage(cvSize(pFrameInfo->nWidth, pFrameInfo->nHeight), 8, 1);
memcpy(pImg->imageData, pBuf, pFrameInfo->nWidth*pFrameInfo->nHeight);
#endif
//printf("%d\n",end-start);
//Mat frametemp(pImg), frame;
//frametemp.copyTo(frame);
// cvShowImage("IPCamera",pImg);
// cvWaitKey(1);
EnterCriticalSection(&g_cs_frameList);
g_frameList.push_back(pImg);
LeaveCriticalSection(&g_cs_frameList);
#if USECOLOR
// cvReleaseImage(&pImgYCrCb);
// cvReleaseImage(&pImg);
#else
/*cvReleaseImage(&pImg);*/
#endif
//At this point pBuf holds the YV12 video data; it can be saved with fwrite(pBuf,nSize,1,Videofile);
//fwrite(pBuf,nSize,1,fp);
}
/***************
else if (lFrameType ==T_AUDIO16)
{
//At this point pBuf holds the PCM audio data; it can be saved with fwrite(pBuf,nSize,1,Audiofile);
}
else
{
}
*******************/
}
///Real-time stream callback
void CALLBACK fRealDataCallBack(LONG lRealHandle, DWORD dwDataType, BYTE *pBuffer, DWORD dwBufSize, void *pUser)
{
DWORD dRet;
switch (dwDataType)
{
case NET_DVR_SYSHEAD: //system header
if (!PlayM4_GetPort(&nPort)) //get an unused port from the play library
{
break;
}
if (dwBufSize > 0)
{
if (!PlayM4_OpenStream(nPort, pBuffer, dwBufSize, 1024 * 1024))
{
dRet = PlayM4_GetLastError(nPort);
break;
}
//set the decode callback: decode only, do not display
if (!PlayM4_SetDecCallBack(nPort, DecCBFun))
{
dRet = PlayM4_GetLastError(nPort);
break;
}
//set the decode callback: decode and display
//if (!PlayM4_SetDecCallBackEx(nPort,DecCBFun,NULL,NULL))
//{
// dRet=PlayM4_GetLastError(nPort);
// break;
//}
//start video decoding
if (!PlayM4_Play(nPort, hWnd))
{
dRet = PlayM4_GetLastError(nPort);
break;
}
//start audio decoding; requires a composite (audio + video) stream
// if (!PlayM4_PlaySound(nPort))
// {
// dRet=PlayM4_GetLastError(nPort);
// break;
// }
}
break;
case NET_DVR_STREAMDATA: //stream data
if (dwBufSize > 0 && nPort != -1)
{
BOOL inData = PlayM4_InputData(nPort, pBuffer, dwBufSize);
while (!inData)
{
Sleep(10);
inData = PlayM4_InputData(nPort, pBuffer, dwBufSize);
OutputDebugString(L"PlayM4_InputData failed \n");
}
}
break;
}
}
void CALLBACK g_ExceptionCallBack(DWORD dwType, LONG lUserID, LONG lHandle, void *pUser)
{
char tempbuf[256] = { 0 };
switch (dwType)
{
case EXCEPTION_RECONNECT: //reconnecting during preview
printf("----------reconnect--------%d\n", time(NULL));
break;
default:
break;
}
}
bool OpenCamera(char* ip, char* usr, char* password)
{
lUserID = NET_DVR_Login_V30(ip, 8000, usr, password, &struDeviceInfo);
if (lUserID < 0) //NET_DVR_Login_V30 returns -1 on failure, otherwise the user ID
{
printf("Login error, %d\n", NET_DVR_GetLastError());
NET_DVR_Cleanup();
return FALSE;
}
else
{
cout << "Log in success!" << endl;
return TRUE;
}
}
DWORD WINAPI ReadCamera(LPVOID lpParameter)
{
//---------------------------------------
//set the exception message callback
NET_DVR_SetExceptionCallBack_V30(0, NULL, g_ExceptionCallBack, NULL);
//cvNamedWindow("Mywindow", 0);
//cvNamedWindow("IPCamera", 0);
//HWND h = (HWND)cvGetWindowHandle("Mywindow");
//h = cvNamedWindow("IPCamera");
//---------------------------------------
//start live preview and set the real-time stream data callback
NET_DVR_CLIENTINFO ClientInfo;
ClientInfo.lChannel = 1; //device channel number
ClientInfo.hPlayWnd = NULL; //NULL window: the SDK only fetches the stream and does not decode it
ClientInfo.lLinkMode = 1; //Main Stream
ClientInfo.sMultiCastIP = NULL;
//use the global handle so release() can stop this preview later
lRealPlayHandle = NET_DVR_RealPlay_V30(lUserID, &ClientInfo, fRealDataCallBack, NULL, TRUE);
if (lRealPlayHandle<0)
{
printf("NET_DVR_RealPlay_V30 failed! Error number: %d\n", NET_DVR_GetLastError());
return -1;
}
else
cout << "码流回调成功!" << endl;
Sleep(-1);
if (!NET_DVR_StopRealPlay(lRealPlayHandle))
{
printf("NET_DVR_StopRealPlay error! Error number: %d\n", NET_DVR_GetLastError());
return 0;
}
NET_DVR_Logout(lUserID);
NET_DVR_Cleanup();
return 0;
}
void init(char* ip, char* usr, char* password){
//HANDLE hThread;
//LPDWORD threadID;
//---------------------------------------
// initialize the SDK
NET_DVR_Init();
//set the connection timeout and the reconnect interval
NET_DVR_SetConnectTime(2000, 1);
NET_DVR_SetReconnect(10000, true);
OpenCamera(ip, usr, password);
InitializeCriticalSection(&g_cs_frameList);
hThread = ::CreateThread(NULL, 0, ReadCamera, NULL, 0, 0);
}
Mat getframe(){
Mat frame1;
//wait until the decode callback has produced at least one frame
for (;;){
EnterCriticalSection(&g_cs_frameList);
if (!g_frameList.empty())
break; //leave the loop with the lock still held
LeaveCriticalSection(&g_cs_frameList);
Sleep(1); //avoid busy-spinning while the list is empty
}
//deep-copy the newest frame, then drop the backlog of older frames
g_frameList.back().copyTo(frame1);
g_frameList.clear();
LeaveCriticalSection(&g_cs_frameList);
//imshow("camera", frame1);
//waitKey(1);
return(frame1);
}
void release(){
::CloseHandle(hThread);
NET_DVR_StopRealPlay(lRealPlayHandle);
//stop the preview
NET_DVR_Logout(lUserID);
//log out the user
NET_DVR_Cleanup();
}
//HKIPcamera.h
#include <opencv2/opencv.hpp>
using namespace cv;
void init(char* ip, char* usr, char* password);
Mat getframe();
void release();
// HKIPcamera.i
/* SWIG interface for the HKIPcamera module: wraps init/getframe/release and
 * exposes cv::Mat to Python through the OpenCV typemaps included below. */
%module HKIPcamera
%include <opencv.i>  // cv::Mat wrapper library (e.g. opencv-swig) that defines %cv_mat__instantiate_defaults
%cv_mat__instantiate_defaults
%header %{
/* Includes the header in the wrapper code */
#include "HKIPcamera.h"
%}
%include "HKIPcamera.h"
//Change the OpenCV include path below to match your own installation
swig -ID:/opencv/build/include -python -c++ HKIPcamera.i
In the generated wrapper, add a space between the "C" of extern "C" __declspec(dllexport) and the underscore, otherwise the build reports an error.
To compile the dynamic link library, refer to these three blog posts:
Calling C++ functions from Python (SWIG, VS2013, using numpy.i to convert between Numpy and C++ arrays)
Calling the Hikvision SDK from Python under Linux to display a network camera in real time
Real-time capture from a Hikvision camera with OpenCV
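Once the wrapper has been compiled into an HKIPcamera Python extension and the HCNetSDK/PlayCtrl DLLs are reachable, it can be driven from Python roughly as follows. This is only a minimal sketch: the IP address and credentials are placeholders, and it assumes the SWIG-wrapped cv::Mat exposes its public rows/cols members.
# test_HKIPcamera.py
import time
import HKIPcamera

ip = '192.168.1.64'   # placeholder camera address
usr = 'admin'         # placeholder user name
pwd = '12345'         # placeholder password

HKIPcamera.init(ip, usr, pwd)    # log in and start the background capture thread
time.sleep(1)                    # give the decoder a moment to produce frames
frame = HKIPcamera.getframe()    # wrapped cv::Mat holding the newest decoded frame
print(frame.rows, frame.cols)    # public cv::Mat members exposed by the SWIG wrapper
HKIPcamera.release()             # stop the preview, log out, clean up the SDK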