#ifndef _VIDEOINPUT
#define _VIDEOINPUT
#pragma comment(lib,"Strmiids.lib")
#include
#include
#include
#include
#include
#include
#include
#ifndef _WIN32_WINNT
# define _WIN32_WINNT 0x501
#endif
#include
// Library version reported in the startup banner.
#define VI_VERSION 0.200
// Hard upper bound on the number of capture devices managed at once
// (sizes VDList[] and deviceNames[]).
#define VI_MAX_CAMERAS 20
// Counts below must match the mediaSubtypes[] / formatTypes[] tables
// populated in videoInput's constructor.
#define VI_NUM_TYPES 19 //DON'T TOUCH
#define VI_NUM_FORMATS 18 //DON'T TOUCH
// Physical connection types accepted by setupDevice(... connection).
#define VI_COMPOSITE 0
#define VI_S_VIDEO 1
#define VI_TUNER 2
#define VI_USB 3
#define VI_1394 4
// Analog TV format indices for setFormat() (map into formatTypes[]).
#define VI_NTSC_M 0
#define VI_PAL_B 1
#define VI_PAL_D 2
#define VI_PAL_G 3
#define VI_PAL_H 4
#define VI_PAL_I 5
#define VI_PAL_M 6
#define VI_PAL_N 7
#define VI_PAL_NC 8
#define VI_SECAM_B 9
#define VI_SECAM_D 10
#define VI_SECAM_G 11
#define VI_SECAM_H 12
#define VI_SECAM_K 13
#define VI_SECAM_K1 14
#define VI_SECAM_L 15
#define VI_NTSC_M_J 16
#define VI_NTSC_433 17
// Media subtype indices for setRequestedMediaSubType() (map into
// mediaSubtypes[]).
#define VI_MEDIASUBTYPE_RGB24 0
#define VI_MEDIASUBTYPE_RGB32 1
#define VI_MEDIASUBTYPE_RGB555 2
#define VI_MEDIASUBTYPE_RGB565 3
#define VI_MEDIASUBTYPE_YUY2 4
#define VI_MEDIASUBTYPE_YVYU 5
#define VI_MEDIASUBTYPE_YUYV 6
#define VI_MEDIASUBTYPE_IYUV 7
#define VI_MEDIASUBTYPE_UYVY 8
#define VI_MEDIASUBTYPE_YV12 9
#define VI_MEDIASUBTYPE_YVU9 10
#define VI_MEDIASUBTYPE_Y411 11
#define VI_MEDIASUBTYPE_Y41P 12
#define VI_MEDIASUBTYPE_Y211 13
#define VI_MEDIASUBTYPE_AYUV 14
#define VI_MEDIASUBTYPE_Y800 15
#define VI_MEDIASUBTYPE_Y8 16
#define VI_MEDIASUBTYPE_GREY 17
#define VI_MEDIASUBTYPE_MJPG 18
// Forward declarations of the DirectShow COM interfaces used by the
// library, so this header does not force the full DirectShow headers
// on every includer.
struct ICaptureGraphBuilder2;
struct IGraphBuilder;
struct IBaseFilter;
struct IAMCrossbar;
struct IMediaControl;
struct ISampleGrabber;
struct IMediaEventEx;
struct IAMStreamConfig;
struct _AMMediaType;
class SampleGrabberCallback;
typedef _AMMediaType AM_MEDIA_TYPE;
static int comInitCount = 0; // COM init reference count (comInit/comUnInit pair up)
// Per-device state: one videoDevice per camera slot in videoInput::VDList.
// Owns the DirectShow graph objects for that device and the pixel buffers
// frames are copied into.
class videoDevice{
public:
videoDevice();
// Allocates pixels/pBuffer for a w x h RGB24 frame; may only be called once.
void setSize(int w, int h);
// Recursively disconnects/removes everything downstream of pBF from pGraph.
void NukeDownstream(IBaseFilter *pBF);
// Removes every filter from pGraph (used during teardown).
void destroyGraph();
~videoDevice();
int videoSize;   // width * height * 3 (bytes per RGB24 frame)
int width;
int height;
int tryWidth;    // size requested via setAttemptCaptureSize before setup
int tryHeight;
// DirectShow graph objects (NULL until setup; released in the destructor).
ICaptureGraphBuilder2 *pCaptureGraph;
IGraphBuilder *pGraph;
IMediaControl *pControl;
IBaseFilter *pVideoInputFilter;
IBaseFilter *pGrabberF;
IBaseFilter * pDestFilter;
IAMStreamConfig *streamConf;
ISampleGrabber * pGrabber;
AM_MEDIA_TYPE * pAmMediaType;
IMediaEventEx * pMediaEvent;
GUID videoType;      // requested media subtype (e.g. MEDIASUBTYPE_RGB24)
long formatType;     // analog TV standard selected via setFormat
SampleGrabberCallback * sgCallback; // receives frames when callback mode is on
bool tryDiffSize;
bool useCrossbar;
bool readyToCapture; // true once the graph is running
bool sizeSet;        // true once setSize allocated the buffers
bool setupStarted;
bool specificFormat;
bool autoReconnect;  // reconnect automatically when frames stop arriving
int nFramesForReconnect;
unsigned long nFramesRunning;
int connection;      // PhysConn_* value for crossbar routing
int storeConn;
int myID;
long requestedFrameTime; // frame period in 100ns units, -1 = default
char nDeviceName[255];
WCHAR wDeviceName[255];
unsigned char * pixels;  // last converted frame (allocated in setSize)
char * pBuffer;          // scratch buffer for non-callback GetCurrentBuffer
};
class videoInput{
public:
videoInput();
~videoInput();
static void setVerbose(bool _verbose);
static void setComMultiThreaded(bool bMulti);
static int listDevices(bool silent = false);
static std::vector string> getDeviceList();
static const char * getDeviceName(int deviceID);
static int getDeviceIDFromName(const char * name);
void setUseCallback(bool useCallback);
void setIdealFramerate(int deviceID, int idealFramerate);
void setAutoReconnectOnFreeze(int deviceNumber, bool doReconnect, int numMissedFramesBeforeReconnect);
bool setupDevice(int deviceID);
bool setupDevice(int deviceID, int w, int h);
bool setupDevice(int deviceID, int connection);
bool setupDevice(int deviceID, int w, int h, int connection);
bool setFormat(int deviceNumber, int format);
void setRequestedMediaSubType(int mediatype);
bool isFrameNew(int deviceID);
bool isDeviceSetup(int deviceID);
unsigned char * getPixels(int deviceID, bool flipRedAndBlue = true, bool flipImage = false);
bool getPixels(int id, unsigned char * pixels, bool flipRedAndBlue = true, bool flipImage = false);
void showSettingsWindow(int deviceID);
bool setVideoSettingFilter(int deviceID, long Property, long lValue, long Flags = NULL, bool useDefaultValue = false);
bool setVideoSettingFilterPct(int deviceID, long Property, float pctValue, long Flags = NULL);
bool getVideoSettingFilter(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long ¤tValue, long &flags, long &defaultValue);
bool setVideoSettingCamera(int deviceID, long Property, long lValue, long Flags = NULL, bool useDefaultValue = false);
bool setVideoSettingCameraPct(int deviceID, long Property, float pctValue, long Flags = NULL);
bool getVideoSettingCamera(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long ¤tValue, long &flags, long &defaultValue);
int getWidth(int deviceID);
int getHeight(int deviceID);
int getSize(int deviceID);
void stopDevice(int deviceID);
bool restartDevice(int deviceID);
int devicesFound;
long propBrightness;
long propContrast;
long propHue;
long propSaturation;
long propSharpness;
long propGamma;
long propColorEnable;
long propWhiteBalance;
long propBacklightCompensation;
long propGain;
long propPan;
long propTilt;
long propRoll;
long propZoom;
long propExposure;
long propIris;
long propFocus;
private:
void setPhyCon(int deviceID, int conn);
void setAttemptCaptureSize(int deviceID, int w, int h);
bool setup(int deviceID);
void processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip);
int start(int deviceID, videoDevice * VD);
int getDeviceCount();
void getMediaSubtypeAsString(GUID type, char * typeAsString);
HRESULT getDevice(IBaseFilter **pSrcFilter, int deviceID, WCHAR * wDeviceName, char * nDeviceName);
static HRESULT ShowFilterPropertyPages(IBaseFilter *pFilter);
HRESULT SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath);
HRESULT routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter **pVidInFilter, int conType, GUID captureMode);
static bool comInit();
static bool comUnInit();
int connection;
int callbackSetCount;
bool bCallback;
GUID CAPTURE_MODE;
GUID requestedMediaSubType;
GUID MEDIASUBTYPE_Y800;
GUID MEDIASUBTYPE_Y8;
GUID MEDIASUBTYPE_GREY;
videoDevice * VDList[VI_MAX_CAMERAS];
GUID mediaSubtypes[VI_NUM_TYPES];
long formatTypes[VI_NUM_FORMATS];
static void __cdecl basicThread(void * objPtr);
static char deviceNames[VI_MAX_CAMERAS][255];
};
#endif
#include
#include "videoInput.h"
#include
#include
#pragma include_alias( "dxtrans.h", "qedit.h" )
#define __IDxtCompositor_INTERFACE_DEFINED__
#define __IDxtAlphaSetter_INTERFACE_DEFINED__
#define __IDxtJpeg_INTERFACE_DEFINED__
#define __IDxtKey_INTERFACE_DEFINED__
#include
#include
#include
#include
#ifndef HEADER
#define HEADER(pVideoInfo) (&(((VIDEOINFOHEADER *) (pVideoInfo))->bmiHeader))
#endif
// Local re-declaration of the DirectShow ISampleGrabberCB interface
// (removed from the Windows SDK with qedit.h).  The GUID must stay
// exactly as published so QueryInterface on real filters still works.
MIDL_INTERFACE("0579154A-2B53-4994-B0D0-E773148EFF85")
ISampleGrabberCB : public IUnknown
{
public:
// Called by the grabber with the raw media sample (used by this library).
virtual HRESULT STDMETHODCALLTYPE SampleCB(
double SampleTime,
IMediaSample *pSample) = 0;
// Alternative buffer-copy callback (unused here; returns E_NOTIMPL).
virtual HRESULT STDMETHODCALLTYPE BufferCB(
double SampleTime,
BYTE *pBuffer,
long BufferLen) = 0;
};
// Local re-declaration of the DirectShow ISampleGrabber interface
// (removed from the Windows SDK with qedit.h).  Method order and the
// GUID must match the published interface exactly.
MIDL_INTERFACE("6B652FFF-11FE-4fce-92AD-0266B5D7C78F")
ISampleGrabber : public IUnknown
{
public:
virtual HRESULT STDMETHODCALLTYPE SetOneShot(
BOOL OneShot) = 0;
virtual HRESULT STDMETHODCALLTYPE SetMediaType(
const AM_MEDIA_TYPE *pType) = 0;
virtual HRESULT STDMETHODCALLTYPE GetConnectedMediaType(
AM_MEDIA_TYPE *pType) = 0;
virtual HRESULT STDMETHODCALLTYPE SetBufferSamples(
BOOL BufferThem) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentBuffer(
long *pBufferSize,
long *pBuffer) = 0;
virtual HRESULT STDMETHODCALLTYPE GetCurrentSample(
IMediaSample **ppSample) = 0;
virtual HRESULT STDMETHODCALLTYPE SetCallback(
ISampleGrabberCB *pCallback,
long WhichMethodToCallback) = 0;
};
// CLSIDs/IIDs normally declared by qedit.h; resolved from strmiids.lib.
EXTERN_C const CLSID CLSID_SampleGrabber;
EXTERN_C const IID IID_ISampleGrabber;
EXTERN_C const CLSID CLSID_NullRenderer;
// File-scope library settings: console logging and COM threading mode.
static bool verbose = true;
static bool VI_COM_MULTI_THREADED = false;
// Frees the format block and pUnk held inside an AM_MEDIA_TYPE without
// freeing the struct itself (equivalent of the SDK's FreeMediaType).
void MyFreeMediaType(AM_MEDIA_TYPE& mt){
if (mt.cbFormat != 0)
{
CoTaskMemFree((PVOID)mt.pbFormat);
mt.cbFormat = 0;
mt.pbFormat = NULL;
}
if (mt.pUnk != NULL)
{
// pUnk should not be used, but release it just in case.
mt.pUnk->Release();
mt.pUnk = NULL;
}
}
// Frees an AM_MEDIA_TYPE allocated with CoTaskMemAlloc, including its
// inner format block (equivalent of the SDK's DeleteMediaType).
// Safe to call with NULL.
void MyDeleteMediaType(AM_MEDIA_TYPE *pmt)
{
    if (pmt == NULL) return;
    MyFreeMediaType(*pmt);
    CoTaskMemFree(pmt);
}
//----------------------------------------------------------------------
// Receives frames from the DirectShow sample grabber on the streaming
// thread and copies them into an internal buffer guarded by a critical
// section.  hEvent is a manual-reset event signalling "a new frame is
// ready"; the consumer (videoInput::getPixels) resets it after reading.
//
// FIXES: QueryInterface contained the non-compiling "static_cast(this)"
// (cast target lost to text corruption) - restored to the implemented
// interface; pixels/ptrBuffer/numBytes were uninitialized until
// setupBuffer() ran.
//----------------------------------------------------------------------
class SampleGrabberCallback : public ISampleGrabberCB{
public:
// Sets up the lock and the manual-reset "new frame" event.
SampleGrabberCallback(){
    InitializeCriticalSection(&critSection);
    freezeCheck = 0;
    bufferSetup = false;
    newFrame = false;
    latestBufferLength = 0;
    numBytes = 0;       // FIX: was uninitialized until setupBuffer()
    pixels = NULL;      // FIX: was uninitialized
    ptrBuffer = NULL;   // FIX: was uninitialized
    hEvent = CreateEvent(NULL, true, false, NULL);
}
~SampleGrabberCallback(){
    ptrBuffer = NULL;
    DeleteCriticalSection(&critSection);
    CloseHandle(hEvent);
    if(bufferSetup){
        delete [] pixels;
    }
}
// Allocates the frame buffer; returns false if already set up.
bool setupBuffer(int numBytesIn){
    if(bufferSetup){
        return false;
    }else{
        numBytes = numBytesIn;
        pixels = new unsigned char[numBytes];
        bufferSetup = true;
        newFrame = false;
        latestBufferLength = 0;
    }
    return true;
}
// Lifetime is owned by videoDevice, not COM - ref counting is a no-op.
STDMETHODIMP_(ULONG) AddRef() { return 1; }
STDMETHODIMP_(ULONG) Release() { return 2; }
STDMETHODIMP QueryInterface(REFIID riid, void **ppvObject){
    // FIX: cast to the implemented interface (was "static_cast(this)").
    *ppvObject = static_cast<ISampleGrabberCB*>(this);
    return S_OK;
}
// Streaming-thread entry: copy the sample into pixels under the lock
// and signal hEvent.  Skips work while hEvent is still signalled
// (the previous frame has not been consumed yet).
STDMETHODIMP SampleCB(double Time, IMediaSample *pSample){
    if(WaitForSingleObject(hEvent, 0) == WAIT_OBJECT_0) return S_OK;
    HRESULT hr = pSample->GetPointer(&ptrBuffer);
    if(hr == S_OK){
        latestBufferLength = pSample->GetActualDataLength();
        if(latestBufferLength == numBytes){
            EnterCriticalSection(&critSection);
            memcpy(pixels, ptrBuffer, latestBufferLength);
            newFrame = true;
            freezeCheck = 1;   // reset the freeze detector - frames are flowing
            LeaveCriticalSection(&critSection);
            SetEvent(hEvent);
        }else{
            printf("ERROR: SampleCB() - buffer sizes do not match\n");
        }
    }
    return S_OK;
}
// Unused callback variant.
STDMETHODIMP BufferCB(double Time, BYTE *pBuffer, long BufferLen){
    return E_NOTIMPL;
}
int freezeCheck;           // incremented by isFrameNew, reset on each frame
int latestBufferLength;
int numBytes;              // expected frame size in bytes
bool newFrame;             // guarded by critSection
bool bufferSetup;
unsigned char * pixels;    // frame buffer (owned; size numBytes)
unsigned char * ptrBuffer; // non-owning pointer into the last IMediaSample
CRITICAL_SECTION critSection;
HANDLE hEvent;             // manual-reset: signalled while a frame awaits pickup
};
// Initializes a device slot to a safe "not set up" state.
// FIX: pixels and pBuffer were left uninitialized; although the
// destructor only frees them when sizeSet is true, NULL-initializing
// them removes any chance of dereferencing garbage pointers.
videoDevice::videoDevice(){
    pCaptureGraph = NULL;
    pGraph = NULL;
    pControl = NULL;
    pVideoInputFilter = NULL;
    pGrabber = NULL;
    pDestFilter = NULL;
    pGrabberF = NULL;
    pMediaEvent = NULL;
    streamConf = NULL;
    pAmMediaType = NULL;
    pixels = NULL;     // FIX: was uninitialized (allocated later in setSize)
    pBuffer = NULL;    // FIX: was uninitialized (allocated later in setSize)
    sgCallback = new SampleGrabberCallback();
    sgCallback->newFrame = false;
    videoType = MEDIASUBTYPE_RGB24;           // default capture subtype
    connection = PhysConn_Video_Composite;    // default crossbar routing
    storeConn = 0;
    videoSize = 0;
    width = 0;
    height = 0;
    tryWidth = 0;
    tryHeight = 0;
    nFramesForReconnect= 10000;
    nFramesRunning = 0;
    myID = -1;
    tryDiffSize = false;
    useCrossbar = false;
    readyToCapture = false;
    sizeSet = false;
    setupStarted = false;
    specificFormat = false;
    autoReconnect = false;
    requestedFrameTime = -1;  // -1 = leave the driver's default frame rate
    memset(wDeviceName, 0, sizeof(WCHAR) * 255);
    memset(nDeviceName, 0, sizeof(char) * 255);
}
// Records the capture dimensions and allocates the RGB24 frame buffers.
// May only be called once per device; repeated calls are rejected.
void videoDevice::setSize(int w, int h){
    if(sizeSet){
        if(verbose)printf("SETUP: Error device size should not be set more than once \n");
        return;
    }
    width     = w;
    height    = h;
    videoSize = w * h * 3;   // 3 bytes per pixel (RGB24)
    sizeSet   = true;
    pixels  = new unsigned char[videoSize];
    pBuffer = new char[videoSize];
    memset(pixels, 0, videoSize);
    sgCallback->setupBuffer(videoSize);
}
// Recursively disconnects and removes from pGraph every filter that sits
// downstream of pBF.  For each of pBF's pins, if the pin is connected and
// the far end is an input pin, the far filter's own downstream chain is
// nuked first, then both pin ends are disconnected and the far filter is
// removed from the graph.  Called during teardown so the capture source
// releases cleanly.
void videoDevice::NukeDownstream(IBaseFilter *pBF){
IPin *pP, *pTo;
ULONG u;
IEnumPins *pins = NULL;
PIN_INFO pininfo;
HRESULT hr = pBF->EnumPins(&pins);
pins->Reset();
while (hr == NOERROR)
{
hr = pins->Next(1, &pP, &u);
if (hr == S_OK && pP)
{
pP->ConnectedTo(&pTo);
if (pTo)
{
hr = pTo->QueryPinInfo(&pininfo);
if (hr == NOERROR)
{
if (pininfo.dir == PINDIR_INPUT)
{
// The far end is a downstream filter: clear it out first,
// then detach it from the graph.
NukeDownstream(pininfo.pFilter);
pGraph->Disconnect(pTo);
pGraph->Disconnect(pP);
pGraph->RemoveFilter(pininfo.pFilter);
}
// QueryPinInfo AddRef'd the filter; balance it.
pininfo.pFilter->Release();
pininfo.pFilter = NULL;
}
pTo->Release();
}
pP->Release();
}
}
if (pins) pins->Release();
}
// Removes every filter from pGraph, one at a time.  Each pass re-creates
// the filter enumerator (required: removing a filter invalidates it),
// removes the first filter found, and loops until the enumerator comes up
// empty (hr is forced non-zero to end the loop).
void videoDevice::destroyGraph(){
HRESULT hr = NULL;
int FuncRetval=0;
int NumFilters=0;
int i = 0;
while (hr == NOERROR)
{
IEnumFilters * pEnum = 0;
ULONG cFetched;
hr = pGraph->EnumFilters(&pEnum);
if (FAILED(hr)) { if(verbose)printf("SETUP: pGraph->EnumFilters() failed. \n"); return; }
IBaseFilter * pFilter = NULL;
if (pEnum->Next(1, &pFilter, &cFetched) == S_OK)
{
FILTER_INFO FilterInfo={0};
hr = pFilter->QueryFilterInfo(&FilterInfo);
// QueryFilterInfo AddRef'd the graph pointer; release it immediately.
FilterInfo.pGraph->Release();
// Narrow the filter's WCHAR name to a char buffer for logging.
int count = 0;
char buffer[255];
memset(buffer, 0, 255 * sizeof(char));
while( FilterInfo.achName[count] != 0x00 )
{
buffer[count] = static_cast<char>(FilterInfo.achName[count]);
count++;
}
if(verbose)printf("SETUP: removing filter %s...\n", buffer);
hr = pGraph->RemoveFilter(pFilter);
if (FAILED(hr)) { if(verbose)printf("SETUP: pGraph->RemoveFilter() failed. \n"); return; }
if(verbose)printf("SETUP: filter removed %s \n",buffer);
pFilter->Release();
pFilter = NULL;
}
else hr = 1; // no more filters: force the while condition false
pEnum->Release();
pEnum = NULL;
i++;
}
return;
}
// Tears down the capture graph and frees every owned resource.  The
// release order matters: the grabber callback is detached first, the
// graph is stopped, downstream filters are nuked, individual interfaces
// are released, and the graph objects go last.
videoDevice::~videoDevice(){
if(setupStarted){ if(verbose)printf("\nSETUP: Disconnecting device %i\n", myID); }
else{
// Device was never set up: only the callback object exists.
if(sgCallback){
sgCallback->Release();
delete sgCallback;
}
return;
}
HRESULT HR = NULL;
// Stop the grabber from calling back into us, then free the callback
// and the frame buffers that setSize() allocated.
if( (sgCallback) && (pGrabber) )
{
pGrabber->SetCallback(NULL, 1);
if(verbose)printf("SETUP: freeing Grabber Callback\n");
sgCallback->Release();
if(sizeSet){
delete[] pixels;
delete[] pBuffer;
}
delete sgCallback;
}
// Halt the graph before releasing anything it references.
if( (pControl) )
{
HR = pControl->Pause();
if (FAILED(HR)) if(verbose)printf("ERROR - Could not pause pControl\n");
HR = pControl->Stop();
if (FAILED(HR)) if(verbose)printf("ERROR - Could not stop pControl\n");
}
// Disconnect everything downstream of the capture source.
if( (pVideoInputFilter) )NukeDownstream(pVideoInputFilter);
if( (pDestFilter) ){ if(verbose)printf("SETUP: freeing Renderer \n");
(pDestFilter)->Release();
(pDestFilter) = 0;
}
if( (pVideoInputFilter) ){ if(verbose)printf("SETUP: freeing Capture Source \n");
(pVideoInputFilter)->Release();
(pVideoInputFilter) = 0;
}
if( (pGrabberF) ){ if(verbose)printf("SETUP: freeing Grabber Filter \n");
(pGrabberF)->Release();
(pGrabberF) = 0;
}
if( (pGrabber) ){ if(verbose)printf("SETUP: freeing Grabber \n");
(pGrabber)->Release();
(pGrabber) = 0;
}
if( (pControl) ){ if(verbose)printf("SETUP: freeing Control \n");
(pControl)->Release();
(pControl) = 0;
}
if( (pMediaEvent) ){ if(verbose)printf("SETUP: freeing Media Event \n");
(pMediaEvent)->Release();
(pMediaEvent) = 0;
}
if( (streamConf) ){ if(verbose)printf("SETUP: freeing Stream \n");
(streamConf)->Release();
(streamConf) = 0;
}
if( (pAmMediaType) ){ if(verbose)printf("SETUP: freeing Media Type \n");
MyDeleteMediaType(pAmMediaType);
}
// NOTE(review): pMediaEvent was already released above; this second
// block is redundant but harmless because the pointer is now 0.
if((pMediaEvent)){
if(verbose)printf("SETUP: freeing Media Event \n");
(pMediaEvent)->Release();
(pMediaEvent) = 0;
}
// Empty the graph, then release the graph builders themselves.
if( (pGraph) )destroyGraph();
if( (pCaptureGraph) ){ if(verbose)printf("SETUP: freeing Capture Graph \n");
(pCaptureGraph)->Release();
(pCaptureGraph) = 0;
}
if( (pGraph) ){ if(verbose)printf("SETUP: freeing Main Graph \n");
(pGraph)->Release();
(pGraph) = 0;
}
if(verbose)printf("SETUP: Device %i disconnected and freed\n\n",myID);
}
// Fills *guid from its four GUID components ({Data1-Data2-Data3-
// Data4[0..7]}).  Used to build media-subtype GUIDs (Y800, Y8, GREY)
// that the standard headers do not define.
void makeGUID( GUID *guid, unsigned long Data1, unsigned short Data2, unsigned short Data3,
unsigned char b0, unsigned char b1, unsigned char b2, unsigned char b3,
unsigned char b4, unsigned char b5, unsigned char b6, unsigned char b7 ){
    guid->Data1 = Data1;
    guid->Data2 = Data2;
    guid->Data3 = Data3;
    const unsigned char tail[8] = { b0, b1, b2, b3, b4, b5, b6, b7 };
    for(int i = 0; i < 8; i++){
        guid->Data4[i] = tail[i];
    }
}
// Initializes COM, allocates one videoDevice per slot, and builds the
// media-subtype and TV-format lookup tables used by the rest of the API.
// FIX: the device-allocation loop had been mangled to
// "for(int i=0; inew videoDevice();" - restored to iterate over all
// VI_MAX_CAMERAS slots.
videoInput::videoInput(){
    comInit();
    devicesFound = 0;
    callbackSetCount = 0;
    bCallback = true;
    requestedMediaSubType = MEDIASUBTYPE_RGB24;
    // FIX: restore the corrupted allocation loop.
    for(int i = 0; i < VI_MAX_CAMERAS; i++) VDList[i] = new videoDevice();
    if(verbose)printf("\n***** VIDEOINPUT LIBRARY - %2.04f - TFW2013 *****\n\n",VI_VERSION);
    // Subtypes missing from the standard headers, built by FOURCC.
    makeGUID( &MEDIASUBTYPE_Y800, 0x30303859, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 );
    makeGUID( &MEDIASUBTYPE_Y8, 0x20203859, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 );
    makeGUID( &MEDIASUBTYPE_GREY, 0x59455247, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 );
    // Table indexed by the VI_MEDIASUBTYPE_* defines.
    mediaSubtypes[0] = MEDIASUBTYPE_RGB24;
    mediaSubtypes[1] = MEDIASUBTYPE_RGB32;
    mediaSubtypes[2] = MEDIASUBTYPE_RGB555;
    mediaSubtypes[3] = MEDIASUBTYPE_RGB565;
    mediaSubtypes[4] = MEDIASUBTYPE_YUY2;
    mediaSubtypes[5] = MEDIASUBTYPE_YVYU;
    mediaSubtypes[6] = MEDIASUBTYPE_YUYV;
    mediaSubtypes[7] = MEDIASUBTYPE_IYUV;
    mediaSubtypes[8] = MEDIASUBTYPE_UYVY;
    mediaSubtypes[9] = MEDIASUBTYPE_YV12;
    mediaSubtypes[10] = MEDIASUBTYPE_YVU9;
    mediaSubtypes[11] = MEDIASUBTYPE_Y411;
    mediaSubtypes[12] = MEDIASUBTYPE_Y41P;
    mediaSubtypes[13] = MEDIASUBTYPE_Y211;
    mediaSubtypes[14] = MEDIASUBTYPE_AYUV;
    mediaSubtypes[15] = MEDIASUBTYPE_Y800;
    mediaSubtypes[16] = MEDIASUBTYPE_Y8;
    mediaSubtypes[17] = MEDIASUBTYPE_GREY;
    mediaSubtypes[18] = MEDIASUBTYPE_MJPG;
    // Table indexed by the VI_NTSC_* / VI_PAL_* / VI_SECAM_* defines.
    formatTypes[VI_NTSC_M] = AnalogVideo_NTSC_M;
    formatTypes[VI_NTSC_M_J] = AnalogVideo_NTSC_M_J;
    formatTypes[VI_NTSC_433] = AnalogVideo_NTSC_433;
    formatTypes[VI_PAL_B] = AnalogVideo_PAL_B;
    formatTypes[VI_PAL_D] = AnalogVideo_PAL_D;
    formatTypes[VI_PAL_G] = AnalogVideo_PAL_G;
    formatTypes[VI_PAL_H] = AnalogVideo_PAL_H;
    formatTypes[VI_PAL_I] = AnalogVideo_PAL_I;
    formatTypes[VI_PAL_M] = AnalogVideo_PAL_M;
    formatTypes[VI_PAL_N] = AnalogVideo_PAL_N;
    formatTypes[VI_PAL_NC] = AnalogVideo_PAL_N_COMBO;
    formatTypes[VI_SECAM_B] = AnalogVideo_SECAM_B;
    formatTypes[VI_SECAM_D] = AnalogVideo_SECAM_D;
    formatTypes[VI_SECAM_G] = AnalogVideo_SECAM_G;
    formatTypes[VI_SECAM_H] = AnalogVideo_SECAM_H;
    formatTypes[VI_SECAM_K] = AnalogVideo_SECAM_K;
    formatTypes[VI_SECAM_K1] = AnalogVideo_SECAM_K1;
    formatTypes[VI_SECAM_L] = AnalogVideo_SECAM_L;
    // Public aliases for the property IDs used by set/getVideoSetting*.
    propBrightness = VideoProcAmp_Brightness;
    propContrast = VideoProcAmp_Contrast;
    propHue = VideoProcAmp_Hue;
    propSaturation = VideoProcAmp_Saturation;
    propSharpness = VideoProcAmp_Sharpness;
    propGamma = VideoProcAmp_Gamma;
    propColorEnable = VideoProcAmp_ColorEnable;
    propWhiteBalance = VideoProcAmp_WhiteBalance;
    propBacklightCompensation = VideoProcAmp_BacklightCompensation;
    propGain = VideoProcAmp_Gain;
    propPan = CameraControl_Pan;
    propTilt = CameraControl_Tilt;
    propRoll = CameraControl_Roll;
    propZoom = CameraControl_Zoom;
    propExposure = CameraControl_Exposure;
    propIris = CameraControl_Iris;
    propFocus = CameraControl_Focus;
}
// Enables/disables console logging for the whole library (file-scope flag).
void videoInput::setVerbose(bool _verbose){
verbose = _verbose;
}
// Switches the COM apartment mode.  When the mode actually changes, the
// current COM initialization is unwound (comUnInit until it reports done,
// with a safety cap) and COM is re-initialized in the new mode.
void videoInput::setComMultiThreaded(bool bMulti){
    if( bMulti == VI_COM_MULTI_THREADED ) return;
    VI_COM_MULTI_THREADED = bMulti;
    int remaining = 100;  // safety cap against a stuck refcount
    while( !comUnInit() && remaining > 0 ){
        remaining--;
    }
    comInit();
}
// Chooses callback-based vs. polling capture.  Must be called before the
// first setupDevice(); later calls are rejected with an error message.
void videoInput::setUseCallback(bool useCallback){
    if(callbackSetCount != 0){
        printf("ERROR: setUseCallback can only be called before setup\n");
        return;
    }
    bCallback = useCallback;
    callbackSetCount = 1;
}
// Requests a frame rate for a device that has not been set up yet.
// FIX: a negative deviceNumber previously indexed VDList out of bounds;
// the lower bound is now checked as well.
void videoInput::setIdealFramerate(int deviceNumber, int idealFramerate){
    if(deviceNumber < 0 || deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return;
    if( idealFramerate > 0 ){
        // DirectShow frame times are in 100ns units; 10,000,000 per second.
        VDList[deviceNumber]->requestedFrameTime = (unsigned long)(10000000 / idealFramerate);
    }
}
// Enables automatic device restart when frames stop arriving for
// numMissedFramesBeforeReconnect consecutive isFrameNew() polls.
// FIX: a negative deviceNumber previously indexed VDList out of bounds.
void videoInput::setAutoReconnectOnFreeze(int deviceNumber, bool doReconnect, int numMissedFramesBeforeReconnect){
    if(deviceNumber < 0 || deviceNumber >= VI_MAX_CAMERAS) return;
    VDList[deviceNumber]->autoReconnect = doReconnect;
    VDList[deviceNumber]->nFramesForReconnect = numMissedFramesBeforeReconnect;
}
// Builds and starts the capture graph for deviceNumber with default
// size/connection.  Fails if the ID is out of range or already running.
bool videoInput::setupDevice(int deviceNumber){
    if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
    return setup(deviceNumber);
}
// Sets up a device using a specific physical connection (VI_COMPOSITE,
// VI_S_VIDEO, ...) routed through the crossbar.
bool videoInput::setupDevice(int deviceNumber, int connection){
    if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
    setPhyCon(deviceNumber, connection);
    return setup(deviceNumber);
}
// Sets up a device, asking the driver for a w x h capture size.
bool videoInput::setupDevice(int deviceNumber, int w, int h){
    if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
    setAttemptCaptureSize(deviceNumber, w, h);
    return setup(deviceNumber);
}
// Sets up a device with both a requested capture size and a specific
// physical connection.
bool videoInput::setupDevice(int deviceNumber, int w, int h, int connection){
    if(deviceNumber >= VI_MAX_CAMERAS || VDList[deviceNumber]->readyToCapture) return false;
    setAttemptCaptureSize(deviceNumber, w, h);
    setPhyCon(deviceNumber, connection);
    return setup(deviceNumber);
}
// Selects an analog TV standard (VI_NTSC_M, VI_PAL_B, ...) on an already
// running device via IAMAnalogVideoDecoder.  Returns true only when the
// decoder reports the format as available and accepts it.
bool videoInput::setFormat(int deviceNumber, int format){
if(deviceNumber >= VI_MAX_CAMERAS || !VDList[deviceNumber]->readyToCapture) return false;
bool returnVal = false;
if(format >= 0 && format < VI_NUM_FORMATS){
VDList[deviceNumber]->formatType = formatTypes[format];
VDList[deviceNumber]->specificFormat = true;
if(VDList[deviceNumber]->specificFormat){
// Re-bind the capture source so FindInterface can locate the decoder.
HRESULT hr = getDevice(&VDList[deviceNumber]->pVideoInputFilter, deviceNumber, VDList[deviceNumber]->wDeviceName, VDList[deviceNumber]->nDeviceName);
if(hr != S_OK){
return false;
}
IAMAnalogVideoDecoder *pVideoDec = NULL;
hr = VDList[deviceNumber]->pCaptureGraph->FindInterface(NULL, &MEDIATYPE_Video, VDList[deviceNumber]->pVideoInputFilter, IID_IAMAnalogVideoDecoder, (void **)&pVideoDec);
// The temporary filter reference is no longer needed either way.
if(VDList[deviceNumber]->pVideoInputFilter)VDList[deviceNumber]->pVideoInputFilter->Release();
if(VDList[deviceNumber]->pVideoInputFilter)VDList[deviceNumber]->pVideoInputFilter = NULL;
if(FAILED(hr)){
printf("SETUP: couldn't set requested format\n");
}else{
long lValue = 0;
// Only attempt the switch if the decoder advertises this standard.
hr = pVideoDec->get_AvailableTVFormats(&lValue);
if( SUCCEEDED(hr) && (lValue & VDList[deviceNumber]->formatType) )
{
hr = pVideoDec->put_TVFormat(VDList[deviceNumber]->formatType);
if( FAILED(hr) ){
printf("SETUP: couldn't set requested format\n");
}else{
returnVal = true;
}
}
pVideoDec->Release();
pVideoDec = NULL;
}
}
}
return returnVal;
}
// Storage for device names, filled by listDevices(); indexed by device ID.
char videoInput::deviceNames[VI_MAX_CAMERAS][255]={{0}};
// Returns the cached name for deviceID (filled by listDevices), or NULL
// for an out-of-range ID.
// FIX: a negative deviceID previously indexed deviceNames out of bounds.
const char * videoInput::getDeviceName(int deviceID){
    if( deviceID < 0 || deviceID >= VI_MAX_CAMERAS ){
        return NULL;
    }
    return deviceNames[deviceID];
}
// Looks up the device ID whose name matches `name`; returns -1 if no
// device matches (or no devices exist).
// FIX: the original compared `deviceNames[i] == name`, which compares
// POINTERS, not string contents, so it could never match a caller's
// string.  Now compares with strcmp.
int videoInput::getDeviceIDFromName(const char * name) {
    if (name == NULL) return -1;
    if (listDevices(true) == 0) return -1;
    int deviceID = -1;
    for (int i = 0; i < VI_MAX_CAMERAS; i++) {
        if (strcmp(deviceNames[i], name) == 0) {
            deviceID = i;
            break;
        }
    }
    return deviceID;
}
// Enumerates devices (silently) and returns their names as strings.
// FIX: removed the needless `(char *)` cast that discarded the const
// qualifier returned by getDeviceName().
std::vector <std::string> videoInput::getDeviceList(){
    int numDev = videoInput::listDevices(true);
    std::vector <std::string> deviceList;
    for(int i = 0; i < numDev; i++){
        const char * name = videoInput::getDeviceName(i);
        if( name == NULL )break;
        deviceList.push_back(name);
    }
    return deviceList;
}
int videoInput::listDevices(bool silent){
comInit();
if(!silent)printf("\nVIDEOINPUT SPY MODE!\n\n");
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
int deviceCounter = 0;
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
reinterpret_cast<void**>(&pDevEnum));
if (SUCCEEDED(hr))
{
hr = pDevEnum->CreateClassEnumerator(
CLSID_VideoInputDeviceCategory,
&pEnum, 0);
if(hr == S_OK){
if(!silent)printf("SETUP: Looking For Capture Devices\n");
IMoniker *pMoniker = NULL;
while (pEnum->Next(1, &pMoniker, NULL) == S_OK){
IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void**)(&pPropBag));
if (FAILED(hr)){
pMoniker->Release();
continue;
}
VARIANT varName;
VariantInit(&varName);
hr = pPropBag->Read(L"Description", &varName, 0);
if (FAILED(hr)) hr = pPropBag->Read(L"FriendlyName", &varName, 0);
if (SUCCEEDED(hr)){
hr = pPropBag->Read(L"FriendlyName", &varName, 0);
int count = 0;
int maxLen = sizeof(deviceNames[0])/sizeof(deviceNames[0][0]) - 2;
while( varName.bstrVal[count] != 0x00 && count < maxLen) {
deviceNames[deviceCounter][count] = static_cast<char>(varName.bstrVal[count]);
count++;
}
deviceNames[deviceCounter][count] = 0;
if(!silent)printf("SETUP: %i) %s \n",deviceCounter, deviceNames[deviceCounter]);
}
pPropBag->Release();
pPropBag = NULL;
pMoniker->Release();
pMoniker = NULL;
deviceCounter++;
}
pDevEnum->Release();
pDevEnum = NULL;
pEnum->Release();
pEnum = NULL;
}
if(!silent)printf("SETUP: %i Device(s) found\n\n", deviceCounter);
}
comUnInit();
return deviceCounter;
}
// Width in pixels of the capture for device `id`, or 0 if not set up.
int videoInput::getWidth(int id){
    return isDeviceSetup(id) ? VDList[id]->width : 0;
}
// Height in pixels of the capture for device `id`, or 0 if not set up.
int videoInput::getHeight(int id){
    return isDeviceSetup(id) ? VDList[id]->height : 0;
}
// Frame buffer size in bytes (width*height*3) for device `id`,
// or 0 if the device is not set up.
int videoInput::getSize(int id){
    return isDeviceSetup(id) ? VDList[id]->videoSize : 0;
}
// Copies the latest frame for device `id` into dstBuffer (which must be
// at least getSize(id) bytes), optionally swapping R/B and flipping
// vertically.  In callback mode it waits up to 1s for a frame and copies
// under the grabber's critical section; otherwise it polls the sample
// grabber's current buffer.  Returns true when a frame was delivered.
bool videoInput::getPixels(int id, unsigned char * dstBuffer, bool flipRedAndBlue, bool flipImage){
bool success = false;
if(isDeviceSetup(id)){
if(bCallback){
// Wait (bounded) for SampleCB to signal a new frame.
DWORD result = WaitForSingleObject(VDList[id]->sgCallback->hEvent, 1000);
if( result != WAIT_OBJECT_0) return false;
EnterCriticalSection(&VDList[id]->sgCallback->critSection);
unsigned char * src = VDList[id]->sgCallback->pixels;
unsigned char * dst = dstBuffer;
int height = VDList[id]->height;
int width = VDList[id]->width;
processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
VDList[id]->sgCallback->newFrame = false;
LeaveCriticalSection(&VDList[id]->sgCallback->critSection);
// Re-arm the manual-reset event for the next frame.
ResetEvent(VDList[id]->sgCallback->hEvent);
success = true;
}
else{
// Polling mode: fetch the grabber's most recent buffer directly.
long bufferSize = VDList[id]->videoSize;
HRESULT hr = VDList[id]->pGrabber->GetCurrentBuffer(&bufferSize, (long *)VDList[id]->pBuffer);
if(hr==S_OK){
int numBytes = VDList[id]->videoSize;
if (numBytes == bufferSize){
unsigned char * src = (unsigned char * )VDList[id]->pBuffer;
unsigned char * dst = dstBuffer;
int height = VDList[id]->height;
int width = VDList[id]->width;
processPixels(src, dst, width, height, flipRedAndBlue, flipImage);
success = true;
}else{
if(verbose)printf("ERROR: GetPixels() - bufferSizes do not match!\n");
}
}else{
if(verbose)printf("ERROR: GetPixels() - Unable to grab frame for device %i\n", id);
}
}
}
return success;
}
unsigned char * videoInput::getPixels(int id, bool flipRedAndBlue, bool flipImage){
if(isDeviceSetup(id)){
getPixels(id, VDList[id]->pixels, flipRedAndBlue, flipImage);
}
return VDList[id]->pixels;
}
// True when a frame has arrived since the last getPixels() call.  Also
// drives freeze detection: freezeCheck is incremented here and reset by
// SampleCB, so a large value means no frames have arrived; when that
// exceeds nFramesForReconnect (after a 400-frame warm-up) and
// autoReconnect is on, the device is restarted.
bool videoInput::isFrameNew(int id){
if(!isDeviceSetup(id)) return false;
if(!bCallback)return true; // polling mode has no notion of "new"
bool result = false;
bool freeze = false;
EnterCriticalSection(&VDList[id]->sgCallback->critSection);
result = VDList[id]->sgCallback->newFrame;
if(VDList[id]->nFramesRunning > 400 && VDList[id]->sgCallback->freezeCheck > VDList[id]->nFramesForReconnect ){
freeze = true;
}
VDList[id]->sgCallback->freezeCheck++;
LeaveCriticalSection(&VDList[id]->sgCallback->critSection);
VDList[id]->nFramesRunning++;
if(freeze && VDList[id]->autoReconnect){
if(verbose)printf("ERROR: Device seems frozen - attempting to reconnect\n");
if( !restartDevice(VDList[id]->myID) ){
if(verbose)printf("ERROR: Unable to reconnect to device\n");
}else{
if(verbose)printf("SUCCESS: Able to reconnect to device\n");
}
}
return result;
}
bool videoInput::isDeviceSetup(int id){
if(idreadyToCapture)return true;
else return false;
}
// _beginthread entry point for showSettingsWindow: shows the filter's
// (modal) property pages, then releases the temporary filter reference
// that showSettingsWindow acquired via getDevice().
void __cdecl videoInput::basicThread(void * objPtr){
// objPtr is &VDList[id] - a pointer to the videoDevice pointer.
videoDevice * vd = *( (videoDevice **)(objPtr) );
ShowFilterPropertyPages(vd->pVideoInputFilter);
if(vd->pVideoInputFilter)vd->pVideoInputFilter->Release();
if(vd->pVideoInputFilter)vd->pVideoInputFilter = NULL;
return;
}
void videoInput::showSettingsWindow(int id){
if(isDeviceSetup(id)){
HANDLE myTempThread;
HRESULT hr = getDevice(&VDList[id]->pVideoInputFilter, id, VDList[id]->wDeviceName, VDList[id]->nDeviceName);
if(hr == S_OK){
myTempThread = (HANDLE)_beginthread(basicThread, 0, (void *)&VDList[id]);
}
}
}
// Reads the range, default, flags and current value of a VideoProcAmp
// property (brightness, contrast, ...) for the given device.
// FIXES: the "&currentValue" parameter and argument had been mangled
// into "¤tValue" (HTML-entity corruption) and would not compile -
// restored; removed the unused `isSuccessful` local.
bool videoInput::getVideoSettingFilter(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue){
    if( !isDeviceSetup(deviceID) )return false;
    videoDevice * VD = VDList[deviceID];
    // Bind a temporary reference to the capture filter.
    HRESULT hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
    if (FAILED(hr)){
        printf("setVideoSetting - getDevice Error\n");
        return false;
    }
    IAMVideoProcAmp *pAMVideoProcAmp = NULL;
    hr = VD->pVideoInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pAMVideoProcAmp);
    if(FAILED(hr)){
        printf("setVideoSetting - QueryInterface Error\n");
        if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
        if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
        return false;
    }
    if (verbose) printf("Setting video setting %ld.\n", Property);
    pAMVideoProcAmp->GetRange(Property, &min, &max, &SteppingDelta, &defaultValue, &flags);
    if (verbose) printf("Range for video setting %ld: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", Property, min, max, SteppingDelta, defaultValue, flags);
    pAMVideoProcAmp->Get(Property, &currentValue, &flags);
    // Drop the temporary references.
    if(pAMVideoProcAmp)pAMVideoProcAmp->Release();
    if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
    if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
    return true;
}
// Sets a VideoProcAmp property from a 0.0-1.0 percentage: reads the
// property's range, maps the percentage into it, and snaps the result to
// the nearest valid step before delegating to setVideoSettingFilter.
// FIX: the debug printf used %i for `long` arguments (undefined
// behavior on LLP64/LP64); now uses %ld.
bool videoInput::setVideoSettingFilterPct(int deviceID, long Property, float pctValue, long Flags){
    if( !isDeviceSetup(deviceID) )return false;
    long min, max, currentValue, flags, defaultValue, stepAmnt;
    if( !getVideoSettingFilter(deviceID, Property, min, max, stepAmnt, currentValue, flags, defaultValue) )return false;
    // Clamp the requested percentage to [0, 1].
    if(pctValue > 1.0)pctValue = 1.0;
    else if(pctValue < 0)pctValue = 0.0;
    float range = (float)max - (float)min;
    if(range <= 0)return false;
    if(stepAmnt == 0) return false;
    long value = (long)( (float)min + range * pctValue );
    long rasterValue = value;
    if( range == stepAmnt ){
        // Binary property: snap to whichever end is closer.
        if( pctValue < 0.5)rasterValue = min;
        else rasterValue = max;
    }else{
        // Snap to the nearest multiple of the stepping delta.
        long mod = value % stepAmnt;
        double halfStep = stepAmnt * 0.5;
        if( mod < halfStep ) rasterValue -= mod;
        else rasterValue += stepAmnt - mod;
        // FIX: %ld for long arguments (was %i).
        printf("RASTER - pctValue is %f - value is %ld - step is %ld - mod is %ld - rasterValue is %ld\n", pctValue, value, stepAmnt, mod, rasterValue);
    }
    return setVideoSettingFilter(deviceID, Property, rasterValue, Flags, false);
}
// Sets a VideoProcAmp property (brightness, contrast, ...) to lValue, or
// to its driver default (with auto mode) when useDefaultValue is true.
// Logs the property's range and current value when verbose.
// FIX: removed the unused `isSuccessful` local.
bool videoInput::setVideoSettingFilter(int deviceID, long Property, long lValue, long Flags, bool useDefaultValue){
    if( !isDeviceSetup(deviceID) )return false;
    videoDevice * VD = VDList[deviceID];
    // Bind a temporary reference to the capture filter.
    HRESULT hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
    if (FAILED(hr)){
        printf("setVideoSetting - getDevice Error\n");
        return false;
    }
    IAMVideoProcAmp *pAMVideoProcAmp = NULL;
    hr = VD->pVideoInputFilter->QueryInterface(IID_IAMVideoProcAmp, (void**)&pAMVideoProcAmp);
    if(FAILED(hr)){
        printf("setVideoSetting - QueryInterface Error\n");
        if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
        if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
        return false;
    }
    if (verbose) printf("Setting video setting %ld.\n", Property);
    long CurrVal, Min, Max, SteppingDelta, Default, CapsFlags, AvailableCapsFlags = 0;
    pAMVideoProcAmp->GetRange(Property, &Min, &Max, &SteppingDelta, &Default, &AvailableCapsFlags);
    if (verbose) printf("Range for video setting %ld: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", Property, Min, Max, SteppingDelta, Default, AvailableCapsFlags);
    pAMVideoProcAmp->Get(Property, &CurrVal, &CapsFlags);
    if (verbose) printf("Current value: %ld Flags %ld (%s)\n", CurrVal, CapsFlags, (CapsFlags == 1 ? "Auto" : (CapsFlags == 2 ? "Manual" : "Unknown")));
    if (useDefaultValue) {
        pAMVideoProcAmp->Set(Property, Default, VideoProcAmp_Flags_Auto);
    }
    else{
        pAMVideoProcAmp->Set(Property, lValue, Flags);
    }
    // Drop the temporary references.
    if(pAMVideoProcAmp)pAMVideoProcAmp->Release();
    if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
    if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
    return true;
}
// Sets a CameraControl property (pan, zoom, exposure, ...) from a
// 0.0-1.0 percentage, snapping to the property's stepping delta - the
// camera-control analogue of setVideoSettingFilterPct.
// FIX: the debug printf used %i for `long` arguments (undefined
// behavior on LLP64/LP64); now uses %ld.
bool videoInput::setVideoSettingCameraPct(int deviceID, long Property, float pctValue, long Flags){
    if( !isDeviceSetup(deviceID) )return false;
    long min, max, currentValue, flags, defaultValue, stepAmnt;
    if( !getVideoSettingCamera(deviceID, Property, min, max, stepAmnt, currentValue, flags, defaultValue) )return false;
    // Clamp the requested percentage to [0, 1].
    if(pctValue > 1.0)pctValue = 1.0;
    else if(pctValue < 0)pctValue = 0.0;
    float range = (float)max - (float)min;
    if(range <= 0)return false;
    if(stepAmnt == 0) return false;
    long value = (long)( (float)min + range * pctValue );
    long rasterValue = value;
    if( range == stepAmnt ){
        // Binary property: snap to whichever end is closer.
        if( pctValue < 0.5)rasterValue = min;
        else rasterValue = max;
    }else{
        // Snap to the nearest multiple of the stepping delta.
        long mod = value % stepAmnt;
        double halfStep = stepAmnt * 0.5;
        if( mod < halfStep ) rasterValue -= mod;
        else rasterValue += stepAmnt - mod;
        // FIX: %ld for long arguments (was %i).
        printf("RASTER - pctValue is %f - value is %ld - step is %ld - mod is %ld - rasterValue is %ld\n", pctValue, value, stepAmnt, mod, rasterValue);
    }
    return setVideoSettingCamera(deviceID, Property, rasterValue, Flags, false);
}
bool videoInput::setVideoSettingCamera(int deviceID, long Property, long lValue, long Flags, bool useDefaultValue){
    // Applies a camera-control property via IAMCameraControl. When
    // useDefaultValue is true, the device default is restored with automatic
    // control; otherwise lValue/Flags are applied as given.
    IAMCameraControl *pIAMCameraControl = NULL;
    if(isDeviceSetup(deviceID))
    {
        HRESULT hr;
        hr = getDevice(&VDList[deviceID]->pVideoInputFilter, deviceID, VDList[deviceID]->wDeviceName, VDList[deviceID]->nDeviceName);
        if (verbose) printf("Setting video setting %ld.\n", Property);
        hr = VDList[deviceID]->pVideoInputFilter->QueryInterface(IID_IAMCameraControl, (void**)&pIAMCameraControl);
        if (FAILED(hr)) {
            printf("Error\n");
            // BUGFIX: release the input filter obtained above (was leaked on
            // this error path; the sibling filter-setting routine releases it).
            if(VDList[deviceID]->pVideoInputFilter){
                VDList[deviceID]->pVideoInputFilter->Release();
                VDList[deviceID]->pVideoInputFilter = NULL;
            }
            return false;
        }
        else
        {
            long CurrVal, Min, Max, SteppingDelta, Default, CapsFlags, AvailableCapsFlags;
            pIAMCameraControl->GetRange(Property, &Min, &Max, &SteppingDelta, &Default, &AvailableCapsFlags);
            if (verbose) printf("Range for video setting %ld: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", Property, Min, Max, SteppingDelta, Default, AvailableCapsFlags);
            pIAMCameraControl->Get(Property, &CurrVal, &CapsFlags);
            if (verbose) printf("Current value: %ld Flags %ld (%s)\n", CurrVal, CapsFlags, (CapsFlags == 1 ? "Auto" : (CapsFlags == 2 ? "Manual" : "Unknown")));
            if (useDefaultValue) {
                pIAMCameraControl->Set(Property, Default, CameraControl_Flags_Auto);
            }
            else
            {
                pIAMCameraControl->Set(Property, lValue, Flags);
            }
            pIAMCameraControl->Release();
            // BUGFIX: release the input filter obtained above (was leaked on
            // the success path too).
            if(VDList[deviceID]->pVideoInputFilter){
                VDList[deviceID]->pVideoInputFilter->Release();
                VDList[deviceID]->pVideoInputFilter = NULL;
            }
            return true;
        }
    }
    return false;
}
// BUGFIX: the reference parameter and the Get() call below had been mangled by
// HTML-entity corruption ("&curren" became the '¤' character); restored to
// `&currentValue` so the file compiles again.
bool videoInput::getVideoSettingCamera(int deviceID, long Property, long &min, long &max, long &SteppingDelta, long &currentValue, long &flags, long &defaultValue){
    // Queries the range, default and current value of a camera-control
    // property. Note that `flags` first receives the capability flags from
    // GetRange and is then overwritten with the current control flags by Get.
    if( !isDeviceSetup(deviceID) )return false;

    HRESULT hr;
    videoDevice * VD = VDList[deviceID];

    hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
    if (FAILED(hr)){
        printf("setVideoSetting - getDevice Error\n");
        return false;
    }

    IAMCameraControl *pIAMCameraControl = NULL;
    hr = VD->pVideoInputFilter->QueryInterface(IID_IAMCameraControl, (void**)&pIAMCameraControl);
    if(FAILED(hr)){
        printf("setVideoSetting - QueryInterface Error\n");
        if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
        if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
        return false;
    }

    if (verbose) printf("Setting video setting %ld.\n", Property);
    pIAMCameraControl->GetRange(Property, &min, &max, &SteppingDelta, &defaultValue, &flags);
    if (verbose) printf("Range for video setting %ld: Min:%ld Max:%ld SteppingDelta:%ld Default:%ld Flags:%ld\n", Property, min, max, SteppingDelta, defaultValue, flags);
    pIAMCameraControl->Get(Property, &currentValue, &flags);

    if(pIAMCameraControl)pIAMCameraControl->Release();
    if(VD->pVideoInputFilter)VD->pVideoInputFilter->Release();
    if(VD->pVideoInputFilter)VD->pVideoInputFilter = NULL;
    return true;
}
void videoInput::stopDevice(int id){
    // Tears down the capture graph for device `id` (the videoDevice destructor
    // stops and releases the graph) and replaces the slot with a fresh,
    // unconfigured videoDevice so it can be set up again later.
    // BUGFIX: also reject negative ids; only the upper bound was checked
    // before, so a negative id indexed outside VDList.
    if(id >= 0 && id < VI_MAX_CAMERAS)
    {
        delete VDList[id];
        VDList[id] = new videoDevice();
    }
}
bool videoInput::restartDevice(int id){
if(isDeviceSetup(id))
{
int conn = VDList[id]->storeConn;
int tmpW = VDList[id]->width;
int tmpH = VDList[id]->height;
bool bFormat = VDList[id]->specificFormat;
long format = VDList[id]->formatType;
int nReconnect = VDList[id]->nFramesForReconnect;
bool bReconnect = VDList[id]->autoReconnect;
unsigned long avgFrameTime = VDList[id]->requestedFrameTime;
stopDevice(id);
if( avgFrameTime != -1){
VDList[id]->requestedFrameTime = avgFrameTime;
}
if( setupDevice(id, tmpW, tmpH, conn) ){
if( bFormat ){
setFormat(id, format);
}
if( bReconnect ){
setAutoReconnectOnFreeze(id, true, nReconnect);
}
return true;
}
}
return false;
}
videoInput::~videoInput(){
    // Destroy every device slot (stopping any running capture graphs), then
    // balance the COM initialisation done when the instance was created.
    for(int slot = 0; slot < VI_MAX_CAMERAS; slot++){
        delete VDList[slot];
    }
    comUnInit();
}
bool videoInput::comInit(){
    // Reference-counted COM initialisation shared by all videoInput instances:
    // only the first caller actually initialises COM, later calls just bump
    // the count so comUnInit() knows when to tear it down.
    if( comInitCount == 0 ){
        HRESULT hr = VI_COM_MULTI_THREADED
                        ? CoInitializeEx(NULL, COINIT_MULTITHREADED)
                        : CoInitialize(NULL);
        // Some other component already initialised COM with a different
        // threading model; we can still run, but warn.
        if( hr == RPC_E_CHANGED_MODE ){
            if(verbose)printf("SETUP - COM already setup - threaded VI might not be possible\n");
        }
    }
    comInitCount++;
    return true;
}
bool videoInput::comUnInit(){
    // Decrement the shared COM reference count; uninitialise COM only when
    // the last user releases it. Returns true only when COM was actually
    // shut down by this call.
    if(comInitCount > 0) --comInitCount;
    if(comInitCount != 0) return false;
    CoUninitialize();
    return true;
}
void videoInput::setAttemptCaptureSize(int id, int w, int h){
VDList[id]->tryWidth = w;
VDList[id]->tryHeight = h;
VDList[id]->tryDiffSize = true;
}
void videoInput::setPhyCon(int id, int conn){
    // Maps one of the VI_* connection constants onto the DirectShow physical
    // connector type and flags the device as needing crossbar routing.
    // Unknown constants leave the device untouched.
    switch(conn){
        case 0: VDList[id]->connection = PhysConn_Video_Composite; break;
        case 1: VDList[id]->connection = PhysConn_Video_SVideo;    break;
        case 2: VDList[id]->connection = PhysConn_Video_Tuner;     break;
        case 3: VDList[id]->connection = PhysConn_Video_USB;       break;
        case 4: VDList[id]->connection = PhysConn_Video_1394;      break;
        default: return;   // unrecognised: change nothing
    }
    VDList[id]->storeConn   = conn;   // remembered so restartDevice() can reuse it
    VDList[id]->useCrossbar = true;
}
bool videoInput::setup(int deviceNumber){
devicesFound = getDeviceCount();
if(deviceNumber>devicesFound-1)
{
if(verbose)printf("SETUP: device[%i] not found - you have %i devices available\n", deviceNumber, devicesFound);
if(devicesFound>=0) if(verbose)printf("SETUP: this means that the last device you can use is device[%i] \n", devicesFound-1);
return false;
}
if(VDList[deviceNumber]->readyToCapture)
{
if(verbose)printf("SETUP: can't setup, device %i is currently being used\n",VDList[deviceNumber]->myID);
return false;
}
HRESULT hr = start(deviceNumber, VDList[deviceNumber]);
if(hr == S_OK)return true;
else return false;
}
void videoInput::processPixels(unsigned char * src, unsigned char * dst, int width, int height, bool bRGB, bool bFlip){
    // Copies one 24-bit frame from src to dst.
    //   bRGB  == false : channel order already correct, plain copy.
    //   bRGB  == true  : swap the first and third byte of every pixel
    //                    (BGR <-> RGB) while copying.
    //   bFlip == true  : mirror the image vertically (DirectShow delivers
    //                    RGB frames bottom-up).
    int widthInBytes = width * 3;             // 3 bytes per pixel
    int numBytes     = widthInBytes * height;

    if(!bRGB){
        // BUGFIX/cleanup: removed two dead locals (`x`, `y`) that shadowed the
        // loop counter and were never read.
        if(bFlip){
            // Row-by-row copy, reading the source bottom-up.
            for(int y = 0; y < height; y++){
                memcpy(dst + (y * widthInBytes), src + ( (height -y -1) * widthInBytes), widthInBytes);
            }
        }else{
            memcpy(dst, src, numBytes);
        }
    }else{
        if(bFlip){
            // Start at the last source row and walk backwards, swapping
            // channels pixel by pixel. `x` tracks the column so we know when
            // to jump back two rows (one row was consumed moving forward).
            int x = 0;
            int y = (height - 1) * widthInBytes;
            src += y;
            for(int i = 0; i < numBytes; i+=3){
                if(x >= width){
                    x = 0;
                    src -= widthInBytes*2;
                }
                *dst = *(src+2);
                dst++;
                *dst = *(src+1);
                dst++;
                *dst = *src;
                dst++;
                src+=3;
                x++;
            }
        }
        else{
            // Channel swap only, rows in original order.
            for(int i = 0; i < numBytes; i+=3){
                *dst = *(src+2);
                dst++;
                *dst = *(src+1);
                dst++;
                *dst = *src;
                dst++;
                src+=3;
            }
        }
    }
}
void videoInput::getMediaSubtypeAsString(GUID type, char * typeAsString){
    // Writes a short human-readable name for a media subtype GUID into
    // typeAsString. The destination must hold at least 8 bytes; unknown GUIDs
    // map to "OTHER". strncpy pads the remainder with NULs, so exactly 8
    // bytes are always written, just like the original memcpy-based version.
    static const int maxStr = 8;
    const char * name = "OTHER";
    if(      type == MEDIASUBTYPE_RGB24)  name = "RGB24";
    else if( type == MEDIASUBTYPE_RGB32)  name = "RGB32";
    else if( type == MEDIASUBTYPE_RGB555) name = "RGB555";
    else if( type == MEDIASUBTYPE_RGB565) name = "RGB565";
    else if( type == MEDIASUBTYPE_YUY2)   name = "YUY2";
    else if( type == MEDIASUBTYPE_YVYU)   name = "YVYU";
    else if( type == MEDIASUBTYPE_YUYV)   name = "YUYV";
    else if( type == MEDIASUBTYPE_IYUV)   name = "IYUV";
    else if( type == MEDIASUBTYPE_UYVY)   name = "UYVY";
    else if( type == MEDIASUBTYPE_YV12)   name = "YV12";
    else if( type == MEDIASUBTYPE_YVU9)   name = "YVU9";
    else if( type == MEDIASUBTYPE_Y411)   name = "Y411";
    else if( type == MEDIASUBTYPE_Y41P)   name = "Y41P";
    else if( type == MEDIASUBTYPE_Y211)   name = "Y211";
    else if( type == MEDIASUBTYPE_AYUV)   name = "AYUV";
    else if( type == MEDIASUBTYPE_Y800)   name = "Y800";
    else if( type == MEDIASUBTYPE_Y8)     name = "Y8";
    else if( type == MEDIASUBTYPE_GREY)   name = "GREY";
    strncpy(typeAsString, name, maxStr);
}
void videoInput::setRequestedMediaSubType(int mediatype) {
requestedMediaSubType = mediaSubtypes[mediatype];
}
// Scans every capability the device's stream config reports and returns, via
// the out parameters, the supported size (and its media subtype) closest to
// widthIn x heightIn. If an exact match exists the scan stops early. The out
// parameters are left untouched when no capability can be examined, so
// callers initialise them to sentinel values (-1) before calling.
static void findClosestSizeAndSubtype(videoDevice * VD, int widthIn, int heightIn, int &widthOut, int &heightOut, GUID & mediatypeOut){
HRESULT hr;
// Best (closest) size found across all capabilities so far.
int nearW = 9999999;
int nearH = 9999999;
// NOTE(review): written below but never read - kept for byte-compatibility.
bool foundClosestMatch = true;
int iCount = 0;
int iSize = 0;
hr = VD->streamConf->GetNumberOfCapabilities(&iCount, &iSize);
// Only proceed if the caps structure is the video variant we know how to read.
if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))
{
for (int iFormat = 0; iFormat < iCount; iFormat++)
{
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
hr = VD->streamConf->GetStreamCaps(iFormat, &pmtConfig, (BYTE*)&scc);
if (SUCCEEDED(hr)){
// A capability describes a size RANGE with a step; enumerate it.
int stepX = scc.OutputGranularityX;
int stepY = scc.OutputGranularityY;
// Best width/height inside THIS capability.
int tempW = 999999;
int tempH = 999999;
// A zero/negative step would make the loops below spin forever.
if(stepX < 1 || stepY < 1) continue;
bool exactMatch = false;
bool exactMatchX = false;
bool exactMatchY = false;
// Find the representable width closest to the requested one.
for(int x = scc.MinOutputSize.cx; x <= scc.MaxOutputSize.cx; x+= stepX){
if( widthIn == x ){
exactMatchX = true;
tempW = x;
}
else if( abs(widthIn-x) < abs(widthIn-tempW) ){
tempW = x;
}
}
// Same for the height.
for(int y = scc.MinOutputSize.cy; y <= scc.MaxOutputSize.cy; y+= stepY){
if( heightIn == y){
exactMatchY = true;
tempH = y;
}
else if( abs(heightIn-y) < abs(heightIn-tempH) ){
tempH = y;
}
}
if(exactMatchX && exactMatchY){
// Perfect fit: report it and stop scanning (break below, after the
// media type is freed).
foundClosestMatch = false;
exactMatch = true;
widthOut = widthIn;
heightOut = heightIn;
mediatypeOut = pmtConfig->subtype;
}
// Otherwise keep this capability only if it beats (by Manhattan
// distance) the best seen across all previous capabilities.
else if( abs(widthIn - tempW) + abs(heightIn - tempH) < abs(widthIn - nearW) + abs(heightIn - nearH) )
{
nearW = tempW;
nearH = tempH;
widthOut = nearW;
heightOut = nearH;
mediatypeOut = pmtConfig->subtype;
}
// GetStreamCaps allocates the media type; always free it.
MyDeleteMediaType(pmtConfig);
if(exactMatch)break;
}
}
}
}
static bool setSizeAndSubtype(videoDevice * VD, int attemptWidth, int attemptHeight, GUID mediatype){
    // Tries to push attemptWidth x attemptHeight with the given subtype onto
    // the device's stream config. On failure the previous format is restored
    // and false is returned.
    // BUGFIX: the cast below had lost its <VIDEOINFOHEADER*> target type
    // (angle brackets stripped); restored so the file compiles. Also removed
    // two unused locals that cached the old width/height.
    VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER*>(VD->pAmMediaType->pbFormat);

    // Keep a copy of the current format so we can roll back on failure.
    AM_MEDIA_TYPE * tmpType = NULL;
    HRESULT hr = VD->streamConf->GetFormat(&tmpType);
    if(hr != S_OK)return false;

    // Patch the cached media type in place with the requested size/subtype.
    HEADER(pVih)->biWidth  = attemptWidth;
    HEADER(pVih)->biHeight = attemptHeight;
    VD->pAmMediaType->formattype  = FORMAT_VideoInfo;
    VD->pAmMediaType->majortype   = MEDIATYPE_Video;
    VD->pAmMediaType->subtype     = mediatype;
    VD->pAmMediaType->lSampleSize = attemptWidth*attemptHeight*3;   // 24-bit frames

    // Apply a custom frame time (i.e. frame rate) if one was requested.
    if( VD->requestedFrameTime != -1){
        pVih->AvgTimePerFrame = VD->requestedFrameTime;
    }

    hr = VD->streamConf->SetFormat(VD->pAmMediaType);
    if(hr == S_OK){
        if( tmpType != NULL )MyDeleteMediaType(tmpType);
        return true;
    }else{
        // The device rejected it: restore the format it was using before.
        VD->streamConf->SetFormat(tmpType);
        if( tmpType != NULL )MyDeleteMediaType(tmpType);
    }
    return false;
}
int videoInput::start(int deviceID, videoDevice *VD){
    // Builds and runs the full DirectShow capture graph for one device:
    //   capture graph builder -> filter graph -> device filter -> (crossbar)
    //   -> sample grabber -> null renderer.
    // Returns S_OK on success or the failing HRESULT; on any failure the
    // partially-built graph is torn down via stopDevice().
    // BUGFIX: hr was initialised with NULL (a pointer constant); use S_OK.
    HRESULT hr = S_OK;
    VD->myID = deviceID;
    VD->setupStarted = true;
    CAPTURE_MODE = PIN_CATEGORY_CAPTURE;   // may switch to PREVIEW below
    callbackSetCount = 1;

    if(verbose)printf("SETUP: Setting up device %i\n",deviceID);

    // Create the capture graph builder.
    hr = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2, (void **)&VD->pCaptureGraph);
    if (FAILED(hr))
    {
        if(verbose)printf("ERROR - Could not create the Filter Graph Manager\n");
        return hr;
    }

    // Create the filter graph manager and attach it to the builder.
    hr = CoCreateInstance(CLSID_FilterGraph, 0, CLSCTX_INPROC_SERVER,IID_IGraphBuilder, (void**)&VD->pGraph);
    if (FAILED(hr))
    {
        if(verbose)printf("ERROR - Could not add the graph builder!\n");
        stopDevice(deviceID);
        return hr;
    }

    hr = VD->pCaptureGraph->SetFiltergraph(VD->pGraph);
    if (FAILED(hr))
    {
        if(verbose)printf("ERROR - Could not set filtergraph\n");
        stopDevice(deviceID);
        return hr;
    }

    // Media control drives Run()/Stop() on the graph.
    hr = VD->pGraph->QueryInterface(IID_IMediaControl, (void **)&VD->pControl);
    if (FAILED(hr))
    {
        if(verbose)printf("ERROR - Could not create the Media Control object\n");
        stopDevice(deviceID);
        return hr;
    }

    // Bind the physical device's filter and add it to the graph.
    hr = getDevice(&VD->pVideoInputFilter, deviceID, VD->wDeviceName, VD->nDeviceName);
    if (SUCCEEDED(hr)){
        if(verbose)printf("SETUP: %s\n", VD->nDeviceName);
        hr = VD->pGraph->AddFilter(VD->pVideoInputFilter, VD->wDeviceName);
    }else{
        if(verbose)printf("ERROR - Could not find specified video device\n");
        stopDevice(deviceID);
        return hr;
    }

    // Prefer the preview pin when the device exposes one (avoids SmartTee).
    IAMStreamConfig *streamConfTest = NULL;
    hr = VD->pCaptureGraph->FindInterface(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, VD->pVideoInputFilter, IID_IAMStreamConfig, (void **)&streamConfTest);
    if(FAILED(hr)){
        if(verbose)printf("SETUP: Couldn't find preview pin using SmartTee\n");
    }else{
        CAPTURE_MODE = PIN_CATEGORY_PREVIEW;
        streamConfTest->Release();
        streamConfTest = NULL;
    }

    // Analog sources (tuner/composite/S-Video) need the crossbar routed.
    if(VD->useCrossbar)
    {
        if(verbose)printf("SETUP: Checking crossbar\n");
        routeCrossbar(&VD->pCaptureGraph, &VD->pVideoInputFilter, VD->connection, CAPTURE_MODE);
    }

    hr = VD->pCaptureGraph->FindInterface(&CAPTURE_MODE, &MEDIATYPE_Video, VD->pVideoInputFilter, IID_IAMStreamConfig, (void **)&VD->streamConf);
    if(FAILED(hr)){
        if(verbose)printf("ERROR: Couldn't config the stream!\n");
        stopDevice(deviceID);
        return hr;
    }

    hr = VD->streamConf->GetFormat(&VD->pAmMediaType);
    if(FAILED(hr)){
        if(verbose)printf("ERROR: Couldn't getFormat for pAmMediaType!\n");
        stopDevice(deviceID);
        return hr;
    }

    // BUGFIX: the cast below had lost its <VIDEOINFOHEADER*> target type
    // (angle brackets stripped); restored so the file compiles.
    VIDEOINFOHEADER *pVih = reinterpret_cast<VIDEOINFOHEADER*>(VD->pAmMediaType->pbFormat);
    int currentWidth  = HEADER(pVih)->biWidth;
    int currentHeight = HEADER(pVih)->biHeight;

    bool customSize = VD->tryDiffSize;
    bool foundSize  = false;

    if(customSize){
        if(verbose) printf("SETUP: Default Format is set to %i by %i \n", currentWidth, currentHeight);
        char guidStr[8];

        // 1) Try the explicitly requested subtype at the requested size.
        getMediaSubtypeAsString(requestedMediaSubType, guidStr);
        if(verbose)printf("SETUP: trying requested format %s @ %i by %i\n", guidStr, VD->tryWidth, VD->tryHeight);
        if( setSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, requestedMediaSubType) ) {
            VD->setSize(VD->tryWidth, VD->tryHeight);
            foundSize = true;
        }

        // 2) Otherwise try every known subtype at the requested size.
        if (!foundSize) {
            for(int i = 0; i < VI_NUM_TYPES; i++){
                getMediaSubtypeAsString(mediaSubtypes[i], guidStr);
                if(verbose)printf("SETUP: trying format %s @ %i by %i\n", guidStr, VD->tryWidth, VD->tryHeight);
                if( setSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, mediaSubtypes[i]) ){
                    VD->setSize(VD->tryWidth, VD->tryHeight);
                    foundSize = true;
                    break;
                }
            }
        }

        // 3) Finally fall back to the closest size the device supports.
        if( foundSize == false ){
            if( verbose )printf("SETUP: couldn't find requested size - searching for closest matching size\n");
            int closestWidth  = -1;
            int closestHeight = -1;
            GUID newMediaSubtype;
            findClosestSizeAndSubtype(VD, VD->tryWidth, VD->tryHeight, closestWidth, closestHeight, newMediaSubtype);
            if( closestWidth != -1 && closestHeight != -1){
                getMediaSubtypeAsString(newMediaSubtype, guidStr);
                if(verbose)printf("SETUP: closest supported size is %s @ %i %i\n", guidStr, closestWidth, closestHeight);
                if( setSizeAndSubtype(VD, closestWidth, closestHeight, newMediaSubtype) ){
                    VD->setSize(closestWidth, closestHeight);
                    foundSize = true;
                }
            }
        }
    }

    // No custom size (or negotiation failed): keep the device default, only
    // applying a requested frame time if one was set.
    if(customSize == false || foundSize == false){
        if( VD->requestedFrameTime != -1 ){
            pVih->AvgTimePerFrame = VD->requestedFrameTime;
            hr = VD->streamConf->SetFormat(VD->pAmMediaType);
        }
        VD->setSize(currentWidth, currentHeight);
    }

    // Create the sample grabber that hands us frames.
    hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER,IID_IBaseFilter, (void**)&VD->pGrabberF);
    if (FAILED(hr)){
        if(verbose)printf("Could not Create Sample Grabber - CoCreateInstance()\n");
        stopDevice(deviceID);
        return hr;
    }

    hr = VD->pGraph->AddFilter(VD->pGrabberF, L"Sample Grabber");
    if (FAILED(hr)){
        if(verbose)printf("Could not add Sample Grabber - AddFilter()\n");
        stopDevice(deviceID);
        return hr;
    }

    hr = VD->pGrabberF->QueryInterface(IID_ISampleGrabber, (void**)&VD->pGrabber);
    if (FAILED(hr)){
        if(verbose)printf("ERROR: Could not query SampleGrabber\n");
        stopDevice(deviceID);
        return hr;
    }

    // Callback mode delivers each frame via sgCallback; buffered mode keeps
    // the most recent frame for polling with GetCurrentBuffer.
    hr = VD->pGrabber->SetOneShot(FALSE);
    if(bCallback){
        hr = VD->pGrabber->SetBufferSamples(FALSE);
    }else{
        hr = VD->pGrabber->SetBufferSamples(TRUE);
    }

    if(bCallback){
        hr = VD->pGrabber->SetCallback(VD->sgCallback, 0);
        if (FAILED(hr)){
            if(verbose)printf("ERROR: problem setting callback\n");
            stopDevice(deviceID);
            return hr;
        }else{
            if(verbose)printf("SETUP: Capture callback set\n");
        }
    }

    // Ask the grabber for 24-bit RGB regardless of the device's native format.
    AM_MEDIA_TYPE mt;
    ZeroMemory(&mt,sizeof(AM_MEDIA_TYPE));
    mt.majortype  = MEDIATYPE_Video;
    mt.subtype    = MEDIASUBTYPE_RGB24;
    mt.formattype = FORMAT_VideoInfo;
    hr = VD->pGrabber->SetMediaType(&mt);

    // streamConf is no longer needed; a NULL here means the earlier
    // FindInterface silently failed (device already in use?).
    if(VD->streamConf){
        VD->streamConf->Release();
        VD->streamConf = NULL;
    }else{
        if(verbose)printf("ERROR: connecting device - prehaps it is already being used?\n");
        stopDevice(deviceID);
        return S_FALSE;
    }

    // Terminate the graph with a null renderer (we only want the grabber tap).
    hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)(&VD->pDestFilter));
    if (FAILED(hr)){
        if(verbose)printf("ERROR: Could not create filter - NullRenderer\n");
        stopDevice(deviceID);
        return hr;
    }

    hr = VD->pGraph->AddFilter(VD->pDestFilter, L"NullRenderer");
    if (FAILED(hr)){
        if(verbose)printf("ERROR: Could not add filter - NullRenderer\n");
        stopDevice(deviceID);
        return hr;
    }

    hr = VD->pCaptureGraph->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video, VD->pVideoInputFilter, VD->pGrabberF, VD->pDestFilter);
    if (FAILED(hr)){
        if(verbose)printf("ERROR: Could not connect pins - RenderStream()\n");
        stopDevice(deviceID);
        return hr;
    }

    // Remove the reference clock so frames are delivered as fast as possible.
    {
        IMediaFilter *pMediaFilter = 0;
        hr = VD->pGraph->QueryInterface(IID_IMediaFilter, (void**)&pMediaFilter);
        if (FAILED(hr)){
            if(verbose)printf("ERROR: Could not get IID_IMediaFilter interface\n");
        }else{
            pMediaFilter->SetSyncSource(NULL);
            pMediaFilter->Release();
        }
    }

    hr = VD->pControl->Run();
    if (FAILED(hr)){
        if(verbose)printf("ERROR: Could not start graph\n");
        stopDevice(deviceID);
        return hr;
    }

    // In buffered mode, wait until the first frame actually arrives.
    // NOTE(review): this poll has no timeout - it will spin forever if the
    // device never delivers a frame.
    if(!bCallback){
        long bufferSize = VD->videoSize;
        while( hr != S_OK){
            hr = VD->pGrabber->GetCurrentBuffer(&bufferSize, (long *)VD->pBuffer);
            Sleep(10);
        }
    }

    if(verbose)printf("SETUP: Device is setup and ready to capture.\n\n");
    VD->readyToCapture = true;

    // The graph holds its own references; drop ours.
    VD->pVideoInputFilter->Release();
    VD->pVideoInputFilter = NULL;
    VD->pGrabberF->Release();
    VD->pGrabberF = NULL;
    VD->pDestFilter->Release();
    VD->pDestFilter = NULL;

    return S_OK;
}
int videoInput::getDeviceCount(){
ICreateDevEnum *pDevEnum = NULL;
IEnumMoniker *pEnum = NULL;
int deviceCounter = 0;
HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL,
CLSCTX_INPROC_SERVER, IID_ICreateDevEnum,
reinterpret_cast<void**>(&pDevEnum));
if (SUCCEEDED(hr))
{
hr = pDevEnum->CreateClassEnumerator(
CLSID_VideoInputDeviceCategory,
&pEnum, 0);
if(hr == S_OK){
IMoniker *pMoniker = NULL;
while (pEnum->Next(1, &pMoniker, NULL) == S_OK){
IPropertyBag *pPropBag;
hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,
(void**)(&pPropBag));
if (FAILED(hr)){
pMoniker->Release();
continue;
}
pPropBag->Release();
pPropBag = NULL;
pMoniker->Release();
pMoniker = NULL;
deviceCounter++;
}
pEnum->Release();
pEnum = NULL;
}
pDevEnum->Release();
pDevEnum = NULL;
}
return deviceCounter;
}
HRESULT videoInput::getDevice(IBaseFilter** gottaFilter, int deviceId, WCHAR * wDeviceName, char * nDeviceName){
    // Binds the capture filter for the deviceId-th video input device and
    // copies its friendly name into wDeviceName (wide) and nDeviceName
    // (narrow). Returns VFW_E_NOT_FOUND if the index doesn't exist.
    BOOL done = false;
    int deviceCounter = 0;

    ICreateDevEnum *pSysDevEnum = NULL;
    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum);
    if (FAILED(hr))
    {
        return hr;
    }

    IEnumMoniker *pEnumCat = NULL;
    hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);
    if (hr == S_OK)
    {
        IMoniker *pMoniker = NULL;
        ULONG cFetched;
        while ((pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) && (!done))
        {
            if(deviceCounter == deviceId)
            {
                IPropertyBag *pPropBag;
                hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
                if (SUCCEEDED(hr))
                {
                    VARIANT varName;
                    VariantInit(&varName);
                    hr = pPropBag->Read(L"FriendlyName", &varName, 0);
                    if (SUCCEEDED(hr))
                    {
                        // Copy the BSTR into both output buffers.
                        // NOTE(review): the destination buffer sizes are not
                        // visible here; callers must provide buffers at least
                        // as long as the friendly name plus terminator.
                        int count = 0;
                        while( varName.bstrVal[count] != 0x00 ) {
                            wDeviceName[count] = varName.bstrVal[count];
                            nDeviceName[count] = (char)varName.bstrVal[count];
                            count++;
                        }
                        // BUGFIX: the copies above were never NUL-terminated,
                        // leaving garbage-terminated names in fresh buffers.
                        wDeviceName[count] = 0;
                        nDeviceName[count] = 0;

                        // Bind the actual capture filter for this device.
                        hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, (void**)gottaFilter);
                        done = true;
                    }
                    VariantClear(&varName);
                    pPropBag->Release();
                    pPropBag = NULL;
                    pMoniker->Release();
                    pMoniker = NULL;
                }
            }
            deviceCounter++;
        }
        pEnumCat->Release();
        pEnumCat = NULL;
    }

    pSysDevEnum->Release();
    pSysDevEnum = NULL;

    if (done) {
        return hr;   // HRESULT of BindToObject
    } else {
        return VFW_E_NOT_FOUND;
    }
}
HRESULT videoInput::ShowFilterPropertyPages(IBaseFilter *pFilter){
    // Pops up the driver-supplied property page dialog for the given filter
    // (blocking until the user closes it).
    ISpecifyPropertyPages *pProp;
    HRESULT hr = pFilter->QueryInterface(IID_ISpecifyPropertyPages, (void **)&pProp);
    if (SUCCEEDED(hr))
    {
        // BUGFIX: zero-initialise both structs. QueryFilterInfo/GetPages can
        // fail, and the code below reads FilterInfo.achName/pGraph and frees
        // caGUID.pElems unconditionally - previously on uninitialised memory.
        FILTER_INFO FilterInfo = {0};
        hr = pFilter->QueryFilterInfo(&FilterInfo);

        IUnknown *pFilterUnk = NULL;
        pFilter->QueryInterface(IID_IUnknown, (void **)&pFilterUnk);

        CAUUID caGUID = {0};
        pProp->GetPages(&caGUID);
        pProp->Release();

        OleCreatePropertyFrame(
            NULL,                   // parent window
            0, 0,                   // reserved
            FilterInfo.achName,     // dialog caption
            1,                      // one object
            &pFilterUnk,
            caGUID.cElems,          // number of property pages
            caGUID.pElems,
            0,
            0, NULL
        );

        if(pFilterUnk)pFilterUnk->Release();
        if(FilterInfo.pGraph)FilterInfo.pGraph->Release();
        CoTaskMemFree(caGUID.pElems);   // safe: NULL when GetPages failed
    }
    return hr;
}
HRESULT videoInput::SaveGraphFile(IGraphBuilder *pGraph, WCHAR *wszPath) {
    // Serialises the filter graph to a .GRF structured-storage file so it can
    // be inspected with GraphEdit.
    const WCHAR wszStreamName[] = L"ActiveMovieGraph";
    HRESULT hr;
    IStorage *pStorage = NULL;

    hr = StgCreateDocfile(
        wszPath,
        STGM_CREATE | STGM_TRANSACTED | STGM_READWRITE | STGM_SHARE_EXCLUSIVE,
        0, &pStorage);
    if(FAILED(hr))
    {
        return hr;
    }

    IStream *pStream;
    hr = pStorage->CreateStream(
        wszStreamName,
        STGM_WRITE | STGM_CREATE | STGM_SHARE_EXCLUSIVE,
        0, 0, &pStream);
    if (FAILED(hr))
    {
        pStorage->Release();
        return hr;
    }

    IPersistStream *pPersist = NULL;
    hr = pGraph->QueryInterface(IID_IPersistStream, reinterpret_cast<void**>(&pPersist));
    // BUGFIX: the result of QueryInterface was ignored and pPersist was
    // dereferenced unconditionally; bail out (releasing the storage objects)
    // if the graph doesn't expose IPersistStream.
    if (FAILED(hr) || pPersist == NULL)
    {
        pStream->Release();
        pStorage->Release();
        return FAILED(hr) ? hr : E_NOINTERFACE;
    }

    hr = pPersist->Save(pStream, TRUE);
    pStream->Release();
    pPersist->Release();
    if (SUCCEEDED(hr))
    {
        hr = pStorage->Commit(STGC_DEFAULT);   // transacted storage: flush to disk
    }
    pStorage->Release();
    return hr;
}
HRESULT videoInput::routeCrossbar(ICaptureGraphBuilder2 **ppBuild, IBaseFilter **pVidInFilter, int conType, GUID captureMode){
    // For analog capture devices that expose a crossbar, routes the requested
    // physical input connector (composite/S-Video/tuner/...) to the video
    // decoder output pin. Webcams without a crossbar are left untouched.
    ICaptureGraphBuilder2 *pBuild = *ppBuild;
    IBaseFilter *pVidFilter = *pVidInFilter;

    // Probe upstream for a crossbar; absence means this is a plain webcam.
    IAMCrossbar *pXBar1 = NULL;
    HRESULT hr = pBuild->FindInterface(&LOOK_UPSTREAM_ONLY, NULL, pVidFilter,
                                       IID_IAMCrossbar, (void**)&pXBar1);
    if (SUCCEEDED(hr))
    {
        bool foundDevice = false;
        if(verbose)printf("SETUP: You are not a webcam! Setting Crossbar\n");
        // BUGFIX: pXBar1 was Released here AND again at the bottom of this
        // branch (double Release). Release once and clear the pointer.
        pXBar1->Release();
        pXBar1 = NULL;

        IAMCrossbar *Crossbar = NULL;
        hr = pBuild->FindInterface(&captureMode, &MEDIATYPE_Interleaved, pVidFilter, IID_IAMCrossbar, (void **)&Crossbar);
        if(hr != NOERROR){
            hr = pBuild->FindInterface(&captureMode, &MEDIATYPE_Video, pVidFilter, IID_IAMCrossbar, (void **)&Crossbar);
        }
        // BUGFIX: both lookups can fail; Crossbar was previously dereferenced
        // without any check.
        if(FAILED(hr) || Crossbar == NULL){
            return hr;
        }

        LONG lInpin, lOutpin;
        hr = Crossbar->get_PinCounts(&lOutpin , &lInpin);

        // Scan the input pins for one matching the requested connector type.
        BOOL IPin=TRUE; LONG pIndex=0 , pRIndex=0 , pType=0;
        while( pIndex < lInpin)
        {
            hr = Crossbar->get_CrossbarPinInfo( IPin , pIndex , &pRIndex , &pType);
            if( pType == conType){
                if(verbose)printf("SETUP: Found Physical Interface");
                switch(conType){
                    case PhysConn_Video_Composite:
                        if(verbose)printf(" - Composite\n");
                        break;
                    case PhysConn_Video_SVideo:
                        if(verbose)printf(" - S-Video\n");
                        break;
                    case PhysConn_Video_Tuner:
                        if(verbose)printf(" - Tuner\n");
                        break;
                    case PhysConn_Video_USB:
                        if(verbose)printf(" - USB\n");
                        break;
                    case PhysConn_Video_1394:
                        if(verbose)printf(" - Firewire\n");
                        break;
                }
                foundDevice = true;
                break;
            }
            pIndex++;
        }

        if(foundDevice){
            // Find the video-decoder output pin and route our input to it.
            BOOL OPin=FALSE; LONG pOIndex=0 , pORIndex=0 , pOType=0;
            while( pOIndex < lOutpin)
            {
                hr = Crossbar->get_CrossbarPinInfo( OPin , pOIndex , &pORIndex , &pOType);
                if( pOType == PhysConn_Video_VideoDecoder)
                    break;
                // BUGFIX: the index was never incremented, so this loop spun
                // forever whenever the first output pin wasn't the decoder.
                pOIndex++;
            }
            Crossbar->Route(pOIndex,pIndex);
        }else{
            if(verbose)printf("SETUP: Didn't find specified Physical Connection type. Using Defualt. \n");
        }

        // BUGFIX: the crossbar interface was never released (leaked).
        Crossbar->Release();
    }else{
        if(verbose)printf("SETUP: You are a webcam or snazzy firewire cam! No Crossbar needed\n");
        return hr;
    }
    return hr;
}
// ===================== frameProcessor.h =====================
#ifndef FRAMEPROCESSOR_H
#define FRAMEPROCESSOR_H
#include
class FrameProcessor
{
public:
virtual void process(cv::Mat &input, cv::Mat &output) = 0;
};
#endif // FRAMEPROCESSOR_H
// ===================== videoProcessor.h =====================
#ifndef VIDEOPROCESSOR_H
#define VIDEOPROCESSOR_H
// BUGFIX: the two include targets had been stripped from this header;
// restored the headers the declarations below require.
#include <string>
#include <vector>
#include <opencv2/opencv.hpp>
#include "frameProcessor.h"
// NOTE(review): using-directives in a header leak into every includer;
// kept for source compatibility with existing callers.
using namespace std;
using namespace cv;

// Drives a frame-by-frame processing loop over a video file, a camera, or a
// list of image files, optionally displaying input/output windows and
// delegating per-frame work to a callback or a FrameProcessor object.
class VideoProcessor
{
private:
    cv::VideoCapture capture;              // video-file / camera source
    void(*process)(cv::Mat&, cv::Mat&);    // callback processor (exclusive with frameProcessor)
    FrameProcessor *frameProcessor;        // interface processor (not owned)
    bool callIt;                           // run the processor on each frame?
    std::string windowNameInput;           // empty string = window disabled
    std::string windowNameOutput;
    int delay;                             // waitKey delay in ms; negative disables key handling
    long fnumber;                          // number of frames processed so far
    long frameToStop;                      // target frame number for stopping
    bool stop;                             // loop-termination flag
    std::vector<std::string> images;       // image-sequence input (active when non-empty)
    std::vector<std::string>::const_iterator itImg;   // next image to read

    // Reads the next frame from whichever input is active; returns false at
    // end of input or on a read failure.
    bool readNextFrame(cv::Mat &frame)
    {
        if (images.size() == 0)
            return capture.read(frame);
        else {
            if (itImg != images.end())
            {
                frame = cv::imread(*itImg);
                itImg++;
                return frame.data != 0;
            }
            // BUGFIX: this path previously fell off the end of a non-void
            // function (undefined behaviour); report end-of-sequence.
            return false;
        }
    }
public:
    VideoProcessor() : callIt(false), delay(-1),
        fnumber(0), stop(false),
        process(0), frameProcessor(0) {}
    void displayInput(std::string wt);
    void displayOutput(std::string wn);
    void dontDisplay();
    bool setInput(std::string filename);
    bool setInput(int id);
    bool setInput(const std::vector<std::string>& imgs);
    void setDelay(int d);
    double getFrameRate();
    void callProcess();
    void dontCallProcess();
    void setFrameProcessor(FrameProcessor* frameProcessorPtr);
    void setFrameProcessor(void(*frameProcessingCallback)(cv::Mat&, cv::Mat&));
    void stopIt();
    bool isStopped();
    bool isOpened();
    long getFrameNumber();
    void run();
};
#endif
// ===================== videoProcessor.cpp =====================
#include "videoProcessor.h"
void VideoProcessor::displayInput(std::string wt)
{
    // Remember the title and create the window that will show raw frames.
    windowNameInput = wt;
    cv::namedWindow(windowNameInput);
}
void VideoProcessor::displayOutput(std::string wn)
{
    // Remember the title and create the window that will show processed frames.
    windowNameOutput = wn;
    cv::namedWindow(windowNameOutput);
}
void VideoProcessor::dontDisplay()
{
    // Close both display windows and clear the names so run() skips imshow.
    cv::destroyWindow(windowNameInput);
    cv::destroyWindow(windowNameOutput);
    windowNameInput.clear();
    windowNameOutput.clear();
}
bool VideoProcessor::setInput(std::string filename)
{
    // Switch the source to a video file: drop any previous source, reset the
    // processed-frame counter, then try to open the file.
    fnumber = 0;
    capture.release();   // release a previously-opened capture, if any
    images.clear();      // disable image-sequence mode
    return capture.open(filename);
}
bool VideoProcessor::setInput(int id)
{
    // Switch the source to camera `id`: drop any previous source, reset the
    // processed-frame counter, then try to open the device.
    fnumber = 0;
    capture.release();   // release a previously-opened capture, if any
    images.clear();      // disable image-sequence mode
    return capture.open(id);
}
bool VideoProcessor::setInput(const std::vector<std::string>& imgs)
{
    // Switch the source to an image-file sequence; any open capture is
    // released and reading starts at the first filename.
    fnumber = 0;
    capture.release();
    images = imgs;
    itImg = images.begin();
    return true;
}
void VideoProcessor::setDelay(int d)
{
    // Inter-frame waitKey delay in milliseconds; a negative value disables
    // key handling (and therefore display refresh) in run().
    delay = d;
}
double VideoProcessor::getFrameRate()
{
    // An image sequence has no intrinsic rate; report a nominal 25 fps.
    if (!images.empty()) return 25.0;
    return capture.get(CV_CAP_PROP_FPS);
}
void VideoProcessor::callProcess()
{
    // Enable per-frame processing in run().
    callIt = true;
}
void VideoProcessor::dontCallProcess()
{
    // Disable per-frame processing; run() passes frames through untouched.
    callIt = false;
}
void VideoProcessor::setFrameProcessor(FrameProcessor* frameProcessorPtr)
{
    // Install an interface-style processor; the two hooks are mutually
    // exclusive, so the callback is cleared. Processing is enabled.
    process = 0;
    frameProcessor = frameProcessorPtr;
    callProcess();
}
void VideoProcessor::setFrameProcessor(void(*frameProcessingCallback)(cv::Mat&, cv::Mat&))
{
    // Install a callback-style processor; the two hooks are mutually
    // exclusive, so the interface pointer is cleared. Processing is enabled.
    frameProcessor = 0;
    process = frameProcessingCallback;
    callProcess();
}
void VideoProcessor::stopIt()
{
    // Request termination; run() checks this flag before each frame.
    stop = true;
}
bool VideoProcessor::isStopped()
{
    // True once stopIt() has been called (or before run() resets the flag).
    return stop;
}
bool VideoProcessor::isOpened()
{
    // A source is available if either the capture device is open or an image
    // sequence has been supplied.
    return capture.isOpened() || !images.empty();
}
long VideoProcessor::getFrameNumber()
{
if (images.size() == 0)
{
long f = static_cast<long>(capture.get(CV_CAP_PROP_POS_FRAMES));
return f;
}
else
{
return static_cast<long>(images.end() - images.begin());
}
}
void VideoProcessor::run()
{
cv::Mat frame;
cv::Mat output;
if (!isOpened())
{
cout << "Error!"<<"打开预处理的视频或序列文件或设备失败!"<return;
}
stop = false;
while (!isStopped())
{
if (!readNextFrame(frame))
break;
if (windowNameInput.length() != 0)
cv::imshow(windowNameInput, frame);
if (callIt)
{
if (process)
process(frame, output);
else if (frameProcessor)
frameProcessor->process(frame, output);
fnumber++;
}
else
{
output = frame;
}
if (windowNameOutput.length() != 0)
cv::imshow(windowNameOutput, output);
if (delay >= 0 && cv::waitKey(delay) >= 0)
stopIt();
}
}