/*
* WengoPhone, a voice over Internet phone
* Copyright (C) 2004-2006 Wengo
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
//#include "IWebcamDriver.h"
#include "DirectXWebcamDriver.h"
#include "pixertool/directx-pixertool.h"
//ATL smart pointers and string conversion macros, _bstr_t support, DirectShow API
#include <atlbase.h>
#include <atlconv.h>
#include <comdef.h>
#include <dshow.h>
#include <qedit.h>	//ISampleGrabber / CLSID_SampleGrabber (older Platform SDKs)
#pragma comment(lib, "comsupp.lib")
using namespace std;
#define SAFE_RELEASE_POINTER(x) { if (x) x->Release(); x = NULL; }
#define SAFE_RELEASE(x) { if (x) x.Release(); x = NULL; }
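//For reference, the SHAREDMEMORY view used by the writer/reader paths below is
//assumed to look roughly like this; the real definition lives in
//DirectXWebcamDriver.h (or a related header) and the field set here is simply
//inferred from how the view is accessed in this file:
//
//	typedef struct tagSHAREDMEMORY {
//		int isStoped;                           //set to 1 when the writer shuts down
//		long lBufferSize;                       //size in bytes of the frame in DataBuffer
//		unsigned int _cachedFPS;                //capture frame rate
//		unsigned int _cachedWidth;              //frame width in pixels
//		unsigned int _cachedHeight;             //frame height in pixels
//		pixosi _cachedPalette;                  //pixel format of the frame
//		BYTE DataBuffer[MAX_VIDEO_FRAME_SIZE];  //raw frame bytes
//	} SHAREDMEMORY, *PSHAREDMEMORY;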
//CComBSTR aaa("aaa");
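//AddDevicesToList enumerates the video capture devices and adds each device's
//FriendlyName to the given combo box (hlist may be NULL, in which case the
//devices are only enumerated).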
HRESULT AddDevicesToList(HWND hlist)
{
HRESULT hr;
//create an enumerator
CComPtr< ICreateDevEnum > pCreateDevEnum;
pCreateDevEnum.CoCreateInstance(CLSID_SystemDeviceEnum);
if ( !pCreateDevEnum)
{
return S_FALSE;
}
//enumerate video capture devices
CComPtr< IEnumMoniker > pEnumMoniker;
pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0);
if (!pEnumMoniker)
{
return S_FALSE;
}
pEnumMoniker->Reset();
//go through and find all video capture device(s)
ULONG cFetched;
IMoniker *pM;
while (hr = pEnumMoniker->Next(1, &pM, &cFetched), hr == S_OK)
{
CComPtr< IPropertyBag > pBag;
hr = pM->BindToStorage( 0,0, IID_IPropertyBag, (void **)&pBag);
if (SUCCEEDED (hr))
{
VARIANT var;
var.vt = VT_BSTR;
hr =pBag->Read( L"FriendlyName", &var, NULL);
if ( hr == NOERROR)
{
USES_CONVERSION;
if (hlist)
::SendMessage(hlist, CB_ADDSTRING, 0, reinterpret_cast<LPARAM>(W2T(var.bstrVal)));
SysFreeString( var.bstrVal);
}
}
pM->Release();
}
if (hlist)
::SendMessage(hlist, CB_SETCURSEL, 0, 0);
return S_OK;
}
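//Same enumeration as above, but the friendly names are appended to cam_list
//instead of being sent to a combo box.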
HRESULT AddDevicesToList(std::list<std::wstring> & cam_list)
{
HRESULT hr;
std::wstring strCamDevice =L"";
//create an enumerator
CComPtr< ICreateDevEnum > pCreateDevEnum;
pCreateDevEnum.CoCreateInstance(CLSID_SystemDeviceEnum);
if ( !pCreateDevEnum)
{
return S_FALSE;
}
//enumerate video capture devices
CComPtr< IEnumMoniker > pEnumMoniker;
pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0);
if (!pEnumMoniker)
{
return S_FALSE;
}
pEnumMoniker->Reset();
//go through and find all video capture device(s)
ULONG cFetched;
IMoniker *pM;
while (hr = pEnumMoniker->Next(1, &pM, &cFetched), hr == S_OK)
{
CComPtr< IPropertyBag > pBag;
hr = pM->BindToStorage( 0,0, IID_IPropertyBag, (void **)&pBag);
if (SUCCEEDED (hr))
{
VARIANT var;
var.vt = VT_BSTR;
hr =pBag->Read( L"FriendlyName", &var, NULL);
if ( hr == NOERROR)
{
USES_CONVERSION;
strCamDevice= (LPCTSTR)W2T(var.bstrVal);
SysFreeString( var.bstrVal);
cam_list.push_back(strCamDevice);
}
}
pM->Release();
}
return S_OK;
}
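//FindMyCaptureDevice walks the video-input device category and, when a device
//whose FriendlyName equals bstrName is found, binds it to an IBaseFilter that
//is returned (AddRef'd) through pF. Returns S_OK on success.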
HRESULT FindMyCaptureDevice (IBaseFilter **pF, BSTR bstrName)
{
HRESULT hr= E_FAIL;
CComPtr< IBaseFilter > pFilter;
CComPtr< ICreateDevEnum > pSysDevEnum;
CComPtr< IEnumMoniker > pEnumCat;
//create the system enumerator
pSysDevEnum.CoCreateInstance (CLSID_SystemDeviceEnum);
if (!pSysDevEnum)
{
return E_FAIL;
}
//Obtain a class enumerator for the video compressor category
pSysDevEnum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory, &pEnumCat, 0);
if (!pEnumCat )
{
return E_FAIL;
}
pEnumCat->Reset();
while (true) {
CComPtr< IMoniker > pMoniker;
ULONG cFetched;
CComPtr< IPropertyBag > pProp;
HRESULT hr_work = pEnumCat->Next (1, &pMoniker, &cFetched);
if (hr_work != S_OK)
{
break;
}
hr = pMoniker->BindToStorage (0, 0, IID_IPropertyBag, (void **)&pProp);
if (hr != S_OK)
{
continue;
}
VARIANT varName;
VariantInit( &varName);
hr = pProp->Read (L"FriendlyName", &varName, 0);
if (SUCCEEDED (hr) && wcscmp (bstrName, varName.bstrVal) == 0) {
VariantClear( &varName); //clear the variant before leaving the loop
hr = pMoniker->BindToObject (0, 0, IID_IBaseFilter, (void **)& pFilter);
break;
}
VariantClear( &varName);
pMoniker = NULL; //Release for the next loop;
}
if ( pFilter) {
*pF = pFilter;
(*pF)->AddRef(); //add ref on the way out.
}
return hr;
}
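//GetIAMStreamConfig enumerates the pins of a capture filter and returns the
//first IAMStreamConfig interface it finds (the caller must Release() it), or
//NULL if no pin exposes that interface.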
IAMStreamConfig *GetIAMStreamConfig (IBaseFilter * pFilter)
{
IEnumPins *pEnum= NULL;
HRESULT hr = pFilter->EnumPins(&pEnum);
if ( FAILED (hr))
return NULL;
IPin *pPin= NULL;
while (pEnum->Next(1, &pPin, NULL) == S_OK)
{
IAMStreamConfig *pIAMS = NULL;
hr = pPin->QueryInterface (IID_IAMStreamConfig, (void **)&pIAMS);
pPin->Release();
if ( SUCCEEDED (hr) )
{
pEnum->Release(); //release the pin enumerator before returning the interface
return pIAMS;
}
}
pEnum->Release();
return NULL;
}
DirectXWebcamDriver::DirectXWebcamDriver(int flags,int nWidth,int nHeight)
: IWebcamDriver(flags)
{
Init();
_desiredWidth= nWidth;
_desiredHeight = nHeight;
_hSharedMemoryReadThread = NULL;
_hSharedMemoryReadFile = NULL;
_hSharedMemoryWriteFile = NULL;
_hSharedMemoryLock = NULL;
_isStartCaptured = FALSE;
}
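//Typical use of this driver, as a rough sketch (the exact call sequence is
//dictated by the callers elsewhere in the application, not by this file;
//myFrameCallback is a hypothetical webcamcallback):
//
//	DirectXWebcamDriver driver(0, 320, 240);            //flags, desired width/height
//	driver.setDevice(driver.getDefaultDevice());        //build the capture graph or attach to shared memory
//	driver.webcam_add_callback(myFrameCallback, NULL);  //frames are delivered through the callback
//	driver.startCapture();
//	//...
//	driver.stopCapture();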
DirectXWebcamDriver::~DirectXWebcamDriver()
{
UnInit();
}
void DirectXWebcamDriver::Init()
{
//FIXME does not work because of Qt4.1.2
//CoInitializeEx(NULL, COINIT_MULTITHREADED);
CoInitialize(NULL);
_isOpen = false;
_isCaptureGraphBuild = false;
_isRunning =false;
//_webcamDriver = driver;
_pGrabberF = NULL;
_pGrabber = NULL;
_pNull = NULL;
_iam = NULL;
_convImage = NULL;
//_desiredPalette = PIX_OSI_YUV420P;
_desiredPalette = PIX_OSI_RGB24;
_cachedWidth = 0;
_cachedHeight = 0;
_cachedFPS = 10;
_forcedFPS = 10;
_fpsTimerLast = 0;
_convFlags = PIX_NO_FLAG;
initializeConvImage();
_cb_cbk = NULL;
_cb_udata = NULL;
}
void DirectXWebcamDriver::UnInit()
{
stopCapture();
if( _hSharedMemoryWriteFile ) //the shared-memory write mapping is valid (this process owns the camera)
{
if (_isRunning && _isCaptureGraphBuild && _pGraph) //skip if not running, the capture graph was never built, or the graph pointer is invalid
{
//if ( !_isCaptureGraphBuild)
// return;
//if (!_pGraph) {
// return;
//}
CComQIPtr< IMediaControl, &IID_IMediaControl > pControl = _pGraph;
HRESULT hr = pControl->Stop();
if (hr != S_OK) {//stopping the graph failed
//LOG_ERROR("Could not stop capture");
//return;
}
}
_isRunning = false;
}
cleanup();
CoUninitialize();
if (_convImage)
{
pix_free(_convImage);
_convImage = 0;
}
if( _hSharedMemoryReadFile ) //close the shared-memory read mapping
{
CloseHandle( _hSharedMemoryReadFile );
_hSharedMemoryReadFile = NULL;
}
if( _hSharedMemoryWriteFile ) //close the shared-memory write mapping
{
//notify any readers that this writer is shutting down
WaitForSingleObject( _hSharedMemoryLock , INFINITE );
PSHAREDMEMORY lpView = (PSHAREDMEMORY)MapViewOfFile( _hSharedMemoryWriteFile, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, 0); //map a view of the file mapping into this process's address space
if((int*)lpView != NULL)
{
lpView->isStoped = 1;
UnmapViewOfFile((LPVOID) lpView);
}
//unlock
ReleaseMutex( _hSharedMemoryLock );
CloseHandle( _hSharedMemoryWriteFile );
_hSharedMemoryWriteFile = NULL;
}
if( _hSharedMemoryLock )
{
CloseHandle( _hSharedMemoryLock );
_hSharedMemoryLock = NULL;
}
if( _hSharedMemoryReadThread )
{
CloseHandle( _hSharedMemoryReadThread );
_hSharedMemoryReadThread = NULL;
}
}
void DirectXWebcamDriver::cleanup()
{
_isRunning = false;
_isCaptureGraphBuild = false;
_isOpen = false;
_desiredPalette = PIX_OSI_RGB24;
_cachedWidth = 0;
_cachedHeight = 0;
_cachedFPS = 10;
_forcedFPS = 10;
_fpsTimerLast = 0;
_cachedPalette = PIX_OSI_UNSUPPORTED;
SAFE_RELEASE_POINTER(_pGrabberF);
SAFE_RELEASE_POINTER(_pGrabber);
SAFE_RELEASE_POINTER(_pNull);
SAFE_RELEASE_POINTER(_iam);
SAFE_RELEASE(_pGraph);
SAFE_RELEASE(_pCap);
SAFE_RELEASE(_pBuild);
}
//StringList DirectXWebcamDriver::getDeviceList() {
// StringList deviceList;
//
// //create an enumerator
// CComPtr< ICreateDevEnum > pCreateDevEnum;
// pCreateDevEnum.CoCreateInstance(CLSID_SystemDeviceEnum);
// if (!pCreateDevEnum) {
// return deviceList;
// }
//
// //enumerate video capture devices
// CComPtr< IEnumMoniker > pEnumMoniker;
// pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumMoniker, 0);
// if (!pEnumMoniker) {
// return deviceList;
// }
//
// pEnumMoniker->Reset();
// //go through and find all video capture device(s)
// while (true) {
// CComPtr< IMoniker > pMoniker;
// HRESULT hr = pEnumMoniker->Next(1, &pMoniker, 0);
// if (hr != S_OK) {
// break;
// }
//
// //get the property bag for this moniker
// CComPtr< IPropertyBag > pPropertyBag;
// hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void**) &pPropertyBag);
// if (hr != S_OK) {
// continue;
// }
//
// //ask for the english-readable name
// CComVariant FriendlyName;
// CComVariant DevicePath;
// hr = pPropertyBag->Read(L"FriendlyName", &FriendlyName, NULL);
// hr = pPropertyBag->Read(L"DevicePath", &DevicePath, NULL);
// if (hr != S_OK) {
// continue;
// }
//
// std::string deviceName;
// if (((string) _bstr_t(DevicePath)).find("pci") == string::npos) {
// deviceName = (string) _bstr_t(FriendlyName);
// deviceList += deviceName;
// }
//
// /* TODO: do we still use this variable? see lib video in classic.
// else if (pci_device) {
// deviceName = (string) _bstr_t(FriendlyName);
// deviceList += deviceName;
// }
// */
// }
//
// return deviceList;
//}
std::wstring DirectXWebcamDriver::getDefaultDevice()
{
std::wstring defaultDevice =L"";
//create an enumerator
CComPtr< ICreateDevEnum > pCreateDevEnum;
pCreateDevEnum.CoCreateInstance(CLSID_SystemDeviceEnum);
//_ASSERTE(pCreateDevEnum);
if (!pCreateDevEnum) {
return defaultDevice;
}
//enumerate video capture devices
CComPtr< IEnumMoniker > pEm;
pCreateDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEm, 0);
//_ASSERTE(pEm);
if (!pEm) {
return defaultDevice;
}
pEm->Reset();
//go through and find first video capture device
while (true)
{
ULONG ulFetched = 0;
CComPtr< IMoniker > pM;
HRESULT hr = pEm->Next(1, &pM, &ulFetched);
if (hr != S_OK) {
break;
}
//get the property bag
CComPtr< IPropertyBag > pBag;
hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void**) &pBag);
if (hr != S_OK) {
continue;
}
//ask for the english-readable name
CComVariant var;
var.vt = VT_BSTR;
hr = pBag->Read(L"FriendlyName", &var, NULL);
if (hr != S_OK) {
continue;
}
defaultDevice = (const wchar_t *) _bstr_t(var);
//ask for the actual filter
CComPtr< IBaseFilter > ppCap;
hr = pM->BindToObject(0, 0, IID_IBaseFilter, (void**) &ppCap);
if (ppCap) {
break;
}
}
return defaultDevice;
}
int DirectXWebcamDriver::setDevice(const std::wstring & deviceName )
{
_szCurrentDeviceName = deviceName;
//TODO: test if a webcam is already open
if ( _isCaptureGraphBuild)
{
stopCapture2();
cleanup();
}
_pBuild.CoCreateInstance(CLSID_CaptureGraphBuilder2);
if (!_pBuild) {
//LOG_ERROR("failed to create Capture Graph builder");
return E_FAIL;
}
_pGraph.CoCreateInstance(CLSID_FilterGraph);
if (!_pGraph) {
//LOG_ERROR("failed to create Graph builder");
return E_FAIL;
}
_pBuild->SetFiltergraph(_pGraph);
//Create the Sample Grabber
HRESULT hr = CoCreateInstance(CLSID_SampleGrabber, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&(_pGrabberF));
if (hr != S_OK) {
//LOG_ERROR("failed to create COM instance");
return E_FAIL;
}
//Add the filter to the graph
hr = (_pGraph)->AddFilter(_pGrabberF, L"Sample Grabber");
if (hr != S_OK) {
//LOG_ERROR("failed to add filter");
return E_FAIL;
}
//Query the Sample Grabber for the ISampleGrabber interface.
_pGrabberF->QueryInterface(IID_ISampleGrabber, (void**)&_pGrabber);
hr = _pGrabber->SetBufferSamples(FALSE);
hr = _pGrabber->SetOneShot(FALSE);
//Set the Sample Grabber callback
//0: SampleCB (the buffer is the original buffer, not a copy)
//1: BufferCB (the buffer is a copy of the original buffer)
if (_pGrabber->SetCallback(this, 0) != S_OK)
{
//LOG_ERROR("failed to assign callback");
return E_FAIL;
}
CComBSTR bstrName(deviceName.c_str());
hr = FindMyCaptureDevice(&_pCap, bstrName);
if ((hr != S_OK) || !_pCap )
{
return E_FAIL;
}
//initialize IAMStreamConfig
_iam = GetIAMStreamConfig(_pCap);
if (!_iam) {
return E_FAIL;
}
//add the capture filter to the graph
hr = (_pGraph)->AddFilter(_pCap, L"VideoCapture");
if (hr != S_OK) {
//LOG_ERROR("failed to add filter");
return E_FAIL;
}
//Add a null renderer filter
//CComPtr< IBaseFilter > pNull;
//hr = pNull.CoCreateInstance (CLSID_NullRenderer);
hr = CoCreateInstance(CLSID_NullRenderer, NULL, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&_pNull);
hr = (_pGraph)->AddFilter(_pNull, L"NullRender");
hr = _pBuild->RenderStream(&PIN_CATEGORY_CAPTURE, /*NULL*/&MEDIATYPE_Video, _pCap, NULL, _pGrabberF);
if ( FAILED (hr ))
{
//When a second program tries to open the same camera, RenderStream fails here.
//In that case fall back to reading frames from the shared memory published by
//the program that owns the camera; the shared-memory name embeds the camera name.
std::wstring szFileName = L"SharedMemory_";
szFileName += deviceName;
_hSharedMemoryReadFile = OpenFileMapping( FILE_MAP_READ | FILE_MAP_WRITE, FALSE, szFileName.c_str() );
if( _hSharedMemoryReadFile == NULL )
return hr;
//read the capture parameters (format, resolution, FPS) from the shared memory
PSHAREDMEMORY lpView = (PSHAREDMEMORY)MapViewOfFile(_hSharedMemoryReadFile, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, 0); //map a view of the file mapping into this process's address space
if ((int *)lpView == NULL)
{
return hr;
}
std::wstring szLockName = L"Global\\SharedMemoryLock_";
szLockName += deviceName;
_hSharedMemoryLock = OpenMutex( MUTEX_ALL_ACCESS , FALSE , szLockName.c_str() );
if( _hSharedMemoryLock == NULL )
{
UnmapViewOfFile((LPVOID) lpView);
CloseHandle( _hSharedMemoryReadFile );
_hSharedMemoryReadFile = NULL;
return hr;
}
_cachedFPS = lpView->_cachedFPS;
_desiredWidth= lpView->_cachedWidth;
_desiredHeight = lpView->_cachedHeight;
_cachedPalette = _desiredPalette = lpView->_cachedPalette;
_isCaptureGraphBuild = true;
_isOpen = true;
UnmapViewOfFile((LPVOID) lpView);
return S_OK;
}
//The code below targets driverless (UVC) webcams, e.g. I420 output
//try to assign some palette until the webcam supports it
//*
//added for UVC(usb video class camero) 20080301 yangkai
_cachedFPS = 15;
if (setCaps(PIX_OSI_YUY2, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
{
_desiredPalette = PIX_OSI_YUY2;
}
else if (setCaps(PIX_OSI_RGB24, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
{
_desiredPalette = PIX_OSI_RGB24;
}
else if (setCaps(PIX_OSI_YUV420P, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
{
_desiredPalette = PIX_OSI_YUV420P;
}
else if (setCaps(PIX_OSI_I420, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
{
_desiredPalette = PIX_OSI_I420;
}
else
return E_FAIL;
/*
else if (setCaps(PIX_OSI_RGB24, 15, 176, 144) == S_OK )
{
}else if (setCaps(PIX_OSI_YUY2, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
//if (setCaps(PIX_OSI_YUY2, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
{
}
else if (setCaps(PIX_OSI_YUV420P, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
{
}
else if (setCaps(PIX_OSI_RGB24, _cachedFPS, _desiredWidth, _desiredHeight) == S_OK )
{
}else {
return E_FAIL;
}
*/
/*
else if (setCaps(PIX_OSI_YUV420P, _cachedFPS, 160, 120) != S_OK )
{
}
else if (setCaps(PIX_OSI_I420, _cachedFPS, 176, 144) != S_OK )
{
}
else if (setCaps(PIX_OSI_I420, _cachedFPS, 160, 120) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB32, _cachedFPS, 176, 144) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB32, _cachedFPS, 160, 120) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB24, _cachedFPS, 176, 144) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB24, _cachedFPS, 160, 120) != S_OK )
{
}
else if (setCaps(PIX_OSI_YUV422, _cachedFPS, 176, 144) != S_OK )
{
}
else if (setCaps(PIX_OSI_YUV422, _cachedFPS, 160, 120) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB565, _cachedFPS, 176, 144) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB565, _cachedFPS, 160, 120) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB555, _cachedFPS, 176, 144) != S_OK )
{
}
else if (setCaps(PIX_OSI_RGB555, _cachedFPS, 160, 120) != S_OK )
{
}
else if (setCaps(PIX_OSI_UYVY, _cachedFPS, 176, 144) != S_OK )
{
}
else if (setCaps(PIX_OSI_UYVY, _cachedFPS, 160, 120) != S_OK )
{
}
//*/ //added for UVC(usb video class camero) 20080301 yangkai
//if (E_FAIL == setCaps( PIX_OSI_RGB24, _cachedFPS, _desiredWidth, _desiredHeight))
// return E_FAIL;
readCaps();
if (getPalette() == PIX_OSI_UNSUPPORTED)
{
return E_FAIL;
}
//The camera was initialized successfully; create the shared memory so other processes can read the frames.
std::wstring szLockName = L"Global\\SharedMemoryLock_";
szLockName += deviceName;
_hSharedMemoryLock = CreateMutex( NULL , FALSE , szLockName.c_str() );
if( _hSharedMemoryLock == NULL )
{
_isCaptureGraphBuild = true;
_isOpen = true;
return E_FAIL;
}
std::wstring szFileName = L"SharedMemory_";
szFileName += deviceName;
_hSharedMemoryWriteFile = CreateFileMapping(INVALID_HANDLE_VALUE, NULL, PAGE_READWRITE, 0, ((sizeof(SHAREDMEMORY)+4)/4)*4, szFileName.c_str()); //backed by the system paging file
if( _hSharedMemoryWriteFile )
{
PSHAREDMEMORY lpView = (PSHAREDMEMORY)MapViewOfFile(_hSharedMemoryWriteFile, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, 0); //map a view of the file mapping into this process's address space
if((int *)lpView == NULL)
{
CloseHandle( _hSharedMemoryWriteFile );
_hSharedMemoryWriteFile = NULL;
CloseHandle( _hSharedMemoryLock );
_hSharedMemoryLock = NULL;
_isCaptureGraphBuild = true;
_isOpen = true;
return S_OK;
}
lpView->lBufferSize = 0;
lpView->_cachedFPS = _cachedFPS ;
lpView->_cachedWidth = _desiredWidth;
lpView->_cachedHeight = _desiredHeight;
lpView->_cachedPalette = _desiredPalette;
UnmapViewOfFile((LPVOID) lpView);
}
else
{
CloseHandle( _hSharedMemoryLock );
_hSharedMemoryLock = NULL;
}
_isCaptureGraphBuild = true;
_isOpen = true;
return S_OK;
}
bool DirectXWebcamDriver::isOpen() const
{
return _isOpen;
}
void DirectXWebcamDriver::startCapture2()
{
if ( !_isRunning)
{
if ( !_isCaptureGraphBuild)
return;
if (!_pGraph) {
return;
}
CComQIPtr< IMediaControl, &IID_IMediaControl > pControl = _pGraph;
HRESULT hr = pControl->Run();
if (hr != S_OK) {
//LOG_ERROR("Could not run graph");
return;
}
_isRunning = true;
}
}
void DirectXWebcamDriver::startCapture()
{
/*
if ( !_isRunning)
{
if ( !_isCaptureGraphBuild)
return;
if (!_pGraph) {
return;
}
CComQIPtr< IMediaControl, &IID_IMediaControl > pControl = _pGraph;
HRESULT hr = pControl->Run();
if (hr != S_OK) {
//LOG_ERROR("Could not run graph");
return;
}
_isRunning = true;
}
*/
_isStartCaptured = true;
//If the device was opened through shared memory, create a thread here that reads the video data and forwards it to the callback.
if( _hSharedMemoryReadFile )
{
DWORD id=0;
if( _hSharedMemoryReadThread == NULL )
_hSharedMemoryReadThread = CreateThread( NULL , 0 , SharedMemoryReadThread , this , 0 , &id );
}
}
void DirectXWebcamDriver::pauseCapture()
{
}
void DirectXWebcamDriver::stopCapture2()
{
if (!_isRunning)
return;
if ( !_isCaptureGraphBuild)
return;
if (!_pGraph) {
return;
}
CComQIPtr< IMediaControl, &IID_IMediaControl > pControl = _pGraph;
HRESULT hr = pControl->Stop();
if (hr != S_OK) {
//LOG_ERROR("Could not stop capture");
return;
}
_isRunning = false;
}
void DirectXWebcamDriver::stopCapture()
{
_isStartCaptured = FALSE;
/*
if( _hSharedMemoryWriteFile == NULL )
{
if (!_isRunning)
return;
if ( !_isCaptureGraphBuild)
return;
if (!_pGraph) {
return;
}
CComQIPtr< IMediaControl, &IID_IMediaControl > pControl = _pGraph;
HRESULT hr = pControl->Stop();
if (hr != S_OK) {
//LOG_ERROR("Could not stop capture");
return;
}
_isRunning = false;
}
*/
if (_hSharedMemoryReadThread)
{
WaitForSingleObject( _hSharedMemoryReadThread, INFINITE);
CloseHandle( _hSharedMemoryReadThread);
_hSharedMemoryReadThread = NULL;
}
}
int DirectXWebcamDriver::setPalette(pixosi palette)
{
setCaps(palette, getFPS(), getWidth(), getHeight());
if (_cachedPalette != palette)
return E_FAIL;
return S_OK;
}
pixosi DirectXWebcamDriver::getPalette() const
{
return _cachedPalette;
}
int DirectXWebcamDriver::setFPS(unsigned int fps)
{
HRESULT hr = E_FAIL;
if (fps > 25 || fps == 0) //fps is unsigned, so only the upper bound and zero need checking
return E_FAIL;
hr = setCaps(getPalette(), fps, getWidth(), getHeight());
if ( SUCCEEDED (hr ))
{
_cachedFPS = fps;
return S_OK;
}
return E_FAIL;
}
unsigned int DirectXWebcamDriver::getFPS() const
{
return _cachedFPS;
}
int DirectXWebcamDriver::setResolution(unsigned int width, unsigned int height)
{
if(_desiredWidth == width && _desiredHeight == height)
return S_OK;
setCaps(getPalette(), getFPS(), width, height);
if ((_cachedWidth != width) || (_cachedHeight != height))
{
return E_FAIL;
}
//Update the desired output size; note that the encoder's input size must be changed to match.
_desiredPalette = getPalette();
_desiredWidth = width;
_desiredHeight = height;
return S_OK;
}
unsigned int DirectXWebcamDriver::getWidth() const
{
return _cachedWidth;
}
unsigned int DirectXWebcamDriver::getHeight() const
{
return _cachedHeight;
}
void DirectXWebcamDriver::setBrightness(int brightness)
{
}
int DirectXWebcamDriver::getBrightness() const
{
return 0;
}
void DirectXWebcamDriver::setContrast(int contrast)
{
}
int DirectXWebcamDriver::getContrast() const
{
return 0;
}
void DirectXWebcamDriver::flipHorizontally(bool flip)
{
//TODO: add horizontal flip support
}
int DirectXWebcamDriver::setCaps(pixosi palette, unsigned fps, unsigned resolutionWidth, unsigned resolutionHeight)
{
bool brestart = false;
_cachedFPS = fps;
/*
if (!isOpen()) {
return E_FAIL;
}
*/
if (!_pCap) {
//LOG_FATAL("webcam not initialized");
return E_FAIL;
}
if (!_iam) {
//LOG_FATAL("webcam not initialized");
return E_FAIL;
}
//if start then pause.
if (_isRunning)
{
brestart =true;
stopCapture2();
}
int iCount, iSize;
HRESULT hr = _iam->GetNumberOfCapabilities(&iCount, &iSize);
VIDEO_STREAM_CONFIG_CAPS scc;
if (sizeof(scc) != iSize) {
//LOG_ERROR("wrong config structure");
return E_FAIL;
}
for (int i = 0; i < 1/*iCount*/; i++)
{
AM_MEDIA_TYPE * pmt = NULL;
hr = _iam->GetStreamCaps(i, &pmt, reinterpret_cast<BYTE *>(&scc));
if (hr == S_OK)
{
pixosi wc_palette = pix_directx_to_pix_osi(pmt->subtype);
if (wc_palette != palette)
{
hr = E_FAIL;
continue;
}
VIDEOINFOHEADER * pvi = (VIDEOINFOHEADER *) pmt->pbFormat;
pvi->bmiHeader.biWidth = resolutionWidth;
pvi->bmiHeader.biHeight = resolutionHeight;
pvi->AvgTimePerFrame = (LONGLONG) (10000000. / (double)fps);
hr = _iam->SetFormat(pmt);
if (hr != S_OK)
{
hr = E_FAIL;
continue;
}
else
{
/*
LOG_DEBUG("assigned caps : ("
+ String::fromNumber(palette)
+ "," + String::fromNumber(fps)
+ "," + String::fromNumber(resolutionWidth)
+ "," + String::fromNumber(resolutionHeight)
+ ")");
*/
break;
}
}
}
readCaps();
if (hr == S_OK)
{
if (brestart)
startCapture2();
return S_OK;
}
/*
LOG_ERROR("failed caps request: ("
+ String::fromNumber(palette)
+ "," + String::fromNumber(fps)
+ "," + String::fromNumber(resolutionWidth)
+ "," + String::fromNumber(resolutionHeight)
+ ")");
*/
return E_FAIL;
}
void DirectXWebcamDriver::readCaps() {
AM_MEDIA_TYPE * pmt = NULL;
HRESULT hr = _iam->GetFormat(&pmt);
if (FAILED(hr) || !pmt) {
return;
}
if (pmt->formattype == FORMAT_VideoInfo) {
VIDEOINFOHEADER * pvi = (VIDEOINFOHEADER *) pmt->pbFormat;
_cachedPalette = pix_directx_to_pix_osi(pmt->subtype);
_cachedWidth = pvi->bmiHeader.biWidth;
_cachedHeight = pvi->bmiHeader.biHeight;
}
}
STDMETHODIMP DirectXWebcamDriver::QueryInterface(REFIID riid, void ** ppv)
{
//LOG_DEBUG("CSampleGrabberCB::QueryInterface");
if ( ppv == NULL)
return E_POINTER;
if (riid == IID_ISampleGrabberCB || riid == IID_IUnknown) {
*ppv = (void *) static_cast<ISampleGrabberCB *>(this);
return NOERROR;
}
return E_NOINTERFACE;
}
STDMETHODIMP DirectXWebcamDriver::BufferCB(double dblSampleTime, BYTE * pBuffer, long lBufferSize)
{
//This method is not used but must be implemented
//LOG_ERROR("this method should not be called");
return 0;
}
STDMETHODIMP DirectXWebcamDriver::SampleCB(double SampleTime, IMediaSample * pSample)
{
if (!pSample)
return E_POINTER;
BYTE * pBuffer;
pSample->GetPointer(&pBuffer);
long lBufferSize = pSample->GetSize();
if (!pBuffer)
return E_POINTER;
_capturedImage.data = (unsigned char *) pBuffer; //keep the raw frame; it is converted later (e.g. to an AVFrame*) for encoding
_capturedImage.datasize = lBufferSize;
_capturedImage.width = _cachedWidth;
_capturedImage.height = _cachedHeight;
_capturedImage.palette = _cachedPalette;
if( _isStartCaptured )
/*_webcamDriver*/this->frameBufferAvailable(&_capturedImage);
if( _hSharedMemoryWriteFile == NULL || _hSharedMemoryLock == NULL ) //no shared-memory writer: nothing to publish
return 0;
//lock the shared memory before writing the frame
WaitForSingleObject( _hSharedMemoryLock , INFINITE );
PSHAREDMEMORY lpView = (PSHAREDMEMORY)MapViewOfFile( _hSharedMemoryWriteFile, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, 0); //map a view of the file mapping into this process's address space
if((int*)lpView == NULL)
{
ReleaseMutex( _hSharedMemoryLock );
return 0;
}
lpView->isStoped = 0;
memcpy( lpView->DataBuffer , pBuffer , lBufferSize );
lpView->lBufferSize = lBufferSize;
lpView->_cachedWidth = _cachedWidth;
lpView->_cachedHeight = _cachedHeight;
lpView->_cachedPalette = _cachedPalette;
UnmapViewOfFile((LPVOID) lpView);
//unlock
ReleaseMutex( _hSharedMemoryLock );
return 0;
}
void DirectXWebcamDriver::frameBufferAvailable(piximage * image)
{
if( _hSharedMemoryReadFile == NULL )
{
if ( _forcedFPS != 0)
{
float now;
float fpsTimerInter = 1000/(float)_forcedFPS;
now = timeGetTime();
if ((now - _fpsTimerLast) < fpsTimerInter )
return;
_fpsTimerLast = now;
}
}
//TODO: convert palette
if ((_desiredPalette != image->palette) || (_desiredWidth != image->width) || (_desiredHeight != image->height)
|| ( _convFlags != PIX_NO_FLAG))
{
//TODO:
//pix_convert(_convFlags, _convImage, image);
if ( _cb_cbk)
(*_cb_cbk) (this, _convImage, _cb_udata);
return;
}
//TODO:new a buffer to process?
if ( _cb_cbk)
(* _cb_cbk) (this, image, _cb_udata);
}
void DirectXWebcamDriver::initializeConvImage()
{
/*
if (_convImage)
pix_free (_convImage);
_convImage = pix_alloc( _desiredPalette, _desiredWidth, _desiredHeight);
*/
}
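//ProcessData is the body of the shared-memory reader: it is run by the thread
//created in startCapture() (SharedMemoryReadThread, declared elsewhere, is
//presumably a thin wrapper that forwards to it). It polls the shared memory
//written by the process that owns the camera, throttled to _forcedFPS, until
//capture stops or the writer disappears.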
void DirectXWebcamDriver::ProcessData()
{
static unsigned int nWaiting = 0;
BYTE pTempBuffer[MAX_VIDEO_FRAME_SIZE];
memset( pTempBuffer , 0 , MAX_VIDEO_FRAME_SIZE );
while( _isStartCaptured )
{
if ( _forcedFPS != 0)
{
float now;
float fpsTimerInter = 1000/(float)_forcedFPS;
now = timeGetTime();
if ((now - _fpsTimerLast) < fpsTimerInter )
{
Sleep( 1 );
continue;
}
_fpsTimerLast = now;
}
//lock the shared memory before reading the frame
WaitForSingleObject( _hSharedMemoryLock , INFINITE );
PSHAREDMEMORY lpView = (PSHAREDMEMORY)MapViewOfFile( _hSharedMemoryReadFile, FILE_MAP_READ | FILE_MAP_WRITE, 0, 0, 0); //map a view of the file mapping into this process's address space
if((int*)lpView == NULL)
{
Sleep( 10 );
ReleaseMutex( _hSharedMemoryLock );
continue;
}
if( lpView->lBufferSize == 0 )
{
Sleep( 0 );
UnmapViewOfFile((LPVOID) lpView);
ReleaseMutex( _hSharedMemoryLock );
Sleep( 10 );
nWaiting ++;
continue;
}
if( lpView->isStoped )
{
UnmapViewOfFile((LPVOID) lpView);
ReleaseMutex( _hSharedMemoryLock );
//the writer has stopped: close the shared-memory handles and reinitialize
CloseHandle( _hSharedMemoryReadFile ); //close the read mapping
_hSharedMemoryReadFile=NULL;
CloseHandle( _hSharedMemoryLock ); //close the lock
_hSharedMemoryLock=NULL;
setDevice( _szCurrentDeviceName); //reinitialize the device
if( _hSharedMemoryWriteFile )
{
startCapture2( );
_isStartCaptured = TRUE;
}
return; //exit the reader thread
}
nWaiting = 0;
memcpy( pTempBuffer , lpView->DataBuffer , lpView->lBufferSize );
_capturedImage.data = (unsigned char *) pTempBuffer;
_capturedImage.datasize = lpView->lBufferSize;
_capturedImage.width = lpView->_cachedWidth;
_capturedImage.height = lpView->_cachedHeight;
_capturedImage.palette = lpView->_cachedPalette;
UnmapViewOfFile((LPVOID) lpView);
//unlock
ReleaseMutex( _hSharedMemoryLock );
//forward the frame to the upper layer
this->frameBufferAvailable(&_capturedImage);
Sleep(10);
}
}
void DirectXWebcamDriver::webcam_add_callback (webcamcallback cb, void* userdata)
{
_cb_cbk = cb;
_cb_udata = userdata;
if( _hSharedMemoryWriteFile )
{
startCapture2( );
}
}