void VC::SetResolution(IBaseFilter* pSrcFilter, int Width, int Height)
{
    IAMStreamConfig *pConfig = NULL;
    HRESULT hr = g_pCapture->FindInterface(&PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video,
                                           pSrcFilter, IID_IAMStreamConfig, (void**)&pConfig);
    if (FAILED(hr) || pConfig == NULL)
    {
        return;
    }

    int resolutions = 0, size = 0;
    VIDEO_STREAM_CONFIG_CAPS caps;
    pConfig->GetNumberOfCapabilities(&resolutions, &size);

    for (int i = 0; i < resolutions; i++)
    {
        AM_MEDIA_TYPE *mediaType;
        if (pConfig->GetStreamCaps(i, &mediaType, reinterpret_cast<BYTE*>(&caps)) == S_OK)
        {
            int maxWidth  = caps.MaxOutputSize.cx;
            int maxHeight = caps.MaxOutputSize.cy;

            if (maxWidth == Width && maxHeight == Height)
            {
                // Patch the video info header of this capability and apply it.
                VIDEOINFOHEADER *info = reinterpret_cast<VIDEOINFOHEADER*>(mediaType->pbFormat);
                info->bmiHeader.biWidth     = maxWidth;
                info->bmiHeader.biHeight    = maxHeight;
                info->bmiHeader.biSizeImage = DIBSIZE(info->bmiHeader);

                hr = pConfig->SetFormat(mediaType);
                MyDeleteMediaType(mediaType);
                break;
            }
            MyDeleteMediaType(mediaType);
        }
    }

    SAFE_RELEASE(pConfig);
}
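// If it matters whether the driver actually accepted the requested 1280x720
// format, the negotiated type can be read back with IAMStreamConfig::GetFormat.
// A minimal sketch (an illustration, not part of the class above), assuming the
// same pConfig pointer is still held, i.e. queried before SAFE_RELEASE:
//
//     AM_MEDIA_TYPE *pmtCurrent = NULL;
//     if (SUCCEEDED(pConfig->GetFormat(&pmtCurrent)) && pmtCurrent != NULL)
//     {
//         if (pmtCurrent->formattype == FORMAT_VideoInfo && pmtCurrent->pbFormat != NULL)
//         {
//             VIDEOINFOHEADER *vih = reinterpret_cast<VIDEOINFOHEADER*>(pmtCurrent->pbFormat);
//             // vih->bmiHeader.biWidth / biHeight hold the format the device agreed to.
//         }
//         MyDeleteMediaType(pmtCurrent);
//     }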
void VC::MyDeleteMediaType(AM_MEDIA_TYPE *pmt)
{
    if (pmt != NULL)
    {
        // See FreeMediaType in the DirectShow SDK for the reference implementation.
        MyFreeMediaType(*pmt);
        CoTaskMemFree(pmt);
    }
}

void VC::MyFreeMediaType(AM_MEDIA_TYPE& mt)
{
    if (mt.cbFormat != 0)
    {
        CoTaskMemFree((PVOID)mt.pbFormat);
        mt.cbFormat = 0;
        mt.pbFormat = NULL;
    }
    if (mt.pUnk != NULL)
    {
        // Unnecessary because pUnk should not be used, but safest.
        mt.pUnk->Release();
        mt.pUnk = NULL;
    }
}
int VC::Begin(HWND hWnd, HWND prvWindow, IBaseFilter* pSrcFilter)
{
    HRESULT hr;

    // Get DirectShow interfaces
    hr = GetInterfaces(hWnd);
    if (FAILED(hr))
    {
        return -1;
    }

    // Attach the filter graph to the capture graph
    hr = g_pCapture->SetFiltergraph(g_pGraph);
    if (FAILED(hr))
    {
        return -2;
    }

    /*
    hr = FindCaptureDevice(&pSrcFilter);
    if (FAILED(hr))
    {
        // Don't display a message because FindCaptureDevice will handle it
        return hr;
    }
    */

    // Add the capture filter to our graph.
    hr = g_pGraph->AddFilter(pSrcFilter, L"Video Capture");
    if (FAILED(hr))
    {
        pSrcFilter->Release();
        return -4;
    }

    // Negotiate the capture resolution. The filter must already be in the graph
    // for ICaptureGraphBuilder2::FindInterface (used inside SetResolution) to
    // locate its capture pin, so this is done after AddFilter.
    SetResolution(pSrcFilter, 1280, 720);

    // Insert the sample grabber and set its media type.
    hr = sgAddSampleGrabber(g_pGraph);
    if (FAILED(hr))
    {
        return -5;
    }

    hr = sgSetSampleGrabberMediaType();
    if (FAILED(hr))
    {
        return -6;
    }

    /*
    // Abandoned manual configuration of the preview pin and sample grabber,
    // kept for reference:
    AM_MEDIA_TYPE mt;
    IAMStreamConfig *pSC = NULL;
    //HRESULT hr;
    ZeroMemory(&mt, sizeof(AM_MEDIA_TYPE));

    hr = g_pCapture->FindInterface(&PIN_CATEGORY_PREVIEW, 0, pSrcFilter,
                                   IID_IAMStreamConfig, (void **)&pSC);
    if (FAILED(hr))
    {
        AfxMessageBox(L"FAIL");
        return hr;
    }

    mt.majortype = MEDIATYPE_Video;
    mt.subtype   = MEDIASUBTYPE_RGB24;

    //VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER *)mt.pbFormat;
    //gChannels = pVih->bmiHeader.biBitCount / 8;
    pVih->bmiHeader.biWidth  = 1280;
    pVih->bmiHeader.biHeight = 720;
    mt.pbFormat = (unsigned char *)pVih;
    pSC->SetFormat(&mt);

    hr = pGrabber->SetMediaType(&mt);
    if (FAILED(hr))
    {
        return hr;
    }

    hr = pGrabber->SetOneShot(FALSE);
    hr = pGrabber->SetBufferSamples(TRUE);
    */

    // Render the preview pin on the video capture filter through the sample
    // grabber. Use this instead of g_pGraph->RenderFile.
    hr = g_pCapture->RenderStream(&PIN_CATEGORY_PREVIEW, &MEDIATYPE_Video,
                                  pSrcFilter, pGrabberFilter, NULL);
    if (FAILED(hr))
    {
        pSrcFilter->Release();
        return -7;
    }

    hr = sgGetSampleGrabberMediaType();

    // Now that the filter has been added to the graph and we have
    // rendered its stream, we can release this reference to the filter.
    pSrcFilter->Release();

    // Set video window style and position
    hr = SetupVideoWindow(prvWindow);
    if (FAILED(hr))
    {
        return -8;
    }

    return 0;
}
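// A minimal, hypothetical call site for Begin() (the vc instance and the
// hMainWnd / hPreviewWnd handles are assumptions, not shown in this class;
// the device filter is obtained the way the commented-out FindCaptureDevice()
// call inside Begin() suggests, assuming that helper is a member of VC):
//
//     IBaseFilter* pSrcFilter = NULL;
//     HRESULT hr = vc.FindCaptureDevice(&pSrcFilter);
//     if (SUCCEEDED(hr) && pSrcFilter != NULL)
//     {
//         int err = vc.Begin(hMainWnd, hPreviewWnd, pSrcFilter);
//         if (err != 0)
//         {
//             // A non-zero return identifies the failing step (-1 .. -8);
//             // Begin() releases pSrcFilter on the paths where it owns it.
//         }
//     }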