屏幕捕获RGB数据回调函数(CVideoCapture.h):
#ifndef _VIDEODATA_CALLBACK_
#define _VIDEODATA_CALLBACK_
// Per-frame data callback: receives the captured pixel buffer (32-bit
// BGRA per the copy path below), its size in bytes, the frame width and
// height in pixels, and the opaque user context registered at start.
typedef void(*LPVideoDataRealCallback)(unsigned char *pRgbData, int size, int width, int height, void* pContext);
#endif
调用接口定义(CVideoCapture.h):
// DXGI capture: register the frame callback and start capturing the given
// screen region; returns an opaque source handle (NULL on failure).
AVFILTER_API void* DXGI_VideoSourceStartCapture(int left, int top, int width, int height, LPVideoDataRealCallback pCallback, void* pUser);
// Stop capturing on the given source handle.
AVFILTER_API bool DXGI_VideoSourceStopCapture(void* pSource);
实现源码(CVideoCapture.c):
#include <windows.h>
#include "CVideoCapture.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <process.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#pragma comment(lib, "d3d11")
#pragma comment(lib, "dxgi")
#ifndef _VIDEO_SOURCE_
#define _VIDEO_SOURCE_
// State for one DXGI desktop-duplication capture session.
typedef struct tDXGIVideoSource
{
int m_nLeft;//capture region - left coordinate
int m_nTop;//capture region - top coordinate
int m_nWidth;//capture region - width
int m_nHeight;//capture region - height
bool m_bCaptureCursor;//whether to capture the mouse cursor
bool m_bActive;//true once at least one frame has been captured
HANDLE m_hCaptureThread;//capture thread handle
HANDLE m_hStopSignal;//event signalled to ask the thread to stop
CRITICAL_SECTION m_csMemLock;//mutex guarding the frame buffer
void* m_memRawBuffer;//buffer holding the captured pixel data
int m_nMemSize;//size of the pixel buffer in bytes
ID3D11Device *m_hDevice;//D3D11 device
ID3D11DeviceContext *m_hContext;//D3D11 device context
IDXGIOutputDuplication *m_hDeskDupl;//desktop duplication object
DXGI_OUTPUT_DESC m_dxgiOutDesc;//output description (holds desktop resolution etc.)
LPVideoDataRealCallback m_pVideoDataRealCalBack;//per-frame data callback
void* m_pVideoDataRealCalBackUser;//user context handed back to the callback
}DXGIVideoSource, *LPDXGIVideoSource;
#endif
//allocate and initialise a video-source object (*pSource is NULL on failure)
void DXGIAllocVideoResource(LPDXGIVideoSource* pSource);
//release a video-source object and every resource it owns
void DXGIReleaseVideoResource(LPDXGIVideoSource pSource);
//start the capture: allocate the frame buffer and spawn the capture thread
bool DXGIStartVideoCapture(LPDXGIVideoSource pSource);
//stop the capture: signal the thread and wait for it to exit
void DXGIStopVideoCapture(LPDXGIVideoSource pSource);
//capture thread entry point
DWORD WINAPI DXGIOnVideoCaptureThread(LPVOID param);
//capture loop executed on the capture thread
bool DXGIDoVideoCapture(LPDXGIVideoSource pSource);
//grab and copy a single desktop frame
bool DXGIProcessVideoCaptureData(LPDXGIVideoSource pSource);
// Interface IIDs spelled out as constants so the file compiles as plain C
// (no C++ __uuidof available).
const IID IID_IDXGIDevice = { 0x54ec77fa, 0x1377, 0x44e6, { 0x8c, 0x32, 0x88, 0xfd, 0x5f, 0x44, 0xc8, 0x4c } };
const IID IID_IDXGIAdapter = { 0x2411e7e1, 0x12ac, 0x4ccf, { 0xbd, 0x14, 0x97, 0x98, 0xe8, 0x53, 0x4d, 0xc0 } };
const IID IID_IDXGIOutput1 = { 0x00cddea8, 0x939b, 0x4b83, { 0xa3, 0x40, 0xa6, 0x85, 0x22, 0x66, 0x66, 0xcc } };
const IID IID_ID3D11Texture2D = { 0x6f15aaf2, 0xd208, 0x4e89, { 0x9a, 0xb4, 0x48, 0x95, 0x35, 0xd3, 0x4f, 0x9c } };
const IID IID_IDXGISurface = { 0xcafcb56c, 0x6ac3, 0x4889, { 0xbf, 0x47, 0x9e, 0x23, 0xbb, 0xd2, 0x60, 0xec } };
#define RESET_OBJECT(A) {if (A) A->lpVtbl->Release(A); A = NULL;}
// Allocate and initialise a video-source object.
// On success *ppSource points at a fully initialised DXGIVideoSource
// (D3D11 device + context created, desktop duplication acquired, output
// description cached). On any failure every partially acquired resource
// is released and *ppSource is set to NULL.
void DXGIAllocVideoResource(LPDXGIVideoSource* ppSource)
{
	LPDXGIVideoSource pSource = (LPDXGIVideoSource)malloc(sizeof(DXGIVideoSource));
	*ppSource = pSource;
	if (pSource == NULL)
	{
		return; // out of memory
	}
	// Zero everything first so the cleanup path (goto exit) can safely
	// release only what was actually acquired; the original left
	// m_hDevice/m_hContext/m_hDeskDupl uninitialised and released
	// garbage pointers on early failure.
	memset(pSource, 0, sizeof(*pSource));
	pSource->m_bActive = false;
	pSource->m_bCaptureCursor = false;
	pSource->m_hCaptureThread = NULL;
	pSource->m_hStopSignal = CreateEvent(NULL, TRUE, FALSE, NULL);
	pSource->m_memRawBuffer = NULL;
	pSource->m_nMemSize = 0;
	pSource->m_pVideoDataRealCalBack = NULL;
	pSource->m_pVideoDataRealCalBackUser = NULL;
	InitializeCriticalSection(&pSource->m_csMemLock);
	HRESULT hr = S_OK;
	// Driver types to try, in decreasing order of preference.
	D3D_DRIVER_TYPE DriverTypes[] =
	{
		D3D_DRIVER_TYPE_HARDWARE,  // hardware driver
		D3D_DRIVER_TYPE_WARP,      // fast software rasterizer
		D3D_DRIVER_TYPE_REFERENCE, // accurate but slow software driver
		D3D_DRIVER_TYPE_SOFTWARE,  // legacy software driver
	};
	UINT NumDriverTypes = ARRAYSIZE(DriverTypes);
	D3D_FEATURE_LEVEL FeatureLevels[] =
	{
		D3D_FEATURE_LEVEL_11_0,
		D3D_FEATURE_LEVEL_10_1,
		D3D_FEATURE_LEVEL_10_0,
		D3D_FEATURE_LEVEL_9_1
	};
	UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels);
	D3D_FEATURE_LEVEL FeatureLevel;
	// Create the D3D11 device with the first driver type that works.
	for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex)
	{
		hr = D3D11CreateDevice(NULL, DriverTypes[DriverTypeIndex], NULL, 0, FeatureLevels, NumFeatureLevels, D3D11_SDK_VERSION, &pSource->m_hDevice, &FeatureLevel, &pSource->m_hContext);
		if (SUCCEEDED(hr))
		{
			break;
		}
	}
	if (FAILED(hr))
	{
		goto exit;
	}
	IDXGIDevice *hDxgiDevice = NULL;
	hr = pSource->m_hDevice->lpVtbl->QueryInterface(pSource->m_hDevice, &IID_IDXGIDevice, (void**)(&hDxgiDevice));
	if (FAILED(hr))
	{
		goto exit;
	}
	// Walk device -> adapter -> output to reach the primary desktop.
	IDXGIAdapter *hDxgiAdapter = NULL;
	hr = hDxgiDevice->lpVtbl->GetParent(hDxgiDevice, &IID_IDXGIAdapter, (void**)(&hDxgiAdapter));
	RESET_OBJECT(hDxgiDevice);
	if (FAILED(hr))
	{
		goto exit;
	}
	INT nOutput = 0; // first output = primary monitor
	IDXGIOutput *hDxgiOutput = NULL;
	hr = hDxgiAdapter->lpVtbl->EnumOutputs(hDxgiAdapter, nOutput, &hDxgiOutput);
	RESET_OBJECT(hDxgiAdapter);
	if (FAILED(hr))
	{
		goto exit;
	}
	// Cache the output description (holds the desktop resolution).
	hDxgiOutput->lpVtbl->GetDesc(hDxgiOutput, &pSource->m_dxgiOutDesc);
	IDXGIOutput1 *hDxgiOutput1 = NULL;
	hr = hDxgiOutput->lpVtbl->QueryInterface(hDxgiOutput, &IID_IDXGIOutput1, (void**)(&hDxgiOutput1));
	RESET_OBJECT(hDxgiOutput);
	if (FAILED(hr))
	{
		goto exit;
	}
	hr = hDxgiOutput1->lpVtbl->DuplicateOutput(hDxgiOutput1, (IUnknown*)pSource->m_hDevice, &pSource->m_hDeskDupl);
	RESET_OBJECT(hDxgiOutput1);
	if (FAILED(hr))
	{
		goto exit;
	}
	return;
exit:
	DXGIReleaseVideoResource(pSource);
	*ppSource = NULL;
}
// Release a video-source object and every resource it owns: the D3D
// device/context, desktop duplication object, stop event, frame buffer
// and critical section, then the object itself. Safe to call with NULL.
void DXGIReleaseVideoResource(LPDXGIVideoSource pSource)
{
	if (pSource == NULL)
	{
		return;
	}
	RESET_OBJECT(pSource->m_hDeskDupl);
	RESET_OBJECT(pSource->m_hContext);
	RESET_OBJECT(pSource->m_hDevice);
	// The stop event is created in DXGIAllocVideoResource; close it here
	// (if the capture thread has not already done so) so a source that
	// never started capturing does not leak the handle.
	if (pSource->m_hStopSignal)
	{
		CloseHandle(pSource->m_hStopSignal);
		pSource->m_hStopSignal = NULL;
	}
	free(pSource->m_memRawBuffer);
	pSource->m_memRawBuffer = NULL;
	DeleteCriticalSection(&pSource->m_csMemLock);
	free(pSource);
	// (assigning pSource = NULL here would only change the local copy)
}
// Start capturing: (re)allocate the frame buffer sized for the requested
// region (32-bit pixels, hence width*height*4 bytes) and spawn the
// capture thread. Returns false on allocation or thread-creation failure.
bool DXGIStartVideoCapture(LPDXGIVideoSource pSource)
{
	// Drop any buffer left over from a previous run.
	free(pSource->m_memRawBuffer);
	pSource->m_memRawBuffer = NULL;
	pSource->m_nMemSize = 0;
	// 32-bit colour depth -> 4 bytes per pixel.
	pSource->m_nMemSize = pSource->m_nWidth * pSource->m_nHeight * 4;
	pSource->m_memRawBuffer = malloc((size_t)pSource->m_nMemSize);
	if (pSource->m_memRawBuffer == NULL)
	{
		pSource->m_nMemSize = 0;
		return false;
	}
	memset(pSource->m_memRawBuffer, 0, (size_t)pSource->m_nMemSize);
	// Clear any stop request left over from a previous session,
	// otherwise the new thread would exit immediately.
	ResetEvent(pSource->m_hStopSignal);
	pSource->m_bActive = false;
	// Spawn the capture thread.
	unsigned int dwThreadId;
	pSource->m_hCaptureThread = (HANDLE)_beginthreadex(NULL,
		0,
		(unsigned int (__stdcall *)(void*))DXGIOnVideoCaptureThread,
		pSource,
		0, // initflag: 0 = run immediately. The original passed
		   // THREAD_PRIORITY_NORMAL here, which is NOT a priority slot --
		   // it only worked because that constant happens to be 0.
		&dwThreadId);
	return pSource->m_hCaptureThread != NULL;
}
// Signal the capture thread to stop and wait for it to exit.
void DXGIStopVideoCapture(LPDXGIVideoSource pSource)
{
	// Ask the capture thread to finish its loop.
	SetEvent(pSource->m_hStopSignal);
	// Wait on the thread handle itself. The original only waited while
	// m_bActive was set, racing against a thread that was running but
	// had not yet delivered its first frame.
	if (pSource->m_hCaptureThread)
	{
		WaitForSingleObject(pSource->m_hCaptureThread, INFINITE);
		// Close the thread handle from the stopping side (unless the
		// thread already closed it and nulled the field).
		if (pSource->m_hCaptureThread)
		{
			CloseHandle(pSource->m_hCaptureThread);
			pSource->m_hCaptureThread = NULL;
		}
	}
	pSource->m_nMemSize = 0;
}
// Thread entry point for the capture worker: run the capture loop until
// the stop event fires, then terminate the CRT thread.
DWORD WINAPI DXGIOnVideoCaptureThread(LPVOID param)
{
	LPDXGIVideoSource pThis = (LPDXGIVideoSource)param;
	(void)DXGIDoVideoCapture(pThis);
	_endthreadex(0);
	return 0; // not reached: _endthreadex terminates the thread
}
// Capture loop executed on the worker thread: grab frames until the stop
// event is signalled, invoking the registered callback for every frame
// after the first. Always returns true.
bool DXGIDoVideoCapture(LPDXGIVideoSource pSource)
{
	const DWORD dwPollTimeout = 1; // ms between capture attempts
	// Loop until the stop event fires; a wait timeout means "capture again".
	while (WaitForSingleObject(pSource->m_hStopSignal, dwPollTimeout) == WAIT_TIMEOUT)
	{
		if (!DXGIProcessVideoCaptureData(pSource))
		{
			continue;
		}
		if (pSource->m_bActive)
		{
			// Frame captured: hand the pixel buffer to the user callback.
			if (pSource->m_pVideoDataRealCalBack)
			{
				pSource->m_pVideoDataRealCalBack((LPBYTE)pSource->m_memRawBuffer, pSource->m_nMemSize, pSource->m_nWidth, pSource->m_nHeight, pSource->m_pVideoDataRealCalBackUser);
			}
		}
		else
		{
			// The first duplicated frame is black; skip it and only start
			// delivering frames from the second capture on.
			pSource->m_bActive = true;
		}
	}
	// Close the stop event; the object owns no further use for it and
	// DXGIReleaseVideoResource tolerates the NULL.
	CloseHandle(pSource->m_hStopSignal);
	pSource->m_hStopSignal = NULL;
	// Fix: do NOT close m_hCaptureThread here. The stopping thread waits
	// on that handle, and closing a handle another thread is waiting on
	// has undefined results. The stopper owns and closes the thread handle.
	pSource->m_bActive = false;
	return true;
}
//
// Attach the current input desktop to this thread so desktop duplication
// keeps working after desktop switches (e.g. secure desktop / UAC).
// Returns true when the input desktop was successfully assigned.
//
bool AttatchToThread(VOID)
{
	HDESK hCurrentDesktop = OpenInputDesktop(0, FALSE, GENERIC_ALL);
	if (!hCurrentDesktop)
	{
		return false;
	}
	bool bDesktopAttached = SetThreadDesktop(hCurrentDesktop) != 0;
	// Fix: never CloseDesktop() the handle returned by GetThreadDesktop().
	// Per MSDN that handle does not need closing, and closing it can tear
	// down the desktop the thread is still attached to. Only the handle we
	// opened ourselves is closed, after it has been assigned.
	CloseDesktop(hCurrentDesktop);
	hCurrentDesktop = NULL;
	return bDesktopAttached;
}
// Acquire one desktop frame via DXGI desktop duplication and copy the
// requested sub-rectangle into pSource->m_memRawBuffer as tightly packed
// 32-bit BGRA rows. Returns false on a real error; an AcquireNextFrame
// timeout (desktop unchanged) returns true with no new data.
bool DXGIProcessVideoCaptureData(LPDXGIVideoSource pSource)
{
	// Attach the input desktop to this thread first.
	if (!AttatchToThread())
	{
		return false;
	}
	// Grab the next desktop frame (up to 500 ms).
	IDXGIResource *hDesktopResource = NULL;
	DXGI_OUTDUPL_FRAME_INFO FrameInfo;
	HRESULT hr = pSource->m_hDeskDupl->lpVtbl->AcquireNextFrame(pSource->m_hDeskDupl, 500, &FrameInfo, &hDesktopResource);
	if (hr == DXGI_ERROR_WAIT_TIMEOUT)
	{
		// On some Win10 systems an unchanged desktop simply times out:
		// not an error, just no new frame to deliver.
		return true;
	}
	if (FAILED(hr))
	{
		// Real failure (e.g. DXGI_ERROR_ACCESS_LOST). The original
		// returned true for every failure, masking lost-access errors.
		return false;
	}
	// Get the frame as a 2D texture.
	ID3D11Texture2D *hAcquiredDesktopImage = NULL;
	hr = hDesktopResource->lpVtbl->QueryInterface(hDesktopResource, &IID_ID3D11Texture2D, (void **)(&hAcquiredDesktopImage));
	RESET_OBJECT(hDesktopResource);
	if (FAILED(hr))
	{
		// Fix: release the acquired frame on this path too, otherwise the
		// duplication object stays locked and every later acquire fails.
		pSource->m_hDeskDupl->lpVtbl->ReleaseFrame(pSource->m_hDeskDupl);
		return false;
	}
	D3D11_TEXTURE2D_DESC frameDescriptor;
	hAcquiredDesktopImage->lpVtbl->GetDesc(hAcquiredDesktopImage, &frameDescriptor);
	// Create a CPU-readable staging texture and copy the frame into it.
	ID3D11Texture2D *hNewDesktopImage = NULL;
	frameDescriptor.Usage = D3D11_USAGE_STAGING;
	frameDescriptor.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
	frameDescriptor.BindFlags = 0;
	frameDescriptor.MiscFlags = 0;
	frameDescriptor.MipLevels = 1;
	frameDescriptor.ArraySize = 1;
	frameDescriptor.SampleDesc.Count = 1;
	hr = pSource->m_hDevice->lpVtbl->CreateTexture2D(pSource->m_hDevice, &frameDescriptor, NULL, &hNewDesktopImage);
	if (FAILED(hr))
	{
		RESET_OBJECT(hAcquiredDesktopImage);
		pSource->m_hDeskDupl->lpVtbl->ReleaseFrame(pSource->m_hDeskDupl);
		return false;
	}
	// Copy the whole frame GPU-side, then release it back to DXGI.
	pSource->m_hContext->lpVtbl->CopyResource(pSource->m_hContext, (ID3D11Resource*)hNewDesktopImage, (ID3D11Resource*)hAcquiredDesktopImage);
	RESET_OBJECT(hAcquiredDesktopImage);
	pSource->m_hDeskDupl->lpVtbl->ReleaseFrame(pSource->m_hDeskDupl);
	// Get the DXGI surface of the staging texture so it can be mapped.
	IDXGISurface *hStagingSurf = NULL;
	hr = hNewDesktopImage->lpVtbl->QueryInterface(hNewDesktopImage, &IID_IDXGISurface, (void **)(&hStagingSurf));
	RESET_OBJECT(hNewDesktopImage);
	if (FAILED(hr))
	{
		return false;
	}
	// Map the surface; mappedRect.pBits then points at the raw pixels.
	// Desktop duplication always produces 32-bit colour, and the byte
	// order is BGRA (the original comment saying "RGBA" was wrong).
	DXGI_MAPPED_RECT mappedRect;
	hr = hStagingSurf->lpVtbl->Map(hStagingSurf, &mappedRect, DXGI_MAP_READ);
	if (SUCCEEDED(hr))
	{
		const int bytesPerPixel = 4; // BGRA, 4 bytes per pixel
		const int rowBytes = pSource->m_nWidth * bytesPerPixel;
		// Fix: use mappedRect.Pitch as the source row stride. The driver
		// may pad rows, so deriving the stride from the desktop width
		// (as the original did) corrupts the image whenever
		// Pitch != desktop_width * 4.
		for (int row = 0; row < pSource->m_nHeight; row++)
		{
			const BYTE *pSrc = mappedRect.pBits
				+ (size_t)(pSource->m_nTop + row) * mappedRect.Pitch
				+ (size_t)pSource->m_nLeft * bytesPerPixel;
			BYTE *pDes = (BYTE*)pSource->m_memRawBuffer + (size_t)row * rowBytes;
			memcpy(pDes, pSrc, rowBytes);
		}
		hStagingSurf->lpVtbl->Unmap(hStagingSurf);
	}
	RESET_OBJECT(hStagingSurf);
	return SUCCEEDED(hr);
}
// Register (or clear, by passing NULLs) the per-frame data callback and
// the opaque user context handed back on every invocation.
void DXGISetVideoCaptureDataRealCallback(LPDXGIVideoSource pSource, LPVideoDataRealCallback pVideoDataRealCalBack, void* pUser)
{
	pSource->m_pVideoDataRealCalBackUser = pUser;
	pSource->m_pVideoDataRealCalBack = pVideoDataRealCalBack;
}
// DXGI entry point: create a capture source for the given screen region,
// register the per-frame callback and start the capture thread.
// Returns an opaque source handle, or NULL on failure.
AVFILTER_API void* DXGI_VideoSourceStartCapture(int left, int top, int width, int height, LPVideoDataRealCallback pCallback, void* pUser)
{
	LPDXGIVideoSource pSource = NULL;
	DXGIAllocVideoResource(&pSource);
	// Fix: check the allocation BEFORE touching the object. The original
	// dereferenced pSource unconditionally, and on failure passed NULL to
	// DXGIReleaseVideoResource, which dereferences its argument.
	if (pSource == NULL)
	{
		return NULL;
	}
	// Store the capture-region parameters.
	printf("录制区域left=%d,top=%d,w=%d,h=%d\n", left, top, width, height);
	pSource->m_nLeft = left;
	pSource->m_nTop = top;
	pSource->m_nWidth = width;
	pSource->m_nHeight = height;
	// Register the frame callback, then start the capture thread.
	DXGISetVideoCaptureDataRealCallback(pSource, pCallback, pUser);
	if (DXGIStartVideoCapture(pSource))
	{
		return pSource;
	}
	// Start failed: release everything that was allocated.
	DXGIReleaseVideoResource(pSource);
	return NULL;
}
// 停止视频数据捕获
AVFILTER_API bool DXGI_VideoSourceStopCapture(void* pSource)
{
if (pSource == NULL)
return true;
DXGISetVideoCaptureDataRealCallback(pSource, NULL, NULL);
DXGIStopVideoCapture(pSource);
return true;
}
DXGI方式在1920*1080分辨率下,实时性较高(帧率大概60帧/秒左右)。