MFC File - Screen Recording

Download this file

This file consolidates all of the code for capturing screen image data into two files (ScreenRecorder.h and ScreenRecorder.cpp), which makes capturing the screen straightforward. It outputs an IYUV video stream. It can also capture the sound the system is playing and output a PCM audio stream. Because it uses MFC classes, this file is only suitable for MFC programs.

How to use

1. Create an MFC project.

2. Copy ScreenRecorder.h and ScreenRecorder.cpp into your MFC project directory.

3. Add ScreenRecorder.h and ScreenRecorder.cpp to the project.

4. Include the ScreenRecorder.h header and declare a ScreenRecorder object. The object must not be destroyed while recording is in progress; for example, declare it in the dialog class definition in the dialog's header file (a sketch of this follows the code line below).

cpp
	ScreenRecorder SR;
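
A minimal sketch of this placement, assuming a wizard-generated dialog class named CMyDlg (the class name and the surrounding members are assumptions for the example, not part of this file):

cpp
// CMyDlg.h -- hypothetical dialog header
#include "ScreenRecorder.h"

class CMyDlg : public CDialogEx
{
	// ...wizard-generated message map, controls, etc...
public:
	ScreenRecorder SR;	// member of the dialog, so it lives for the whole recording session
};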

5. Declare an SR_INIT initialization structure and fill in its members. Provide the recording rectangle, which can be the whole screen or a region of it. Provide the recording frame rate. Provide the sample output functions. Create the "Stop" and "Exit" events and provide their handles.

cpp
int VideoSample(BYTE* pB, LONG len)//video sample output function
{
	return 0;
}

int AudioSample(BYTE* pB, LONG len)//audio sample output function
{
	return 0;
}

	HANDLE hStop = CreateEvent(NULL, TRUE, TRUE, NULL);//create the "Stop" event, manual reset
	HANDLE hExit = CreateEvent(NULL, TRUE, FALSE, NULL);//create the "Exit" event, manual reset
	CRect rect; rect.left = 0; rect.top = 0; rect.right = 1920; rect.bottom = 1080;

		SR_INIT SrInit;//SR initialization structure
		SrInit.rect = rect;//recording rectangle
		SrInit.ShowCursor = TRUE;//TRUE shows the cursor, FALSE hides it
		SrInit.nFramePerSec = 30;//video frame rate
		SrInit.VideoSample = (MYPROC_VideoSample)VideoSample;//video sample output function
		SrInit.AudioSample = (MYPROC_AudioSample)AudioSample;//audio sample output function
		SrInit.hStop = hStop;//"Stop" event handle
		SrInit.hExit = hExit;//"Exit" event handle

6. Run. Call the initialization function to create the screen-capture thread; if an audio sample output function was provided, a thread that records the system sound is also created. After the "Stop" event is set to non-signaled, the video and audio sample output functions are called repeatedly; parameter 1 is a pointer to the sample buffer, and parameter 2 is the size of the sample in bytes.

cpp
		SR.Init(SrInit);//call the initialization function with the initialization structure as the argument
		ResetEvent(hStop);//set "Stop" to non-signaled

7. Pause. The sample output functions are no longer called, but the screen-capture thread still exists.

cpp
		SetEvent(hStop);//set "Stop" to signaled

8. Stop. The screen-capture thread will exit. (A sketch that additionally waits for the threads to finish follows the code block.)

cpp
		SetEvent(hStop);//set "Stop" to signaled
		SetEvent(hExit);//set "Exit" to signaled
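
If you need to be certain that both threads have returned before the ScreenRecorder object or the dialog is destroyed, you can also wait on the thread handles, which are public members of ScreenRecorder (hVideoThread and hAudioThread, see the header below). This is a minimal sketch, not part of the original steps, and the 5-second timeout is an arbitrary choice:

cpp
		SetEvent(hStop);//set "Stop" to signaled
		SetEvent(hExit);//set "Exit" to signaled
		if (SR.hVideoThread) WaitForSingleObject(SR.hVideoThread, 5000);//wait for the screen-capture thread to exit
		if (SR.hAudioThread) WaitForSingleObject(SR.hAudioThread, 5000);//wait for the audio thread to exit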

The code does not provide sample timestamps. For video, one sample is one IYUV video frame, so the current sample time can be computed from the number of frames (a sketch of a complete VideoSample callback based on this follows the code block):

cpp
	int index = 0;//frame index
	
		double dur = (double)10000000 / (double)30;//duration of one frame, in 100-nanosecond units; 30 is the frame rate
		LONGLONG SampleTime = (LONGLONG)(dur * index);//current sample time, in 100-nanosecond units
		index++;
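
A minimal sketch of a VideoSample callback that fleshes out the stub from step 5: it timestamps each frame as above and appends the raw IYUV data to a file. The file name, the global FILE pointer, and the 30 fps frame rate are assumptions for the example only:

cpp
#include <cstdio>

FILE* g_yuvFile = NULL;      //hypothetical output file, opened elsewhere with fopen("capture.yuv", "wb")
LONGLONG g_frameIndex = 0;   //number of video frames received so far

int VideoSample(BYTE* pB, LONG len)//video sample output function
{
	double dur = (double)10000000 / (double)30;//duration of one frame at 30 fps, in 100-nanosecond units
	LONGLONG SampleTime = (LONGLONG)(dur * g_frameIndex);//timestamp of this frame; pass it to your muxer if needed
	if (g_yuvFile) fwrite(pB, 1, len, g_yuvFile);//append the raw IYUV frame (len = width * height * 1.5 bytes)
	g_frameIndex++;
	return 0;
}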

An audio sample consists of a number of audio frames. The audio is 2-channel, 16-bit PCM at a sample rate of 48000 Hz. One audio frame occupies 4 bytes, so the current time of an audio sample can be computed from the accumulated number of audio frames (a corresponding AudioSample sketch follows the code block):

cpp
	int FrameCount = 0;

		double Adur = (double)10000000 / (double)48000;//duration of one audio frame, in 100-nanosecond units
		LONGLONG ASampleTime = (LONGLONG)(Adur * FrameCount);//current sample time, in 100-nanosecond units
		FrameCount += len / 4;//len is the byte size of this sample
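
A minimal sketch of an AudioSample callback that fleshes out the stub from step 5 in the same way: it timestamps each sample and appends the raw PCM data to a file. The file name and the global FILE pointer are assumptions for the example only:

cpp
#include <cstdio>

FILE* g_pcmFile = NULL;      //hypothetical output file, opened elsewhere with fopen("capture.pcm", "wb")
LONGLONG g_audioFrames = 0;  //number of 4-byte audio frames received so far

int AudioSample(BYTE* pB, LONG len)//audio sample output function
{
	double Adur = (double)10000000 / (double)48000;//duration of one audio frame, in 100-nanosecond units
	LONGLONG ASampleTime = (LONGLONG)(Adur * g_audioFrames);//timestamp of this sample; pass it to your muxer if needed
	if (g_pcmFile) fwrite(pB, 1, len, g_pcmFile);//append the 16-bit stereo PCM data
	g_audioFrames += len / 4;//one stereo 16-bit audio frame is 4 bytes
	return 0;
}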

For an example of using this file, see Windows Application - Screen Recording.

Full code of ScreenRecorder.h

cpp
#pragma once

#include "mmsystem.h"
#pragma comment(lib, "winmm.lib")

#include "D3D11.h"
#pragma comment(lib, "D3D11.lib")
#include "DXGI1_2.h"
#pragma comment(lib, "DXGI.lib")

#include "mmdeviceapi.h" 
#include "audioclient.h"

#ifndef  SAFE_RELEASE
#define SAFE_RELEASE

template <class T> void SafeRelease(T** ppT)
{
	if (*ppT)
	{
		(*ppT)->Release();
		*ppT = NULL;
	}
}

#endif //SAFE_RELEASE

typedef int(__cdecl *MYPROC_VideoSample)(BYTE* pB, LONG len);
typedef int(__cdecl *MYPROC_AudioSample)(BYTE* pB, LONG len);

struct SR_INIT
{
	CRect rect;//recording rectangle
	BOOL ShowCursor = TRUE;//if TRUE, the cursor is shown
	UINT nFramePerSec;//frames per second
	MYPROC_VideoSample VideoSample = NULL;//pointer to the video sample receiving function
	MYPROC_AudioSample AudioSample = NULL;//pointer to the audio sample receiving function
	HANDLE hStop = NULL;//"Stop" event handle
	HANDLE hExit = NULL;//"Exit" event handle
};

class ScreenRecorder
{
public:
	SR_INIT mInit;//initialization structure
	ScreenRecorder();
	~ScreenRecorder();
	BOOL Init(SR_INIT init);
	DWORD GetAudioEndpoint();//get the audio endpoint
	HRESULT GetScreenData();//capture the screen image data
	void DrawCursor(BYTE* pB);//draw the cursor
	int ScreenWidth = GetSystemMetrics(SM_CXSCREEN);//width of the primary monitor, in pixels
	int ScreenHeight = GetSystemMetrics(SM_CYSCREEN);//height of the primary monitor, in pixels
	UINT len;//size of a full-screen RGB32 image, in bytes
	ID3D11Device* p3D11Device = NULL;
	ID3D11DeviceContext* p3D11DeviceContext = NULL;
	IDXGIOutputDuplication *pDuplication = NULL;
	BYTE* pPreBuffer = NULL;//image buffer without the cursor
	BYTE* pDrawCursorBuffer = NULL;//image buffer with the cursor
	BYTE* pBuffer1 = NULL;//video output buffer
	BYTE* pBuffer2 = NULL;//audio buffer
	HANDLE hVideoThread = NULL;
	HANDLE hAudioThread = NULL;
	IAudioClient *pAudioClient = NULL;
	IAudioCaptureClient *pCaptureClient = NULL;
	REFERENCE_TIME nDur;//default interval between audio packet deliveries, in 100-nanosecond units
	LONG BufferSize;//size of one audio sample buffer, in bytes
	CBitmap bmp;//used for drawing the cursor
	CDC mDC;//memory DC, used for drawing the cursor
	int mState = 0;//state flag: 0 stopped, 1 running, 2 paused
};

Full code of ScreenRecorder.cpp

cpp
#include "stdafx.h"
#include "ScreenRecorder.h"


ScreenRecorder::ScreenRecorder()
{
	HRESULT hr = CoInitialize(NULL);//initialize the COM library
	if (hr != S_OK)
	{
		MessageBox(NULL, L"Failed to initialize the COM library!", L"ScreenRecorder", MB_OK);
	}
	D3D_DRIVER_TYPE driver_types[] =
	{
		D3D_DRIVER_TYPE_HARDWARE,
		D3D_DRIVER_TYPE_WARP,
		D3D_DRIVER_TYPE_REFERENCE,
	};
	UINT n_driver_types = ARRAYSIZE(driver_types);
	D3D_FEATURE_LEVEL feature_levels[] =
	{
		D3D_FEATURE_LEVEL_11_0,
		D3D_FEATURE_LEVEL_10_1,
		D3D_FEATURE_LEVEL_10_0,
		D3D_FEATURE_LEVEL_9_1
	};
	UINT n_feature_levels = ARRAYSIZE(feature_levels);
	D3D_FEATURE_LEVEL feature_level;
	hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, 0, feature_levels, n_feature_levels, D3D11_SDK_VERSION, &p3D11Device, &feature_level, &p3D11DeviceContext);
	IDXGIDevice* pIDXGIDevice = NULL;
	if (hr == S_OK)
	{
		hr = p3D11Device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&pIDXGIDevice));//get the corresponding DXGI device interface
	}
	IDXGIAdapter* pDXGIAdapter = NULL;
	if (hr == S_OK)
	{
		hr = pIDXGIDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&pDXGIAdapter));//get the DXGI adapter of the device
	}
	SafeRelease(&pIDXGIDevice);
	IDXGIOutput* pDXGIOutput = NULL;
	if (hr == S_OK)
	{
		hr = pDXGIAdapter->EnumOutputs(0, &pDXGIOutput); //get the device output interface
	}
	SafeRelease(&pDXGIAdapter);
	DXGI_OUTPUT_DESC _output_des;
	if (hr == S_OK)
	{
		hr = pDXGIOutput->GetDesc(&_output_des);//get the device output description
	}
	IDXGIOutput1* pDXGIOutput1 = NULL;
	if (hr == S_OK)
	{
		hr = pDXGIOutput->QueryInterface(__uuidof(pDXGIOutput1), reinterpret_cast<void**>(&pDXGIOutput1));
	}
	SafeRelease(&pDXGIOutput);
	if (hr == S_OK)
	{
		hr = pDXGIOutput1->DuplicateOutput(p3D11Device, &pDuplication);//create a duplication interface from the device output interface
	}
	SafeRelease(&pDXGIOutput1);
	if (hr != S_OK)MessageBox(0, L"DXGI initialization failed", L"Screen Recording", MB_OK);
	len = ScreenWidth * ScreenHeight * 4;
	pBuffer1 = new BYTE[len]; pPreBuffer = new BYTE[len]; pDrawCursorBuffer = new BYTE[len];
	bmp.CreateBitmap(ScreenWidth, ScreenHeight, 1, 32, NULL);
	mDC.CreateCompatibleDC(NULL);
	mDC.SelectObject(&bmp);
}

ScreenRecorder::~ScreenRecorder()
{
	SafeRelease(&p3D11Device); SafeRelease(&pDuplication); SafeRelease(&p3D11DeviceContext);
	SafeRelease(&pAudioClient); SafeRelease(&pCaptureClient);
	delete[] pBuffer1; delete[] pPreBuffer; delete[] pDrawCursorBuffer;
	if (pBuffer2)delete[] pBuffer2;
	CoUninitialize();//close the COM library
	mDC.DeleteDC();
}

DWORD ScreenRecorder::GetAudioEndpoint()//get the audio endpoint
{
	SafeRelease(&pAudioClient); SafeRelease(&pCaptureClient);
	HRESULT hr;
	IMMDeviceEnumerator *pEnumerator = NULL;
	hr = CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL, __uuidof(IMMDeviceEnumerator), (void**)&pEnumerator);//create the device enumerator
	IMMDevice *pDevice = NULL;
	if (hr == S_OK)
	{
		hr = pEnumerator->GetDefaultAudioEndpoint(eRender, eConsole, &pDevice);//get the default audio endpoint device
	}
	SafeRelease(&pEnumerator);
	if (hr == S_OK)
	{
		hr = pDevice->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL, (void**)&pAudioClient);//activate the default audio endpoint device
	}
	SafeRelease(&pDevice);
	WAVEFORMATEX wfx;
	wfx.wFormatTag = 1;//WAVE_FORMAT_PCM
	wfx.nChannels = 2;//2 channels
	wfx.nSamplesPerSec = 48000;//48000 Hz sample rate
	wfx.nAvgBytesPerSec = 48000 * 4;//bytes per second
	wfx.nBlockAlign = 4;//bytes per audio frame (2 channels x 16 bits)
	wfx.wBitsPerSample = 16;//16 bits per sample
	wfx.cbSize = 0;
	if (hr == S_OK)
	{
		hr = pAudioClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_LOOPBACK, (REFERENCE_TIME)10000000, 0, &wfx, NULL);//create the endpoint buffer, large enough to hold 1 second of audio data
	}
	REFERENCE_TIME r2;
	if (hr == S_OK)
	{
		hr = pAudioClient->GetDevicePeriod(&nDur, &r2);//get the duration of a single audio packet, in 100-nanosecond units
	}
	UINT32 Size;
	if (hr == S_OK)
	{
		hr = pAudioClient->GetBufferSize(&Size);//get the total size of the allocated endpoint buffer, in audio frames
	}
	if (hr == S_OK)
	{
		BufferSize = (LONG)(Size * wfx.nChannels * 2 * nDur / 10000000);//compute the size of one sample (one device period), in bytes
		if (pBuffer2 == NULL)pBuffer2 = new BYTE[BufferSize];
		hr = pAudioClient->GetService(__uuidof(IAudioCaptureClient), (void**)&pCaptureClient);//get the audio capture service
	}
	if (hr != S_OK)
	{
		SafeRelease(&pAudioClient); SafeRelease(&pCaptureClient);
		MessageBox(NULL, L"Failed to get the audio endpoint!", L"Note", MB_OK);
		return 0;
	}
	return 1;
}

void ScreenRecorder::DrawCursor(BYTE* pB)//draw the cursor
{
	CURSORINFO CursorInfo;
	CursorInfo.cbSize = sizeof(CURSORINFO);
	if (!GetCursorInfo(&CursorInfo))return;//get the cursor information
	if (CursorInfo.flags != CURSOR_SHOWING)return;//if the cursor is not showing, do not draw it
	bmp.SetBitmapBits(len, pB);
	mDC.DrawIcon(CursorInfo.ptScreenPos, CursorInfo.hCursor);//draw the cursor into the memory DC
	BITMAPINFOHEADER Hdr;
	Hdr.biSize = sizeof(BITMAPINFOHEADER);
	Hdr.biWidth = ScreenWidth;
	Hdr.biHeight = -ScreenHeight;
	Hdr.biPlanes = 1;
	Hdr.biBitCount = 32;
	Hdr.biCompression = BI_RGB;
	Hdr.biSizeImage = len;
	Hdr.biXPelsPerMeter = 0;
	Hdr.biYPelsPerMeter = 0;
	Hdr.biClrUsed = 0;
	Hdr.biClrImportant = 0;
	GetDIBits(mDC.m_hDC, (HBITMAP)bmp, 0, ScreenHeight, pB, (BITMAPINFO*)&Hdr, DIB_RGB_COLORS);//copy the bits of the memory-DC bitmap into pB
}

HRESULT ScreenRecorder::GetScreenData()//capture the screen image data
{
	IDXGIResource* pIDXGIResource = NULL;
	DXGI_OUTDUPL_FRAME_INFO frame_info;
	HRESULT DuplicationHr = pDuplication->AcquireNextFrame(0, &frame_info, &pIDXGIResource);//acquire the next desktop image
	D3D11_TEXTURE2D_DESC frame_desc;
	DXGI_MAPPED_RECT mapped_rect;
	IDXGISurface *dxgi_surface = NULL;
	HRESULT MapHr = S_FALSE;
	if (DuplicationHr == S_OK)
	{
		ID3D11Texture2D *_image = NULL;
		HRESULT Texture2DHr = pIDXGIResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void **>(&_image));//get the texture of one image frame
		SafeRelease(&pIDXGIResource);
		if (Texture2DHr == S_OK)
		{
			_image->GetDesc(&frame_desc);
			frame_desc.MipLevels = 1;
			frame_desc.ArraySize = 1;
			frame_desc.SampleDesc.Count = 1;
			frame_desc.Usage = D3D11_USAGE_STAGING;
			frame_desc.BindFlags = 0;
			frame_desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
			frame_desc.MiscFlags = 0;
			ID3D11Texture2D *new_image = NULL;
			HRESULT NewHr = p3D11Device->CreateTexture2D(&frame_desc, NULL, &new_image);//create a new staging texture
			if (NewHr == S_OK)
			{
				p3D11DeviceContext->CopyResource(new_image, _image);//copy the image
				HRESULT SurfaceHr = new_image->QueryInterface(__uuidof(IDXGISurface), (void **)(&dxgi_surface));
				SafeRelease(&new_image);
				if (SurfaceHr == S_OK)
				{
					MapHr = dxgi_surface->Map(&mapped_rect, DXGI_MAP_READ);//map the image from the GPU into memory
					if (MapHr == S_OK)
					{
						CopyMemory(pPreBuffer, mapped_rect.pBits, len);
					}
				}
			}
			SafeRelease(&_image);
		}
	}
	BYTE* RGB32_data;
	if (MapHr == S_OK)//if acquiring and mapping the image succeeded
	{
		RGB32_data = mapped_rect.pBits;//draw the cursor directly in mapped_rect.pBits
	}
	else//if it failed
	{
		CopyMemory(pDrawCursorBuffer, pPreBuffer, len);//copy the previous frame into pDrawCursorBuffer
		RGB32_data = pDrawCursorBuffer;//draw the cursor in pDrawCursorBuffer
	}
	if (mInit.ShowCursor)//if the cursor should be shown
	{
		DrawCursor(RGB32_data);//draw the cursor into RGB32_data
	}
	int iY = 0;
	int YSize = mInit.rect.Width() * mInit.rect.Height();
	int iU = YSize;
	int iV = YSize + YSize / 4;
	BYTE Color32[4];
	for (int y = mInit.rect.top; y < mInit.rect.bottom; y++)//convert the RGB32 data inside the specified rectangle to IYUV and store it in pBuffer1
	{
		for (int x = mInit.rect.left; x < mInit.rect.right; x++)
		{
			CopyMemory(Color32, &RGB32_data[y * ScreenWidth * 4 + x * 4], 4);
			pBuffer1[iY] = (BYTE)(0.299 * Color32[2] + 0.587 * Color32[1] + 0.114 * Color32[0]);//Y
			if ((x & 1) && (y & 1))
			{
				pBuffer1[iU] = (BYTE)(-0.1687 * Color32[2] - 0.3313 * Color32[1] + 0.5 * Color32[0] + 128);//U
				pBuffer1[iV] = (BYTE)(0.5 * Color32[2] - 0.4187 * Color32[1] - 0.0813 * Color32[0] + 128);//V
				iU++; iV++;
			}
			iY++;
		}
	}
	if (MapHr == S_OK)
	{
		dxgi_surface->Unmap();
	}
	SafeRelease(&dxgi_surface);
	if (DuplicationHr == S_OK)
	{
		pDuplication->ReleaseFrame();//release the desktop image
	}
	mInit.VideoSample(pBuffer1, (LONG)(mInit.rect.Width() * mInit.rect.Height() * 1.5));//call the external function to deliver the video sample; if acquiring the desktop image succeeded this is the new image, otherwise it is the previous frame
	return DuplicationHr;
}

DWORD WINAPI ScreenRecorderThread(LPVOID lp)
{
	ScreenRecorder* pSR = (ScreenRecorder*)lp;
	LONGLONG index = 0; double Ntime = (double)1000 / (double)pSR->mInit.nFramePerSec;
	DWORD STAR = timeGetTime();//record the start time
Agan:
	DWORD Cur = timeGetTime() - STAR;//current time, in milliseconds
	DWORD Sur = (DWORD)(Ntime * (double)index);//presentation time of the frame, in milliseconds
	if (Cur < Sur)goto Agan;//if it is not yet time to present the frame, keep waiting
	if (Cur >= (DWORD)(Ntime * (double)(index+1)))//if the current time is already past the presentation time of the next frame
	{
		pSR->mInit.VideoSample(pSR->pBuffer1, (LONG)(pSR->mInit.rect.Width() * pSR->mInit.rect.Height() * 1.5));
		index++;
		goto Agan;
	}
	DWORD mStop = WaitForSingleObject(pSR->mInit.hStop, 0);
	if (mStop == WAIT_OBJECT_0)//if "Stop" is signaled
	{
		pSR->mState = 2;//state: paused
	}
	else
	{
		pSR->mState = 1;//state: running
		HRESULT hr = pSR->GetScreenData();
	}
	DWORD mExit = WaitForSingleObject(pSR->mInit.hExit, 0);
	if (mExit == WAIT_OBJECT_0)//if "Exit" is signaled
	{
		pSR->mState = 0;//state: stopped
		return 1;
	}
	index++;
	goto Agan;
}

DWORD WINAPI AudioEndpointThread(LPVOID lp)
{
	ScreenRecorder* pSR = (ScreenRecorder*)lp;
	HRESULT hr; LONGLONG index = 0; BOOL Run = FALSE, Stop = TRUE; double Ntime = (double)pSR->nDur / (double)10000;
	DWORD STAR = timeGetTime();//record the start time
Agan:
	DWORD Cur = timeGetTime() - STAR;//current time, in milliseconds
	DWORD Sur = (DWORD)(Ntime * (double)index);//scheduled time of this iteration, in milliseconds
	if (Cur < Sur)goto Agan;//if the interval (10 milliseconds) has not elapsed yet, keep waiting
	DWORD mExit = WaitForSingleObject(pSR->mInit.hExit, 0);
	if (mExit == WAIT_OBJECT_0)
	{
		return 1;
	}
	DWORD mStop = WaitForSingleObject(pSR->mInit.hStop, 0);
	if (mStop == WAIT_OBJECT_0)//if "Stop" is signaled
	{
		if (Stop == FALSE)
		{
			Stop = TRUE; Run = FALSE;
			hr = pSR->pAudioClient->Stop();  //stop the audio stream
			hr = pSR->pAudioClient->Reset();//flush all pending data
		}
	}
	else//if "Stop" is not signaled
	{
		if (Run == FALSE)
		{
			Run = TRUE; Stop = FALSE;
			hr = pSR->pAudioClient->Start();  //start the audio stream
		}
		DWORD flags; UINT32 numFramesAvailable = 0; BYTE *pData = NULL;//pointer to the audio packet buffer
		UINT32 packetLength = 0;//audio packet size, in audio frames
		hr = pSR->pCaptureClient->GetNextPacketSize(&packetLength);//get the size of the next audio packet, in audio frames
		if (packetLength != 0)//if an audio packet was obtained, deliver its data to the audio sample output function
		{
			hr = pSR->pCaptureClient->GetBuffer(&pData, &numFramesAvailable, &flags, NULL, NULL);//get the pointer to the current audio packet
			if (hr == S_OK && numFramesAvailable != 0 && pData != NULL)
			{
				int count = numFramesAvailable * 4 / pSR->BufferSize;//compute how many samples need to be sent
				for (int i = 0; i < count; i++)
				{
					pSR->mInit.AudioSample(pData + i*pSR->BufferSize, pSR->BufferSize);
				}
				hr = pSR->pCaptureClient->ReleaseBuffer(numFramesAvailable);//release the audio packet
			}
		}
		else//if there is currently no audio stream, fill the audio buffer with zeros
		{
			memset(pSR->pBuffer2, 0, pSR->BufferSize);
			pSR->mInit.AudioSample(pSR->pBuffer2, pSR->BufferSize);//send 10 milliseconds of zero data
		}
	}
	index++;
	goto Agan;
}

BOOL ScreenRecorder::Init(SR_INIT init)
{
	DWORD dwV = WaitForSingleObject(hVideoThread, 0);
	if (dwV == WAIT_TIMEOUT)return FALSE;//if the thread already exists, return
	DWORD dwA = WaitForSingleObject(hAudioThread, 0);
	if (dwA == WAIT_TIMEOUT)return FALSE;
	if (init.hExit == NULL || init.hStop == NULL)
	{
		MessageBox(NULL, L"The \"Stop\" and \"Exit\" event handles must be provided", L"ScreenRecorder", MB_OK); return FALSE;
	}
	if (init.VideoSample == NULL)
	{
		MessageBox(NULL, L"A video sample output function must be provided", L"ScreenRecorder", MB_OK); return FALSE;
	}
	mInit = init;
	mInit.rect.NormalizeRect();//normalize the rectangle (make the width and height positive)
	if (mInit.rect.left < 0)mInit.rect.left = 0;
	if (mInit.rect.top < 0)mInit.rect.top = 0;
	if (mInit.rect.right > ScreenWidth)mInit.rect.right = ScreenWidth;
	if (mInit.rect.bottom > ScreenHeight)mInit.rect.bottom = ScreenHeight;
	if (mInit.rect.Width() % 2)//ensure the width of the recording rectangle is even
	{
		if (mInit.rect.right != ScreenWidth)mInit.rect.right += 1;
		else if (mInit.rect.left != 0)mInit.rect.left -= 1;
	}
	if (mInit.rect.Height() % 2)//ensure the height of the recording rectangle is even
	{
		if (mInit.rect.bottom != ScreenHeight)mInit.rect.bottom += 1;
		else if (mInit.rect.top != 0)mInit.rect.top -= 1;
	}
	ResetEvent(mInit.hExit);//set "Exit" to non-signaled
	SetEvent(mInit.hStop);//set "Stop" to signaled
	hVideoThread = CreateThread(NULL, 0, ScreenRecorderThread, this, 0, NULL);//create the screen-capture thread
	if (mInit.AudioSample)//if an audio sample output function was provided, create the thread that records the sound the system is playing; otherwise it is not created
	{
		GetAudioEndpoint();//get the audio endpoint
		hAudioThread = CreateThread(NULL, 0, AudioEndpointThread, this, 0, NULL);//create the thread that records the sound the system is playing
	}
	return TRUE;
}