C++ 使用 Windows 新 API(Media Foundation)替代 DirectShow 驱动 UVC 摄像头(同样的思路也可用于 C# 实现)

使用Media Foundation实现

DirectShow vs Media Foundation 的区别

1. DirectShow的回调模式

DirectShow通常使用拉模式(Pull Model) + 回调(Callback):

// DirectShow典型模式

graphBuilder->Run();

// 然后Sample Grabber过滤器会回调你的回调函数

复制代码
STDMETHODIMP SampleCB(double SampleTime, IMediaSample *pSample)
{
    // Callback invoked by the Sample Grabber filter whenever a new frame arrives
    ProcessFrame(pSample);
    return S_OK;
}

2. Media Foundation的轮询模式

下面的代码使用 Media Foundation 的 Pull 模式,这是 MF 的标准工作方式:

复制代码
void CaptureThread()
{
    while (m_isCapturing)
    {
        // Actively request the next frame (polling / pull model)
        HRESULT hr = m_pReader->ReadSample(
            MF_SOURCE_READER_FIRST_VIDEO_STREAM,
            0, &streamIndex, &flags, &timestamp, &pSample);
        
        if (SUCCEEDED(hr) && pSample)
        {
            ProcessSample(pSample);  // process the delivered sample
            pSample->Release();
        }
    }
}

为什么MF采用轮询模式?

优点:

控制权在应用程序:

可以控制读取节奏

可以跳过帧(通过设置MF_SOURCE_READER_CONTROL_FLAG_DISCARD)

可以请求特定时间戳的样本

更简单的事件处理:

不需要复杂的消息泵或事件循环

不需要处理COM单元线程问题

异步操作支持:

MF也支持异步回调,但Pull模式更简单直接

MF的异步模式(可选)

为什么不用ISampleGrabber 接口

DirectShow 是一项旧技术,已被 MediaPlayer、IMFMediaEngine 以及 Media Foundation 中的音频/视频捕获功能取代,这些功能已针对 Windows 10 和 Windows 11 进行了优化。Microsoft 强烈建议新代码尽可能使用 MediaPlayer、IMFMediaEngine 和 Media Foundation 中的音频/视频捕获,而不是 DirectShow;并建议在可能的情况下重写使用旧 API 的现有代码,以改用新 API。

代码实现

复制代码
#include <windows.h>
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#include <mferror.h>
#include <shlwapi.h>
#include <dshow.h>
#include <ks.h>
#include <ksmedia.h>
#include <comdef.h>
#include <iostream>
#include <vector>
#include <thread>
#include <atomic>
#include <mutex>
#include <condition_variable>
#include <fstream>

#pragma comment(lib, "mf.lib")
#pragma comment(lib, "mfplat.lib")
#pragma comment(lib, "mfreadwrite.lib")
#pragma comment(lib, "mfuuid.lib")
#pragma comment(lib, "shlwapi.lib")
#pragma comment(lib, "strmiids.lib")
#pragma comment(lib, "ole32.lib")
#pragma comment(lib, "oleaut32.lib")

class UVCCamera
{
private:
    IMFMediaSource* m_pSource = nullptr;
    IMFSourceReader* m_pReader = nullptr;
    IMFAttributes* m_pAttributes = nullptr;
    IMFActivate** m_ppDevices = nullptr;
    UINT32 m_deviceCount = 0;

    // 摄像头控制接口
    IAMCameraControl* m_pCameraControl = nullptr;
    IAMVideoProcAmp* m_pVideoProcAmp = nullptr;

    // 当前帧数据
    std::vector<BYTE> m_frameBuffer;
    std::vector<BYTE> m_rgbBuffer;  // 用于存储转换后的RGB数据
    UINT32 m_frameWidth = 0;
    UINT32 m_frameHeight = 0;
    UINT32 m_frameStride = 0;
    GUID m_videoFormat = GUID_NULL;
    std::mutex m_frameMutex;

    // 捕获线程
    std::atomic<bool> m_isCapturing{ false };
    std::thread m_captureThread;
    std::condition_variable m_frameCV;
    bool m_newFrameAvailable = false;

public:
    UVCCamera() = default;
    ~UVCCamera() { Close(); }

    bool Initialize()
    {
        HRESULT hr = S_OK;

        // 初始化COM
        hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
        if (FAILED(hr))
        {
            std::cerr << "CoInitializeEx failed: 0x" << std::hex << hr << std::endl;
            return false;
        }

        // 初始化Media Foundation
        hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
        if (FAILED(hr))
        {
            std::cerr << "MFStartup failed: 0x" << std::hex << hr << std::endl;
            CoUninitialize();
            return false;
        }

        std::cout << "Media Foundation initialized successfully" << std::endl;

        return EnumerateDevices();
    }

    bool EnumerateDevices()
    {
        HRESULT hr = MFCreateAttributes(&m_pAttributes, 1);
        if (FAILED(hr))
        {
            std::cerr << "MFCreateAttributes failed: 0x" << std::hex << hr << std::endl;
            return false;
        }

        hr = m_pAttributes->SetGUID(
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
            MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID);
        if (FAILED(hr))
        {
            std::cerr << "SetGUID failed: 0x" << std::hex << hr << std::endl;
            m_pAttributes->Release();
            m_pAttributes = nullptr;
            return false;
        }

        hr = MFEnumDeviceSources(m_pAttributes, &m_ppDevices, &m_deviceCount);
        if (FAILED(hr))
        {
            std::cerr << "MFEnumDeviceSources failed: 0x" << std::hex << hr << std::endl;
            m_pAttributes->Release();
            m_pAttributes = nullptr;
            return false;
        }

        std::cout << "Found " << m_deviceCount << " video device(s)" << std::endl;
        return true;
    }

    void ListDevices()
    {
        std::wcout << L"\nAvailable cameras:" << std::endl;
        for (UINT32 i = 0; i < m_deviceCount; i++)
        {
            WCHAR* pFriendlyName = nullptr;
            UINT32 cchName = 0;

            HRESULT hr = m_ppDevices[i]->GetAllocatedString(
                MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME,
                &pFriendlyName,
                &cchName);

            if (SUCCEEDED(hr))
            {
                std::wcout << i << L": " << pFriendlyName << std::endl;
                CoTaskMemFree(pFriendlyName);
            }
        }
    }

    bool OpenDevice(UINT32 deviceIndex)
    {
        if (deviceIndex >= m_deviceCount)
        {
            std::cerr << "Invalid device index" << std::endl;
            return false;
        }

        std::wcout << L"\nOpening device " << deviceIndex << L"..." << std::endl;

        // 创建媒体源
        HRESULT hr = m_ppDevices[deviceIndex]->ActivateObject(
            IID_PPV_ARGS(&m_pSource));
        if (FAILED(hr))
        {
            std::cerr << "Failed to activate device: 0x" << std::hex << hr << std::endl;
            return false;
        }

        std::cout << "Device activated successfully" << std::endl;

        // 获取摄像头控制接口
        GetCameraControlInterfaces();

        return true;
    }

    bool GetCameraControlInterfaces()
    {
        if (!m_pSource) return false;

        HRESULT hr = S_OK;

        hr = m_pSource->QueryInterface(IID_IAMCameraControl, (void**)&m_pCameraControl);
        if (SUCCEEDED(hr))
        {
            std::cout << "Camera control interface obtained" << std::endl;
        }

        hr = m_pSource->QueryInterface(IID_IAMVideoProcAmp, (void**)&m_pVideoProcAmp);
        if (SUCCEEDED(hr))
        {
            std::cout << "Video Proc Amp interface obtained" << std::endl;
        }

        return (m_pCameraControl != nullptr || m_pVideoProcAmp != nullptr);
    }

    // 设置曝光
    bool SetExposure(long value, long flags = CameraControl_Flags_Manual)
    {
        if (!m_pCameraControl)
        {
            std::cerr << "Camera control interface not available" << std::endl;
            return false;
        }

        HRESULT hr = m_pCameraControl->Set(CameraControl_Exposure, value, flags);
        if (SUCCEEDED(hr))
        {
            std::cout << "Exposure set to: " << value << std::endl;
            return true;
        }

        std::cerr << "Failed to set exposure: 0x" << std::hex << hr << std::endl;
        return false;
    }

    // 开始捕获
    bool StartCapture()
    {
        if (!m_pSource || m_isCapturing) return false;

        std::cout << "Starting capture..." << std::endl;

        // 创建Source Reader的属性
        IMFAttributes* pReaderAttributes = nullptr;
        HRESULT hr = MFCreateAttributes(&pReaderAttributes, 1);
        if (FAILED(hr))
        {
            std::cerr << "Failed to create reader attributes: 0x" << std::hex << hr << std::endl;
            return false;
        }

        // 创建Source Reader
        hr = MFCreateSourceReaderFromMediaSource(
            m_pSource,
            pReaderAttributes,
            &m_pReader);

        pReaderAttributes->Release();

        if (FAILED(hr))
        {
            std::cerr << "Failed to create source reader: 0x" << std::hex << hr << std::endl;
            return false;
        }

        // 尝试设置媒体类型为YUY2(因为摄像头支持YUY2)
        IMFMediaType* pType = nullptr;
        hr = MFCreateMediaType(&pType);
        if (SUCCEEDED(hr))
        {
            hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
            if (SUCCEEDED(hr))
            {
                // 尝试YUY2格式
                hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2);
                if (SUCCEEDED(hr))
                {
                    // 尝试设置分辨率
                    hr = MFSetAttributeSize(pType, MF_MT_FRAME_SIZE, 640, 480);
                    if (SUCCEEDED(hr))
                    {
                        // 设置到source reader
                        hr = m_pReader->SetCurrentMediaType(
                            (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                            nullptr,
                            pType);
                    }
                }
            }
            pType->Release();
        }

        if (FAILED(hr))
        {
            std::cout << "Using default media type" << std::endl;
        }

        m_isCapturing = true;
        m_captureThread = std::thread(&UVCCamera::CaptureThread, this);

        std::cout << "Capture started successfully" << std::endl;
        return true;
    }

    // 停止捕获
    void StopCapture()
    {
        m_isCapturing = false;
        if (m_captureThread.joinable())
        {
            m_captureThread.join();
        }

        if (m_pReader)
        {
            m_pReader->Release();
            m_pReader = nullptr;
        }

        std::cout << "Capture stopped" << std::endl;
    }

    // 捕获线程
    void CaptureThread()
    {
        std::cout << "Capture thread started" << std::endl;

        while (m_isCapturing)
        {
            IMFSample* pSample = nullptr;
            DWORD streamIndex, flags;
            LONGLONG timestamp;

            HRESULT hr = m_pReader->ReadSample(
                MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                0,
                &streamIndex,
                &flags,
                &timestamp,
                &pSample);

            if (FAILED(hr))
            {
                std::this_thread::sleep_for(std::chrono::milliseconds(10));
                continue;
            }

            if (flags & MF_SOURCE_READERF_ENDOFSTREAM)
            {
                std::cout << "End of stream reached" << std::endl;
                break;
            }

            if (pSample)
            {
                ProcessSample(pSample);
                pSample->Release();
            }
        }

        std::cout << "Capture thread ended" << std::endl;
    }

    // 处理样本
    void ProcessSample(IMFSample* pSample)
    {
        IMFMediaBuffer* pBuffer = nullptr;
        HRESULT hr = pSample->ConvertToContiguousBuffer(&pBuffer);
        if (FAILED(hr))
        {
            return;
        }

        BYTE* pData = nullptr;
        DWORD dataLength = 0;
        hr = pBuffer->Lock(&pData, nullptr, &dataLength);
        if (FAILED(hr))
        {
            pBuffer->Release();
            return;
        }

        IMFMediaType* pType = nullptr;
        hr = m_pReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pType);

        if (SUCCEEDED(hr))
        {
            UINT32 width = 640, height = 480;
            GUID subtype = GUID_NULL;

            // 获取帧大小
            UINT32 frameWidth = 0, frameHeight = 0;
            if (SUCCEEDED(MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &frameWidth, &frameHeight)))
            {
                width = frameWidth;
                height = frameHeight;
            }

            // 获取格式
            pType->GetGUID(MF_MT_SUBTYPE, &subtype);

            // 计算步长
            LONG lStride = width * 2;  // YUY2默认,每个像素2字节
            if (subtype == MFVideoFormat_RGB24)
            {
                lStride = width * 3;
            }
            else if (subtype == MFVideoFormat_RGB32 || subtype == MFVideoFormat_ARGB32)
            {
                lStride = width * 4;
            }
            lStride = (lStride + 3) & ~3;  // 4字节对齐

            std::lock_guard<std::mutex> lock(m_frameMutex);
            m_frameWidth = width;
            m_frameHeight = height;
            m_frameStride = lStride;
            m_videoFormat = subtype;


            // 输出格式信息
            static bool firstFrame = true;
            if (firstFrame)
            {
                std::cout << "Video format: " << GetFormatName(subtype)
                    << ", Size: " << width << "x" << height
                    << ", Stride: " << lStride << std::endl;
                firstFrame = false;
            }

            // 调整原始缓冲区大小
            size_t requiredSize = lStride * height;
            if (m_frameBuffer.size() < requiredSize)
            {
                m_frameBuffer.resize(requiredSize);
            }

            // 复制数据
            if (dataLength > 0)
            {
                size_t copySize = (dataLength < requiredSize) ? dataLength : requiredSize;
                memcpy(m_frameBuffer.data(), pData, copySize);

                // 立即转换格式
                ConvertToRGB24();

                m_newFrameAvailable = true;
                m_frameCV.notify_all();
            }

            pType->Release();
        }

        pBuffer->Unlock();
        pBuffer->Release();
    }
    bool AdjustCameraSettings()
    {
        if (!m_pCameraControl || !m_pVideoProcAmp)
            return false;

        HRESULT hr = S_OK;
        bool allSettingsApplied = true;

        //TODO 还是用默认配置的,如果要修改必须要获取设备原有的默认配置,还要设备可以选择是否开启彩色 

        std::cout << "=== Setting camera to AUTO mode ===" << std::endl;

        //// 1. 自动曝光
        //hr = m_pCameraControl->Set(CameraControl_Exposure, 0, CameraControl_Flags_Auto);
        //if (SUCCEEDED(hr))
        //{
        //    std::cout << "[OK] Auto exposure enabled" << std::endl;
        //}

        //// 2. 自动白平衡
        //hr = m_pVideoProcAmp->Set(VideoProcAmp_WhiteBalance, 50, VideoProcAmp_Flags_Auto);
        //if (SUCCEEDED(hr))
        //{
        //    std::cout << "[OK] Auto white balance enabled" << std::endl;
        //}

        //// 3. 自动亮度
        //hr = m_pVideoProcAmp->Set(VideoProcAmp_Brightness, 50, VideoProcAmp_Flags_Auto);
        //if (SUCCEEDED(hr))
        //{
        //    std::cout << "[OK] Auto brightness enabled" << std::endl;
        //}

        //// 4. 自动对比度
        //hr = m_pVideoProcAmp->Set(VideoProcAmp_Contrast, 0, VideoProcAmp_Flags_Auto);
        //if (SUCCEEDED(hr))
        //{
        //    std::cout << "[OK] Auto contrast enabled" << std::endl;
        //}

        // 5. 关键:将饱和度设为手动模式,并设置为正常值(如50)
        // 不要使用自动饱和度,因为有些摄像头自动饱和度会设为0
        //hr = m_pVideoProcAmp->Set(VideoProcAmp_Saturation, 50, VideoProcAmp_Flags_Manual);
        //if (SUCCEEDED(hr))
        //{
        //    std::cout << "[OK] Saturation set to 50 (manual)" << std::endl;
        //}
        //else
        //{
        //    // 如果50不行,尝试100
        //    hr = m_pVideoProcAmp->Set(VideoProcAmp_Saturation, 100, VideoProcAmp_Flags_Manual);
        //    if (SUCCEEDED(hr))
        //    {
        //        std::cout << "[OK] Saturation set to 100 (manual)" << std::endl;
        //    }
        //    else
        //    {
        //        std::cerr << "[ERROR] Failed to set saturation" << std::endl;
        //    }
        //}

        // 6. 自动增益
        //hr = m_pVideoProcAmp->Set(VideoProcAmp_Gain, 0, VideoProcAmp_Flags_Auto);
        //if (SUCCEEDED(hr))
        //{
        //    std::cout << "[OK] Auto gain enabled" << std::endl;
        //}

        std::cout << "=== Camera settings completed ===" << std::endl;
        std::this_thread::sleep_for(std::chrono::milliseconds(500));

        return allSettingsApplied;
    }

    // YUV 转 RGB
   // 修改YUVtoRGB函数,使用正确的转换公式
    void YUVtoRGB(int y, int u, int v, int& r, int& g, int& b)
    {
        // 正确的YUY2到RGB转换公式
        int c = y - 16;
        int d = u - 128;
        int e = v - 128;

        // 使用更精确的转换矩阵
        r = (298 * c + 409 * e + 128) >> 8;
        g = (298 * c - 100 * d - 208 * e + 128) >> 8;
        b = (298 * c + 516 * d + 128) >> 8;

        // 限制在0-255范围内
        r = (r < 0) ? 0 : (r > 255) ? 255 : r;
        g = (g < 0) ? 0 : (g > 255) ? 255 : g;
        b = (b < 0) ? 0 : (b > 255) ? 255 : b;
    }

    // 改进的YUY2转RGB24函数
    void ConvertToRGB24()
    {
        if (m_frameBuffer.empty() || m_frameWidth == 0 || m_frameHeight == 0)
            return;

        // 分配RGB缓冲区
        size_t rgbSize = m_frameWidth * m_frameHeight * 3;
        m_rgbBuffer.resize(rgbSize);

        if (m_videoFormat == MFVideoFormat_YUY2)
        {
            std::cout << "Converting YUY2 to RGB24: " << m_frameWidth
                << "x" << m_frameHeight << ", Stride: " << m_frameStride << std::endl;

            BYTE* yuy2Data = m_frameBuffer.data();
            BYTE* rgbData = m_rgbBuffer.data();

            // 计算步长
            UINT32 yuy2Stride = m_frameStride;  // 1280 bytes
            UINT32 rgbStride = m_frameWidth * 3;  // 1920 bytes (640×3)

            std::cout << "YUY2 stride: " << yuy2Stride << ", RGB stride: " << rgbStride << std::endl;

            // 测试:输出前几个像素的原始数据
            std::cout << "First 8 bytes of YUY2: ";
            for (int i = 0; i < 8; i++) {
                printf("%02X ", yuy2Data[i]);
            }
            std::cout << std::endl;

            for (UINT32 y = 0; y < m_frameHeight; y++)
            {
                BYTE* yuy2Line = yuy2Data + (y * yuy2Stride);
                BYTE* rgbLine = rgbData + (y * rgbStride);

                for (UINT32 x = 0; x < m_frameWidth; x += 2)
                {
                    // 读取两个像素的YUY2数据
                    int y0 = yuy2Line[0];
                    int u = yuy2Line[1];
                    int y1 = yuy2Line[2];
                    int v = yuy2Line[3];
                    yuy2Line += 4;

                    // 调试:如果是第一行的前几个像素,输出详细信息
                    if (y == 0 && x < 4)
                    {
                        printf("Pixel %d,%d: Y0=%3d, U=%3d, Y1=%3d, V=%3d\n",
                            x, y, y0, u, y1, v);
                    }

                    // 计算U和V的偏移
                    int uu = u - 128;
                    int vv = v - 128;

                    // 预计算公共部分
                    int r_add = 409 * vv;
                    int g_add_u = -100 * uu;
                    int g_add_v = -208 * vv;
                    int b_add = 516 * uu;

                    // 第一个像素
                    int yy0 = y0 - 16;
                    if (yy0 < 0) yy0 = 0;

                    int r0 = (298 * yy0 + r_add + 128) >> 8;
                    int g0 = (298 * yy0 + g_add_u + g_add_v + 128) >> 8;
                    int b0 = (298 * yy0 + b_add + 128) >> 8;

                    r0 = (r0 < 0) ? 0 : (r0 > 255) ? 255 : r0;
                    g0 = (g0 < 0) ? 0 : (g0 > 255) ? 255 : g0;
                    b0 = (b0 < 0) ? 0 : (b0 > 255) ? 255 : b0;

                    // 第二个像素
                    int yy1 = y1 - 16;
                    if (yy1 < 0) yy1 = 0;

                    int r1 = (298 * yy1 + r_add + 128) >> 8;
                    int g1 = (298 * yy1 + g_add_u + g_add_v + 128) >> 8;
                    int b1 = (298 * yy1 + b_add + 128) >> 8;

                    r1 = (r1 < 0) ? 0 : (r1 > 255) ? 255 : r1;
                    g1 = (g1 < 0) ? 0 : (g1 > 255) ? 255 : g1;
                    b1 = (b1 < 0) ? 0 : (b1 > 255) ? 255 : b1;

                    // 写入RGB数据
                    rgbLine[0] = (BYTE)b0;  // BMP需要BGR,所以这里直接输出BGR
                    rgbLine[1] = (BYTE)g0;
                    rgbLine[2] = (BYTE)r0;
                    rgbLine[3] = (BYTE)b1;
                    rgbLine[4] = (BYTE)g1;
                    rgbLine[5] = (BYTE)r1;
                    rgbLine += 6;
                }

                // 调试:验证步长
                if (y == 0)
                {
                    std::cout << "YUY2 line " << y << " ended at byte offset: "
                        << (yuy2Line - yuy2Data) << std::endl;
                    std::cout << "Expected offset: " << yuy2Stride << std::endl;
                }
            }

            // 测试:输出转换后的前几个RGB值
            std::cout << "First RGB pixel (should be BGR for BMP): ";
            for (int i = 0; i < 6; i++) {
                printf("%02X ", m_rgbBuffer[i]);
            }
            std::cout << std::endl;
        }
        else
        {
            std::cerr << "Unsupported format: " << GetFormatName(m_videoFormat) << std::endl;
        }
    }
    std::string GetFormatName(const GUID& guid)
    {
        if (guid == MFVideoFormat_RGB24) return "RGB24";
        if (guid == MFVideoFormat_RGB32) return "RGB32";
        if (guid == MFVideoFormat_ARGB32) return "ARGB32";
        if (guid == MFVideoFormat_YUY2) return "YUY2";
        if (guid == MFVideoFormat_NV12) return "NV12";
        if (guid == MFVideoFormat_MJPG) return "MJPG";
        if (guid == MFVideoFormat_I420) return "I420";
        return "Unknown";
    }

    // 转换RGB24到BGR24(BMP需要BGR顺序)
    void ConvertRGB24ToBGR24(const std::vector<BYTE>& rgbData, std::vector<BYTE>& bgrData,
        UINT32 width, UINT32 height)
    {
        size_t pixelCount = width * height;
        bgrData.resize(pixelCount * 3);

        for (UINT32 y = 0; y < height; y++)
        {
            const BYTE* srcLine = rgbData.data() + y * width * 3;
            BYTE* dstLine = bgrData.data() + y * width * 3;

            for (UINT32 x = 0; x < width; x++)
            {
                dstLine[x * 3 + 0] = srcLine[x * 3 + 2];  // B
                dstLine[x * 3 + 1] = srcLine[x * 3 + 1];  // G
                dstLine[x * 3 + 2] = srcLine[x * 3 + 0];  // R
            }
        }
    }

    // 保存为BMP
    bool SaveAsBMP000(const std::string& filename)
    {
        std::vector<BYTE> rgbData;
        UINT32 width, height;
        GUID format;

        {
            std::unique_lock<std::mutex> lock(m_frameMutex);

            // 等待新帧
            if (!m_frameCV.wait_for(lock, std::chrono::seconds(3),
                [this]() { return m_newFrameAvailable; }))
            {
                std::cerr << "Timeout waiting for frame" << std::endl;
                return false;
            }

            // 使用转换后的RGB数据
            rgbData = m_rgbBuffer;
            width = m_frameWidth;
            height = m_frameHeight;
            format = m_videoFormat;
            m_newFrameAvailable = false;
        }

        if (rgbData.empty() || width == 0 || height == 0)
        {
            std::cerr << "No valid frame data" << std::endl;
            return false;
        }

        std::cout << "Saving BMP: " << width << "x" << height
            << ", Format: " << GetFormatName(format) << std::endl;

        // 转换格式为BGR24(BMP需要BGR顺序)
        std::vector<BYTE> bgrData;
        ConvertRGB24ToBGR24(rgbData, bgrData, width, height);

        // 计算BMP文件大小
        DWORD bmpStride = ((width * 3 + 3) / 4) * 4;  // BMP要求4字节对齐
        DWORD imageSize = bmpStride * height;
        DWORD fileSize = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER) + imageSize;

        // 创建文件头
        BITMAPFILEHEADER bmpFileHeader = { 0 };
        bmpFileHeader.bfType = 0x4D42;  // 'BM'
        bmpFileHeader.bfSize = fileSize;
        bmpFileHeader.bfOffBits = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER);
        bmpFileHeader.bfReserved1 = 0;
        bmpFileHeader.bfReserved2 = 0;

        // 创建信息头
        BITMAPINFOHEADER bmpInfoHeader = { 0 };
        bmpInfoHeader.biSize = sizeof(BITMAPINFOHEADER);
        bmpInfoHeader.biWidth = width;
        bmpInfoHeader.biHeight = -static_cast<LONG>(height);  // 负值表示从上到下的位图
        bmpInfoHeader.biPlanes = 1;
        bmpInfoHeader.biBitCount = 24;  // 24位RGB
        bmpInfoHeader.biCompression = BI_RGB;
        bmpInfoHeader.biSizeImage = imageSize;
        bmpInfoHeader.biXPelsPerMeter = 0;
        bmpInfoHeader.biYPelsPerMeter = 0;
        bmpInfoHeader.biClrUsed = 0;
        bmpInfoHeader.biClrImportant = 0;

        // 写入文件
        std::ofstream file(filename, std::ios::binary);
        if (!file.is_open())
        {
            std::cerr << "Failed to open file: " << filename << std::endl;
            return false;
        }

        // 写入文件头
        file.write(reinterpret_cast<const char*>(&bmpFileHeader), sizeof(BITMAPFILEHEADER));

        // 写入信息头
        file.write(reinterpret_cast<const char*>(&bmpInfoHeader), sizeof(BITMAPINFOHEADER));

        // 写入像素数据(BGR格式,4字节对齐)
        for (UINT32 y = 0; y < height; y++)
        {
            const BYTE* srcLine = bgrData.data() + y * width * 3;
            file.write(reinterpret_cast<const char*>(srcLine), width * 3);

            // 写入对齐填充(如果需要)
            DWORD padding = bmpStride - width * 3;
            if (padding > 0)
            {
                BYTE padBuffer[4] = { 0 };
                file.write(reinterpret_cast<const char*>(padBuffer), padding);
            }
        }

        file.close();

        if (!file)
        {
            std::cerr << "Error writing to file: " << filename << std::endl;
            return false;
        }

        std::cout << "BMP file saved successfully: " << filename
            << " (" << width << "x" << height << ", " << fileSize << " bytes)" << std::endl;
        return true;
    }
    // 保存为BMP
    bool SaveAsBMP(const std::string& filename)
    {
        std::vector<BYTE> bgrData;  // 改名为bgrData
        UINT32 width, height;
        GUID format;

        {
            std::unique_lock<std::mutex> lock(m_frameMutex);

            // 等待新帧
            if (!m_frameCV.wait_for(lock, std::chrono::seconds(3),
                [this]() { return m_newFrameAvailable; }))
            {
                std::cerr << "Timeout waiting for frame" << std::endl;
                return false;
            }

            // 使用转换后的RGB数据(现在已经是BGR顺序)
            bgrData = m_rgbBuffer;  // 注意:现在m_rgbBuffer里是BGR
            width = m_frameWidth;
            height = m_frameHeight;
            format = m_videoFormat;
            m_newFrameAvailable = false;
        }

        if (bgrData.empty() || width == 0 || height == 0)
        {
            std::cerr << "No valid frame data" << std::endl;
            return false;
        }

        std::cout << "Saving BMP: " << width << "x" << height
            << ", Format: " << GetFormatName(format)
            << ", Data size: " << bgrData.size() << " bytes" << std::endl;

        // 验证数据大小
        size_t expectedSize = width * height * 3;
        if (bgrData.size() != expectedSize)
        {
            std::cerr << "Warning: Data size mismatch. Expected " << expectedSize
                << " bytes, got " << bgrData.size() << " bytes" << std::endl;
        }

        // 计算BMP文件大小
        DWORD bmpStride = ((width * 3 + 3) / 4) * 4;  // BMP要求4字节对齐
        DWORD imageSize = bmpStride * height;
        DWORD fileSize = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER) + imageSize;

        // 创建文件头
        BITMAPFILEHEADER bmpFileHeader = { 0 };
        bmpFileHeader.bfType = 0x4D42;  // 'BM'
        bmpFileHeader.bfSize = fileSize;
        bmpFileHeader.bfOffBits = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER);
        bmpFileHeader.bfReserved1 = 0;
        bmpFileHeader.bfReserved2 = 0;

        // 创建信息头
        BITMAPINFOHEADER bmpInfoHeader = { 0 };
        bmpInfoHeader.biSize = sizeof(BITMAPINFOHEADER);
        bmpInfoHeader.biWidth = width;
        bmpInfoHeader.biHeight = -static_cast<LONG>(height);  // 负值表示从上到下的位图
        bmpInfoHeader.biPlanes = 1;
        bmpInfoHeader.biBitCount = 24;  // 24位RGB
        bmpInfoHeader.biCompression = BI_RGB;
        bmpInfoHeader.biSizeImage = imageSize;
        bmpInfoHeader.biXPelsPerMeter = 0;
        bmpInfoHeader.biYPelsPerMeter = 0;
        bmpInfoHeader.biClrUsed = 0;
        bmpInfoHeader.biClrImportant = 0;

        // 写入文件
        std::ofstream file(filename, std::ios::binary);
        if (!file.is_open())
        {
            std::cerr << "Failed to open file: " << filename << std::endl;
            return false;
        }

        // 写入文件头
        file.write(reinterpret_cast<const char*>(&bmpFileHeader), sizeof(BITMAPFILEHEADER));

        // 写入信息头
        file.write(reinterpret_cast<const char*>(&bmpInfoHeader), sizeof(BITMAPINFOHEADER));

        // 写入像素数据(现在已经是BGR格式,但仍需4字节对齐)
        for (UINT32 y = 0; y < height; y++)
        {
            const BYTE* srcLine = bgrData.data() + y * width * 3;
            file.write(reinterpret_cast<const char*>(srcLine), width * 3);

            // 写入对齐填充(如果需要)
            DWORD padding = bmpStride - width * 3;
            if (padding > 0)
            {
                BYTE padBuffer[4] = { 0 };
                file.write(reinterpret_cast<const char*>(padBuffer), padding);
            }
        }

        file.close();

        if (!file)
        {
            std::cerr << "Error writing to file: " << filename << std::endl;
            return false;
        }

        std::cout << "BMP file saved successfully: " << filename
            << " (" << width << "x" << height << ", " << fileSize << " bytes)" << std::endl;
        return true;
    }
    // 拍照
    bool CapturePhoto(const std::string& filename)
    {
        std::cout << "\nCapturing photo..." << std::endl;

        // 等待一会儿确保有稳定的图像
        std::this_thread::sleep_for(std::chrono::milliseconds(500));

        bool result = SaveAsBMP(filename);

        if (result)
        {
            std::cout << "Photo captured successfully!" << std::endl;
        }
        else
        {
            std::cerr << "Failed to capture photo" << std::endl;
        }

        return result;
    }

    void Close()
    {
        StopCapture();

        if (m_pReader)
        {
            m_pReader->Release();
            m_pReader = nullptr;
        }

        if (m_pSource)
        {
            m_pSource->Release();
            m_pSource = nullptr;
        }

        if (m_pCameraControl)
        {
            m_pCameraControl->Release();
            m_pCameraControl = nullptr;
        }

        if (m_pVideoProcAmp)
        {
            m_pVideoProcAmp->Release();
            m_pVideoProcAmp = nullptr;
        }

        if (m_pAttributes)
        {
            m_pAttributes->Release();
            m_pAttributes = nullptr;
        }

        if (m_ppDevices)
        {
            for (UINT32 i = 0; i < m_deviceCount; i++)
            {
                m_ppDevices[i]->Release();
            }
            CoTaskMemFree(m_ppDevices);
            m_ppDevices = nullptr;
        }

        m_deviceCount = 0;
        m_frameBuffer.clear();
        m_rgbBuffer.clear();

        MFShutdown();
        CoUninitialize();

        std::cout << "Camera closed" << std::endl;
    }
};

int main()
{
    std::cout << "=== UVC Camera Application ===" << std::endl;

    UVCCamera camera;

    if (!camera.Initialize())
    {
        std::cerr << "Failed to initialize camera system" << std::endl;
        system("pause");
        return -1;
    }

    camera.ListDevices();

    int choice = 0;
    std::cout << "\nSelect device (0): ";
    std::cin >> choice;

    if (!camera.OpenDevice(choice))
    {
        std::cerr << "Failed to open device" << std::endl;
        camera.Close();
        system("pause");
        return -1;
    }

    // 调整摄像头设置
    camera.AdjustCameraSettings();

    // 开始捕获
    if (!camera.StartCapture())
    {
        std::cerr << "Failed to start capture" << std::endl;
        camera.Close();
        system("pause");
        return -1;
    }

    // 等待摄像头预热
    std::cout << "Camera warming up..." << std::endl;
    std::this_thread::sleep_for(std::chrono::seconds(2));

    // 拍照
    if (camera.CapturePhoto("camera_capture.bmp"))
    {
        std::cout << "\nSuccess! Photo saved as 'camera_capture.bmp'" << std::endl;
    }
    else
    {
        std::cerr << "\nFailed to capture photo" << std::endl;
    }

    // 停止捕获
    camera.StopCapture();

    // 清理
    camera.Close();

    std::cout << "\nDone!" << std::endl;
    system("pause");

    return 0;
}
相关推荐
曹牧4 小时前
Java:Foreach语法糖
java·开发语言·python
盼哥PyAI实验室4 小时前
Python验证码处理实战:从12306项目看验证码识别的技术演进
开发语言·网络·python
222you4 小时前
SpringIOC的注解开发
java·开发语言
William_cl4 小时前
【CSDN 专栏】C# ASP.NET Razor 视图引擎实战:.cshtml 从入门到避坑(图解 + 案例)
开发语言·c#·asp.net
charlie1145141914 小时前
深入理解CC++的编译与链接技术9:动态库细节
c语言·开发语言·c++·学习·动态库
席之郎小果冻4 小时前
【03】【创建型】【聊一聊,单例模式】
开发语言·javascript·单例模式
god004 小时前
Selenium等待判断元素页面加载完成
java·开发语言
Dxy12393102164 小时前
python如何去掉字符串中最后一个字符
开发语言·python
世转神风-4 小时前
qt-windows用户点击.exe,报错:缺少libgcc_s_seh-1.dll
c++·qt