一、前言
笔者在项目开发中遇到这样一个需求,硬件同事在 RK3588 麒麟v10系统(本质还是基于Linux系统)开发板上已经安装好了摄像头和相关驱动,现在需要软件去使用这个摄像头并使用摄像头捕捉的画面做一些开发。
之前使用 OpenCV 的 VideoCapture 也能打开摄像头并拿到画面帧,但 OpenCV 默认会在内部做转码,把帧转为 BGR888 格式后再返回。这有两个问题:其一,转码如果跑在 CPU 上性能可能不高;其二,我们本来就需要 YUV 数据,这一步转码完全是多余的。所以决定使用 GStreamer 直接打开摄像头设备并获取原始流。
二、安装 GStreamer
建议是直接使用 apt install 去安装 GStreamer,有挺多依赖包的 apt install 的话能自动去下载依赖。命令如下:
bash
sudo apt update
# Core development libraries:
#   libgstreamer1.0-dev              - GStreamer core
#   libgstreamer-plugins-base1.0-dev - base plugins
#   libgstapp-1.0-dev                - gstreamer-app-1.0
#   libgstpbutils-1.0-dev            - gstreamer-pbutils-1.0
#   libgstriff-1.0-dev               - gstreamer-riff-1.0
# NOTE: a comment after a trailing backslash breaks shell line continuation
# (the original inline comments made this command fail), so the per-package
# descriptions must live above the command, not inline.
sudo apt install -y \
    libgstreamer1.0-dev \
    libgstreamer-plugins-base1.0-dev \
    libgstapp-1.0-dev \
    libgstpbutils-1.0-dev \
    libgstriff-1.0-dev
使用 CMakeLists.txt 编译项目的话,可以参考如下代码去配置依赖:
bash
find_package(PkgConfig REQUIRED)
pkg_check_modules(GST_APP REQUIRED gstreamer-app-1.0)
pkg_check_modules(GST_PBUTILS REQUIRED gstreamer-pbutils-1.0)
pkg_check_modules(GST_RIFF REQUIRED gstreamer-riff-1.0)
message("GST_APP_LIBRARIES: ${GST_APP_LIBRARIES}")         # gstapp-1.0;gstbase-1.0;gstreamer-1.0;gobject-2.0;glib-2.0
message("GST_PBUTILS_LIBRARIES: ${GST_PBUTILS_LIBRARIES}") # gstpbutils-1.0;gstaudio-1.0;gstvideo-1.0;gstbase-1.0;gstreamer-1.0;gobject-2.0;glib-2.0
message("GST_RIFF_LIBRARIES: ${GST_RIFF_LIBRARIES}")       # gstriff-1.0;gstreamer-1.0;gobject-2.0;glib-2.0
# GStreamer headers live under .../include/gstreamer-1.0 (plus the glib
# include dirs); without these include paths, #include <gst/gst.h> and
# #include <gst/app/gstappsink.h> will not resolve.
target_include_directories(${TARGET_NAME} PRIVATE
    ${GST_APP_INCLUDE_DIRS}
    ${GST_PBUTILS_INCLUDE_DIRS}
    ${GST_RIFF_INCLUDE_DIRS}
)
target_link_libraries(${TARGET_NAME}
    ${GST_APP_LIBRARIES}
    ${GST_PBUTILS_LIBRARIES}
    ${GST_RIFF_LIBRARIES}
)
三、使用
1. 查看摄像头信息
使用前,我们需要先了解这个摄像头支持什么样的分辨率,支持输出的数据格式、帧率等,不然错误的配置会导致无法打开摄像头。我们可以使用 v4l2-ctl
命令去查看,如果没有这个命令,可以通过 `sudo apt install v4l-utils` 安装。
另外你必须知道你的摄像头对应的设备是哪个,Linux 下摄像头设备都是在 "/dev/videoxx" 中,其中 xx 是一个数字,比如我这边摄像头安装在 "/dev/video33"。
bash
# Show device/driver information
v4l2-ctl --device=/dev/video33 --info
# List detailed format parameters (pixel formats / resolutions / framerates)
v4l2-ctl --device=/dev/video33 --list-formats-ext
打印信息如下:
# v4l2-ctl --device=/dev/video33 --list-formats-ext
ioctl: VIDIOC_ENUM_FMT
Type: Video Capture Multiplanar
[0]: 'UYVY' (UYVY 4:2:2)
Size: Stepwise 32x32 - 3840x2160 with step 8/8
[1]: 'NV16' (Y/CbCr 4:2:2)
Size: Stepwise 32x32 - 3840x2160 with step 8/8
[2]: 'NV61' (Y/CrCb 4:2:2)
Size: Stepwise 32x32 - 3840x2160 with step 8/8
[3]: 'NV21' (Y/CrCb 4:2:0)
Size: Stepwise 32x32 - 3840x2160 with step 8/8
[4]: 'NV12' (Y/CbCr 4:2:0)
Size: Stepwise 32x32 - 3840x2160 with step 8/8
[5]: 'NM21' (Y/CrCb 4:2:0 (N-C))
Size: Stepwise 32x32 - 3840x2160 with step 8/8
[6]: 'NM12' (Y/CbCr 4:2:0 (N-C))
Size: Stepwise 32x32 - 3840x2160 with step 8/8
2. 头文件
cpp
#pragma once

// Use the canonical GStreamer include path. The bare "gstelement.h" is not on
// the pkg-config include path — headers live under .../gstreamer-1.0/gst/, so
// the portable spelling is <gst/gst.h>.
#include <gst/gst.h>

// Captures frames from a V4L2 camera (/dev/video<N>) through a GStreamer
// pipeline (v4l2src ! capsfilter ! videoconvert ! appsink) and delivers each
// frame to onPreview() on the GStreamer streaming thread.
class GstreamerCamera {
public:
    GstreamerCamera();
    ~GstreamerCamera();

    // cameraId is the number after /dev/video, e.g. 33 for /dev/video33.
    // width/height must match a resolution the camera actually supports.
    void openCamera(int cameraId, int width, int height);
    // True after openCamera() succeeded and before releaseCamera().
    bool isOpened();
    // Stops and frees the pipeline; safe to call when already closed.
    void releaseCamera();

private:
    int cameraId_;          // /dev/video device number, -1 when closed
    int width_;             // requested capture width in pixels
    int height_;            // requested capture height in pixels
    bool openFlag_;         // true while the pipeline is running
    GstElement* pipeline_;  // owned pipeline, nullptr when closed

    // "new-sample" signal trampoline; user_data is the GstreamerCamera*.
    static GstFlowReturn onPreviewCallback(GstElement* sink, gpointer user_data);
    GstFlowReturn onPreview(GstElement* sink);
    bool initGStreamer(int cameraId, int width, int height);
    // Writes the gst_parse_launch() description into outPipeline (bufferSize bytes).
    void buildPipeline(char* outPipeline, size_t bufferSize) const;
};
3. 源代码
cpp
#include "GstreamerCamera.hpp"

#include <stdio.h>

// GStreamer headers must be included relative to the gstreamer-1.0 include
// root that pkg-config puts on the include path; the bare "gst.h" and
// "app/gstappsink.h" forms are not found there.
#include <gst/app/gstappsink.h>
#include <gst/gst.h>
// Start out in the "camera closed" state: no device, no size, no pipeline.
GstreamerCamera::GstreamerCamera()
    : cameraId_(-1),
      width_(0),
      height_(0),
      openFlag_(false),
      pipeline_(nullptr) {
}
// RAII teardown: if the camera is still open, stop and free the pipeline.
GstreamerCamera::~GstreamerCamera() {
    if (openFlag_) releaseCamera();
}
// Opens /dev/video<cameraId> at width x height, replacing any previously
// opened camera. On failure the object is reset to the closed state and
// isOpened() stays false.
void GstreamerCamera::openCamera(int cameraId, int width, int height) {
    if (openFlag_) {
        releaseCamera();
    }
    printf("cameraId: %d, width: %d, height: %d\n", cameraId, width, height);
    cameraId_ = cameraId;
    width_ = width;
    height_ = height;
    bool inited = initGStreamer(cameraId, width, height);
    if (!inited) {
        // Original called the undefined macro CS_LOGE (would not compile);
        // report via printf like the rest of this file.
        printf("Failed to initGStreamer\n");
        // Reset the members set above so the object returns to the closed state.
        releaseCamera();
        return;
    }
    openFlag_ = true;
}
// Whether a previous openCamera() call succeeded and the pipeline is running.
bool GstreamerCamera::isOpened() {
    return openFlag_;
}
// Stop and drop the GStreamer pipeline (if any), then reset every member to
// the closed state. Safe to call multiple times.
void GstreamerCamera::releaseCamera() {
    if (pipeline_ != nullptr) {
        gst_element_set_state(pipeline_, GST_STATE_NULL);  // stop streaming first
        gst_object_unref(pipeline_);                       // drop our ownership ref
        pipeline_ = nullptr;
    }
    openFlag_ = false;
    cameraId_ = -1;
    width_ = 0;
    height_ = 0;
}
void cleanupResources(GstElement *pipeline, GError *error) {
if (error) {
g_error_free(error);
}
if (pipeline) {
gst_element_set_state(pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
}
}
// Formats the gst_parse_launch() pipeline description into outPipeline,
// NUL-terminated and truncated if it would exceed bufferSize.
void GstreamerCamera::buildPipeline(char *outPipeline, size_t bufferSize) const {
/*
v4l2src device=/dev/video%d : source element; captures from the V4L2 device (Linux V4L2 capture API)
video/x-raw,format=NV12 : caps filter requesting NV12 output from the camera
width=...,height=... : requested frame size; must match a mode the camera supports
framerate=30/1 : fixed 30 FPS (hard-coded here)
videoconvert : converts color space / pixel format automatically when needed
appsink : application-side sink, configured with:
name=sink : named so initGStreamer() can look it up via gst_bin_get_by_name()
sync=false : disable clock synchronization to reduce latency
emit-signals=true : emit "new-sample" when a frame arrives
drop=true : drop old frames when the application falls behind
max-buffers=2 : cap the queued frames at 2
*/
// NOTE(review): the snprintf return value is not checked, so a silently
// truncated description would fail later in gst_parse_launch — 256 bytes is
// ample for this template, but worth confirming if the pipeline grows.
snprintf(outPipeline, bufferSize,
"v4l2src device=/dev/video%d ! "
"video/x-raw,format=NV12,width=%d,height=%d,framerate=30/1 ! "
"videoconvert ! appsink name=sink sync=false emit-signals=true drop=true max-buffers=2",
cameraId_, width_, height_);
}
bool GstreamerCamera::initGStreamer(int cameraId, int width, int height) {
// 初始化GStreamer
gst_init(NULL, NULL);
// 构建管道字符串
char gstreamerStr[256];
buildPipeline(gstreamerStr, sizeof(gstreamerStr));
printf("gstreamer str=%s\n", gstreamerStr);
// 创建管道
GError *error = NULL;
GstElement *pipeline = gst_parse_launch(gstreamerStr, &error);
if (!pipeline) {
printf("Pipeline creation failed: %s\n", error ? error->message : "Unknown error");
cleanupResources(NULL, error);
return false;
}
// 获取 appsink 元素
GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
if (!sink) {
printf("Failed to get appsink element\n");
cleanupResources(pipeline, error);
return false;
}
// 连接信号
g_signal_connect(sink, "new-sample", G_CALLBACK(onPreviewCallback), this);
gst_object_unref(sink); // 释放sink的引用
// 启动管道
GstStateChangeReturn ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
printf("Failed to start pipeline\n");
gst_object_unref(pipeline);
pipeline_ = nullptr;
return false;
}
pipeline_ = GST_ELEMENT(gst_object_ref(pipeline));
gst_object_unref(pipeline);
return true;
}
// Static trampoline for the "new-sample" signal: recovers the object pointer
// from user_data (set in g_signal_connect) and forwards to the member function.
GstFlowReturn GstreamerCamera::onPreviewCallback(GstElement *sink, gpointer user_data) {
    auto *self = static_cast<GstreamerCamera *>(user_data);
    return self->onPreview(sink);
}
// Per-frame handler, invoked on the GStreamer streaming thread each time the
// appsink has a new sample. Pulls the sample, maps the buffer read-only,
// lets the application consume the raw NV12 bytes, then unmaps/unrefs in
// reverse order. Keep this fast: long work here backs up the pipeline and
// causes frame drops (drop=true, max-buffers=2 in the pipeline description).
GstFlowReturn GstreamerCamera::onPreview(GstElement *sink) {
GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
if (!sample) {
printf("Failed to pull sample\n");
return GST_FLOW_ERROR;
}
GstBuffer *buffer = gst_sample_get_buffer(sample);
if (!buffer) {
gst_sample_unref(sample);
printf("Invalid buffer in sample\n");
return GST_FLOW_ERROR;
}
GstMapInfo map;
if (!gst_buffer_map(buffer, &map, GST_MAP_READ)) {
gst_sample_unref(sample);
printf("Failed to map buffer\n");
return GST_FLOW_ERROR;
}
unsigned char *data = (unsigned char *)map.data; // points into the mapped buffer — do NOT free it; just unmap below when done
int width = width_;
int height = height_;
// Example of consuming the frame, e.g. converting NV12 to RGB:
// cv::Mat image_nv12(height * 3 / 2, width, CV_8UC1, data);
// cv::Mat image_rgb(height, width, CV_8UC3, data);
// cv::cvtColor(image_nv12, image_rgb, cv::COLOR_YUV2RGB_NV12);
// NOTE(review): the commented example wraps image_rgb around the same `data`
// pointer; cvtColor needs a separately allocated destination Mat.
// Unmap and unref the buffer and sample
gst_buffer_unmap(buffer, &map);
gst_sample_unref(sample);
return GST_FLOW_OK;
}
上述代码在 onPreview()
函数中获取并处理摄像头数据,这里不建议做特别耗时的操作,否则会影响到摄像头的帧率。如果需要可以拷贝帧数据并存入到一个队列中缓存起来,但需要注意清除一下历史数据,否则内存占用会很大。
4. 调用示例
cpp
#include <stdio.h>
#include "GstreamerCamera.hpp"

// Minimal usage example: open /dev/video33 at 1080p and keep the process
// alive while frames are delivered to GstreamerCamera::onPreview().
int main(int argc, char* argv[]) {
    GstreamerCamera camera;
    int cameraId = 33;
    int width = 1920;
    int height = 1080;
    camera.openCamera(cameraId, width, height);
    if (!camera.isOpened()) {
        printf("Can't open camera: %d\n", cameraId);
        // main returns int; the original bare `return;` here did not compile.
        return -1;
    }
    while (true) {
        // Simulate long-running use of the camera.
        // GstreamerCamera does not yet expose a frame-fetch API; add one for
        // your use case.
        // NOTE(review): this empty loop spins a CPU core — sleep or block on a
        // condition variable in real code.
    }
    return 0;
}