奥比中光提供的 SDK 中没有直接用 OpenCV 读取并显示视频流的功能。参照官方示例与代码,写了一个用 OpenCV 显示奥比中光可见光(彩色)流的 C++ 程序。
代码:
cpp
#include <cstdlib>
#include <iostream>
#include <memory>

#include <opencv2/opencv.hpp>

#include "libobsensor/hpp/Pipeline.hpp"
#include "libobsensor/hpp/Error.hpp"
// Human-readable names for Orbbec per-frame metadata fields, intended to be
// indexed by the SDK's frame-metadata-type enum (so the order here must match
// that enum's declaration order).
// NOTE(review): not referenced by the visible code below — presumably used
// elsewhere to print frame metadata; confirm the ordering matches the
// OBFrameMetadataType enum of the linked SDK version.
const char *metaDataTypes[] = { "TIMESTAMP",
"SENSOR_TIMESTAMP",
"FRAME_NUMBER",
"AUTO_EXPOSURE",
"EXPOSURE",
"GAIN",
"AUTO_WHITE_BALANCE",
"WHITE_BALANCE",
"BRIGHTNESS",
"CONTRAST",
"SATURATION",
"SHARPNESS",
"BACKLIGHT_COMPENSATION",
"HUE",
"GAMMA",
"POWER_LINE_FREQUENCY",
"LOW_LIGHT_COMPENSATION",
"MANUAL_WHITE_BALANCE",
"ACTUAL_FRAME_RATE",
"FRAME_RATE",
"AE_ROI_LEFT",
"AE_ROI_TOP",
"AE_ROI_RIGHT",
"AE_ROI_BOTTOM",
"EXPOSURE_PRIORITY",
"HDR_SEQUENCE_NAME",
"HDR_SEQUENCE_SIZE",
"HDR_SEQUENCE_INDEX",
"LASER_POWER",
"LASER_POWER_LEVEL",
"LASER_STATUS",
"GPIO_INPUT_DATA" };
// Grab color frames from the first enumerated Orbbec device and render them
// with OpenCV. Each supported pixel format is converted to BGR (or BGRA) for
// display. Press ESC or 'q' in the image window to quit cleanly.
int main(int argc, char **argv) try {
    // Create a pipeline bound to the default device.
    ob::Pipeline pipe;

    // Enable only the color stream; depth/IR remain off.
    std::shared_ptr<ob::Config> config = std::make_shared<ob::Config>();
    config->enableVideoStream(OB_STREAM_COLOR);

    // Start streaming with the chosen configuration.
    pipe.start(config);

    cv::Mat rstMat;
    while(true) {
        // Wait up to 1000 ms (blocking) for the next frameset; on timeout
        // just poll again.
        auto frameSet = pipe.waitForFrames(1000);
        if(frameSet == nullptr) {
            continue;
        }

        auto colorFrame = frameSet->colorFrame();
        if(colorFrame == nullptr) {
            continue;
        }

        if(colorFrame->type() == OB_FRAME_COLOR) {
            auto videoFrame = colorFrame->as<ob::VideoFrame>();
            switch(videoFrame->format()) {
            case OB_FORMAT_MJPG: {
                // Compressed stream: hand the raw JPEG bytes to imdecode,
                // which produces a BGR image (or an empty Mat on failure).
                cv::Mat rawMat(1, videoFrame->dataSize(), CV_8UC1, videoFrame->data());
                rstMat = cv::imdecode(rawMat, 1);
            } break;
            case OB_FORMAT_NV21: {
                // NV21 is 12 bits/pixel planar YUV, hence height * 3 / 2 rows.
                cv::Mat rawMat(videoFrame->height() * 3 / 2, videoFrame->width(), CV_8UC1, videoFrame->data());
                cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_NV21);
            } break;
            case OB_FORMAT_YUYV:
            case OB_FORMAT_YUY2: {
                cv::Mat rawMat(videoFrame->height(), videoFrame->width(), CV_8UC2, videoFrame->data());
                cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_YUY2);
            } break;
            case OB_FORMAT_RGB: {
                cv::Mat rawMat(videoFrame->height(), videoFrame->width(), CV_8UC3, videoFrame->data());
                cv::cvtColor(rawMat, rstMat, cv::COLOR_RGB2BGR);
            } break;
            case OB_FORMAT_RGBA: {
                cv::Mat rawMat(videoFrame->height(), videoFrame->width(), CV_8UC4, videoFrame->data());
                cv::cvtColor(rawMat, rstMat, cv::COLOR_RGBA2BGRA);
            } break;
            case OB_FORMAT_BGRA: {
                // Already displayable; clone so rstMat owns its pixels and
                // cannot dangle once the SDK releases the frame buffer.
                rstMat = cv::Mat(videoFrame->height(), videoFrame->width(), CV_8UC4, videoFrame->data()).clone();
            } break;
            case OB_FORMAT_UYVY: {
                cv::Mat rawMat(videoFrame->height(), videoFrame->width(), CV_8UC2, videoFrame->data());
                cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_UYVY);
            } break;
            case OB_FORMAT_I420: {
                cv::Mat rawMat(videoFrame->height() * 3 / 2, videoFrame->width(), CV_8UC1, videoFrame->data());
                cv::cvtColor(rawMat, rstMat, cv::COLOR_YUV2BGR_I420);
            } break;
            default:
                // Unsupported format: leave rstMat as-is and skip this frame.
                break;
            }
        }

        // Guard against an empty Mat (unsupported format or failed MJPG
        // decode); cv::imshow throws on an empty image.
        if(!rstMat.empty()) {
            cv::imshow("ccc", rstMat);
        }

        // Pump the HighGUI event loop; exit the render loop on ESC or 'q' so
        // pipe.stop() below is actually reachable.
        int key = cv::waitKey(100);
        if(key == 27 || key == 'q') {
            break;
        }
    }

    // Stop the pipeline; no further frame data will be generated.
    pipe.stop();
    return 0;
}
catch(ob::Error &e) {
    // Print full SDK error context (failing function, args, message, type).
    std::cerr << "function:" << e.getName() << "\nargs:" << e.getArgs() << "\nmessage:" << e.getMessage() << "\ntype:" << e.getExceptionType() << std::endl;
    exit(EXIT_FAILURE);
}