This post mainly walks through the Sample_V1 example from the Percipio (图漾) official website.
1. Reference example
The main addition is loading camera parameter settings from the camera's storage area. To save camera parameters into the storage area with the Percipio PercipioViewer tool, see the link: 保存相机参数操作 (saving camera parameters).
Saving the parameter settings
Note: streaming on all camera components must be stopped before saving the parameters.
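For reference, the snippet below sketches how such a saved parameter block could be read back from the storage area. This is a minimal sketch, assuming a camport3-style storage component (`TY_COMPONENT_STORAGE` with the `TY_BYTEARRAY_CUSTOM_BLOCK` feature) and the `TYGetByteArraySize`/`TYGetByteArray` calls; the sample below uses a `load_parameters_from_storage()` helper from its common code for this step, and the sketch is not that helper's exact implementation. Applying the loaded JSON to the device is a separate step.

```cpp
#include <string>
#include <vector>
#include "TYApi.h"

//Minimal sketch: read the parameter block previously saved to the camera's storage area.
//Assumption: the SDK exposes TY_COMPONENT_STORAGE with the TY_BYTEARRAY_CUSTOM_BLOCK feature.
static TY_STATUS read_custom_block_sketch(TY_DEV_HANDLE hDevice, std::string& js_data)
{
    uint32_t block_size = 0;
    TY_STATUS st = TYGetByteArraySize(hDevice, TY_COMPONENT_STORAGE,
                                      TY_BYTEARRAY_CUSTOM_BLOCK, &block_size);
    if (st != TY_STATUS_OK || block_size == 0) {
        return TY_STATUS_ERROR; //nothing has been saved to the storage area
    }
    std::vector<uint8_t> buf(block_size, 0);
    st = TYGetByteArray(hDevice, TY_COMPONENT_STORAGE, TY_BYTEARRAY_CUSTOM_BLOCK,
                        buf.data(), block_size);
    if (st != TY_STATUS_OK) {
        return st;
    }
    buf.push_back(0); //make sure the block is null-terminated
    //the block is treated as a JSON string written by the save operation
    js_data.assign(reinterpret_cast<const char*>(buf.data()));
    return TY_STATUS_OK;
}
```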
The complete code is as follows:
```cpp
#include <limits>
#include <cassert>
#include <cmath>
#include "../common/common.hpp"
#include <TYImageProc.h>
#include <chrono>
#include <cinttypes> //PRIu64, used when printing frame timestamps
#include <ctime>     //localtime/strftime for timestamp formatting
#include <cstdio>    //sprintf
#include <cstring>   //strcat
//Parameter switch: by default the parameters saved inside the camera are used
//(the saved parameters can still be modified after loading)
//Different camera models support different features; use the PercipioViewer tool to check
//which features a camera supports and their valid ranges
bool setParameters = false;
//Depth-to-color registration switch: 1 maps the depth image into the color coordinate frame, 0 leaves it unmapped
//Mapping color to depth loses color information where depth is missing,
//so mapping depth to color is the default
#define MAP_DEPTH_TO_COLOR 0
//Enabling the depth rendering windows below will lower the frame rate
DepthViewer depthViewer0("OrgDepth");//renders the original depth image
DepthViewer depthViewer1("FillHoleDepth");//renders the depth image after hole filling
DepthViewer depthViewer2("SpeckleFilterDepth"); //renders the depth image after speckle filtering
DepthViewer depthViewer3("EnhenceFilterDepth"); //renders the depth image after temporal (enhance) filtering
DepthViewer depthViewer4("MappedDepth"); //renders the depth image mapped into the color coordinate frame
//Event callback
void eventCallback(TY_EVENT_INFO *event_info, void *userdata)
{
if (event_info->eventId == TY_EVENT_DEVICE_OFFLINE) {
LOGD("=== Event Callback: Device Offline!");
*(bool*)userdata = true;
// Note:
// Please set the TY_BOOL_KEEP_ALIVE_ONOFF feature to false if you need to debug with breakpoints!
}
else if (event_info->eventId == TY_EVENT_LICENSE_ERROR) {
LOGD("=== Event Callback: License Error!");
}
}
//Format conversion
//cv pixel format to TY_PIXEL_FORMAT
static int cvpf2typf(int cvpf)
{
switch(cvpf){
case CV_8U: return TY_PIXEL_FORMAT_MONO;
case CV_8UC3: return TY_PIXEL_FORMAT_RGB;
case CV_16UC1: return TY_PIXEL_FORMAT_DEPTH16;
default: return TY_PIXEL_FORMAT_UNDEFINED;
}
}
//Format conversion
//cv::Mat to TY_IMAGE_DATA
static void mat2TY_IMAGE_DATA(int comp, const cv::Mat& mat, TY_IMAGE_DATA& data)
{
data.status = 0;
data.componentID = comp;
data.size = mat.total() * mat.elemSize();
data.buffer = mat.data;
data.width = mat.cols;
data.height = mat.rows;
data.pixelFormat = cvpf2typf(mat.type());
}
//Data passed to the frame callback
struct CallbackData {
int index; //frame counter
TY_DEV_HANDLE hDevice; //device handle, needed to re-enqueue buffers
TY_CAMERA_INTRINSIC* intri_depth; //depth intrinsics at the current resolution
TY_CAMERA_INTRINSIC* intri_color; //color intrinsics at the current resolution
TY_CAMERA_CALIB_INFO depth_calib; //depth calibration data (intrinsics, distortion, extrinsics)
TY_CAMERA_CALIB_INFO color_calib; //color calibration data
float scale_unit; //depth scale unit (mm per raw depth value)
bool saveOneFramePoint3d; //set true to save one point-cloud frame
int fileIndex; //index used in saved file names
bool isTof; //true for ToF cameras (their depth image needs undistortion)
};
CallbackData cb_data;
//Convert a depth image to a point cloud using the intrinsics (for reference)
//depth to pointcloud
cv::Mat depthToWorld(float* intr, const cv::Mat& depth, float scale_unit = 1.0)
{
cv::Mat world(depth.rows, depth.cols, CV_32FC3);
float cx = intr[2];
float cy = intr[5];
float inv_fx = 1.0f / intr[0];
float inv_fy = 1.0f / intr[4];
for (int r = 0; r < depth.rows; r++)
{
uint16_t* pSrc = (uint16_t*)depth.data + r * depth.cols;
cv::Vec3f* pDst = (cv::Vec3f*)world.data + r * depth.cols;
for (int c = 0; c < depth.cols; c++)
{
float z = pSrc[c] * scale_unit;//use float to keep precision when scale_unit < 1
if (pSrc[c] == 0) {
pDst[c][0] = NAN;
pDst[c][1] = NAN;
pDst[c][2] = NAN;
}
else {
pDst[c][0] = (c - cx) * z * inv_fx;
pDst[c][1] = (r - cy) * z * inv_fy;
pDst[c][2] = z;
}
}
}
return world;
}
//Undistort the color image and (optionally) map the depth image into the color coordinate frame
static void doRegister(const TY_CAMERA_CALIB_INFO& depth_calib
, const TY_CAMERA_CALIB_INFO& color_calib
, const cv::Mat& depth
, const float f_scale_unit
, const cv::Mat& color
, cv::Mat& undistort_color
, cv::Mat& out
, bool map_depth_to_color
)
{
// do undistortion
TY_IMAGE_DATA src;
src.width = color.cols;
src.height = color.rows;
src.size = color.size().area() * 3;
src.pixelFormat = TY_PIXEL_FORMAT_RGB;
src.buffer = color.data;
undistort_color = cv::Mat(color.size(), CV_8UC3);
TY_IMAGE_DATA dst;
dst.width = color.cols;
dst.height = color.rows;
dst.size = undistort_color.size().area() * 3;
dst.buffer = undistort_color.data;
dst.pixelFormat = TY_PIXEL_FORMAT_RGB;
ASSERT_OK(TYUndistortImage(&color_calib, &src, NULL, &dst));
// do register
if (map_depth_to_color) {
out = cv::Mat::zeros(undistort_color.size(), CV_16U);
ASSERT_OK(
TYMapDepthImageToColorCoordinate(
&depth_calib,
depth.cols, depth.rows, depth.ptr<uint16_t>(),
&color_calib,
out.cols, out.rows, out.ptr<uint16_t>(), f_scale_unit)
);
}
else {
out = depth;
}
}
//Frame handler
void frameHandler(TY_FRAME_DATA* frame, void* userdata)
{
CallbackData* pData = (CallbackData*) userdata;
LOGD("=== Get frame %d", ++pData->index);
std::vector<TY_VECT_3F> P3dtoColor;//point cloud registered to the color frame
cv::Mat depth, color, p3d, newP3d;
//auto StartParseFrame = std::chrono::steady_clock::now();
//parse the frame
parseFrame(*frame, &depth, 0, 0, &color);//fetch the depth and color images
//parseFrame(*frame, &depth, 0, 0, 0);//fetch the depth image only
// auto ParseFrameFinished = std::chrono::steady_clock::now();
// auto duration2 = std::chrono::duration_cast<std::chrono::microseconds>(ParseFrameFinished - StartParseFrame);
// LOGI("*******ParseFrame spend Time : %lld", (long long)duration2.count());
//Hole-filling switch; enabling it lowers the frame rate
bool FillHole = false;
//Speckle-filter switch; removes isolated noise points from the depth image
bool SpeckleFilter = true;
//Temporal (enhance) filter switch; reduces per-pixel jitter and improves point-cloud flatness
bool EnhenceFilter = false;
//Depth image processing
if (!depth.empty())
{
if (pData->isTof)
{
//For ToF cameras (e.g. TM26X and TM421) the depth image needs undistortion; stereo cameras do not need this step
TY_IMAGE_DATA src;
src.width = depth.cols;
src.height = depth.rows;
src.size = depth.size().area() * 2;
src.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
src.buffer = depth.data;
cv::Mat undistort_depth = cv::Mat(depth.size(), CV_16U);
TY_IMAGE_DATA dst;
dst.width = depth.cols;
dst.height = depth.rows;
dst.size = undistort_depth.size().area() * 2;
dst.buffer = undistort_depth.data;
dst.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
ASSERT_OK(TYUndistortImage(&pData->depth_calib, &src, NULL, &dst));
depth = undistort_depth.clone();
}
if (FillHole)
{
//Fill holes in the depth image
DepthInpainter inpainter;
inpainter._kernelSize = 10;
inpainter._maxInternalHoleToBeFilled = 1800;
inpainter._fillAll = false;
inpainter.inpaint(depth, depth, cv::Mat());
depthViewer1.show(depth);
}
if (SpeckleFilter)
{
//Apply the speckle filter
TY_IMAGE_DATA sfFilteredDepth;
cv::Mat filteredDepth(depth.size(), depth.type());
filteredDepth = depth.clone();
mat2TY_IMAGE_DATA(TY_COMPONENT_DEPTH_CAM, filteredDepth, sfFilteredDepth);
struct DepthSpeckleFilterParameters sfparam = DepthSpeckleFilterParameters_Initializer;
sfparam.max_speckle_size = 300;//speckles smaller than this area are removed
sfparam.max_speckle_diff = 64;//neighboring pixels whose disparity differs by more than this are treated as noise
TYDepthSpeckleFilter(&sfFilteredDepth, &sfparam);
//render the speckle-filtered depth image
depthViewer2.show(filteredDepth);
//point cloud in CV_32FC3 format
newP3d = depthToWorld(pData->intri_depth->data, filteredDepth, pData->scale_unit);
depth = filteredDepth.clone();
//Save the filtered depth image (uncomment to enable)
//char file[32];
//sprintf(file, "depth-%d.png", pData->fileIndex++);
//cv::imwrite(file, filteredDepth);
}
if (EnhenceFilter)
{
//Apply the temporal (enhance) filter
TY_IMAGE_DATA efFilteredDepthin, efFilteredDepthout;
cv::Mat filteredDepth1(depth.size(), depth.type());
cv::Mat filteredDepth2(depth.size(), depth.type());
filteredDepth1 = depth.clone();
mat2TY_IMAGE_DATA(TY_COMPONENT_DEPTH_CAM, filteredDepth1, efFilteredDepthin);
mat2TY_IMAGE_DATA(TY_COMPONENT_DEPTH_CAM, filteredDepth2, efFilteredDepthout);
struct DepthEnhenceParameters efparam = DepthEnhenceParameters_Initializer;
efparam.sigma_s = 0;//spatial filter coefficient
efparam.sigma_r = 0;//depth filter coefficient
efparam.outlier_win_sz = 0;//filter window size in pixels
efparam.outlier_rate = 0.f;//outlier filter coefficient
TY_IMAGE_DATA *guide = nullptr;
//only a single depth frame is passed in here, so the image count is 1
TYDepthEnhenceFilter(&efFilteredDepthin, 1, guide, &efFilteredDepthout, &efparam);
//render the temporally filtered depth image
depthViewer3.show(filteredDepth2);
//point cloud in CV_32FC3 format
newP3d = depthToWorld(pData->intri_depth->data, filteredDepth2, pData->scale_unit);
depth = filteredDepth2.clone();
//Save the filtered depth image (uncomment to enable)
//char file[32];
//sprintf(file, "depth-%d.png", pData->fileIndex++);
//cv::imwrite(file, filteredDepth2);
}
else if (!FillHole && !SpeckleFilter && !EnhenceFilter)
{
//render the original depth image
depthViewer0.show(depth);
//original point cloud
p3d = depthToWorld(pData->intri_depth->data, depth, pData->scale_unit);
}
}
//Color image processing
cv::Mat color_data_mat, p3dtocolorMat;
if (!color.empty())
{
//show the original color image
//imshow("orgColor", color);
cv::Mat undistort_color, MappedDepth;
if (MAP_DEPTH_TO_COLOR)
{
auto BeforedoRegister = std::chrono::steady_clock::now();
//undistort the color image and map the depth image into the color coordinate frame
doRegister(pData->depth_calib, pData->color_calib, depth, pData->scale_unit, color, undistort_color, MappedDepth, MAP_DEPTH_TO_COLOR);
//convert the pixel format
cv::cvtColor(undistort_color, color_data_mat, CV_BGR2RGB);
//Generate the point cloud in the color coordinate frame; two methods
//Method 1: store the point cloud in TY_VECT_3F --- P3dtoColor
P3dtoColor.resize(MappedDepth.size().area());
ASSERT_OK(TYMapDepthImageToPoint3d(&pData->color_calib, MappedDepth.cols, MappedDepth.rows
, (uint16_t*)MappedDepth.data, &P3dtoColor[0], pData->scale_unit));
//Method 2: store the point cloud in a CV_32FC3 Mat --- p3dtocolorMat (needed by the colored point-cloud saving branch below)
//p3dtocolorMat = depthToWorld(pData->intri_color->data, MappedDepth, pData->scale_unit);
auto AfterdoRegister = std::chrono::steady_clock::now();
auto duration3 = std::chrono::duration_cast<std::chrono::microseconds>(AfterdoRegister - BeforedoRegister);
LOGI("*******do Rgb Undistortion--MapDepthToColor--P3D spend Time : %lld", (long long)duration3.count());
//show the undistorted color image
imshow("undistort_color", undistort_color);
//show the depth image mapped into the color coordinate frame
depthViewer4.show(MappedDepth);
}
else
{
//undistort the color image only; the depth image is left unmapped
doRegister(pData->depth_calib, pData->color_calib, depth, pData->scale_unit, color, undistort_color, MappedDepth, MAP_DEPTH_TO_COLOR);
//show the undistorted color image
imshow("undistort_color", undistort_color);
}
}
//Save the point cloud
//save pointcloud
if (pData->saveOneFramePoint3d)
{
char file[32];
if (MAP_DEPTH_TO_COLOR)
{
LOGD("Save p3dtocolor now!!!");
//Save an XYZRGB colored point cloud in the color coordinate frame
//(requires method 2 above to be enabled so that p3dtocolorMat is filled)
sprintf(file, "p3dtocolor-%d.xyz", pData->fileIndex++);
writePointCloud((cv::Point3f*)p3dtocolorMat.data, (const cv::Vec3b*)color_data_mat.data, p3dtocolorMat.total(), file, PC_FILE_FORMAT_XYZ);
}
else
{
LOGD("Save point3d now!!!");
//Save an XYZ point cloud
//(newP3d is filled by the filter branches above; with all filters disabled use p3d instead)
sprintf(file, "points-%d.xyz", pData->fileIndex++);
writePointCloud((cv::Point3f*)newP3d.data, 0, newP3d.total(), file, PC_FILE_FORMAT_XYZ);
}
pData->saveOneFramePoint3d = false;
}
//Return the buffer to the queue
LOGD("=== Re-enqueue buffer(%p, %d)", frame->userBuffer, frame->bufferSize);
ASSERT_OK( TYEnqueueBuffer(pData->hDevice, frame->userBuffer, frame->bufferSize) );
}
int main(int argc, char* argv[])
{
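//Overall flow of main():
//  1. parse command-line options and initialize the SDK
//  2. select and open the device, then configure host time sync
//  3. load parameters from the camera's storage area (or set them manually when setParameters is true)
//  4. enable and configure the RGB and depth components, read intrinsics and calibration data
//  5. configure ToF- or stereo-specific features, trigger mode and GigE resend
//  6. allocate buffers, start capture, then loop: soft-trigger, fetch, process, re-enqueue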
std::string ID, IP;
TY_INTERFACE_HANDLE hIface = NULL;
TY_DEV_HANDLE hDevice = NULL;
TY_CAMERA_INTRINSIC intri_depth;
TY_CAMERA_INTRINSIC intri_color;
int32_t resend = 1;
bool isTof = 0;
for(int i = 1; i < argc; i++)
{
if(strcmp(argv[i], "-id") == 0){
ID = argv[++i];
} else if(strcmp(argv[i], "-ip") == 0)
{
IP = argv[++i];
} else if(strcmp(argv[i], "-h") == 0){
LOGI("Usage: SimpleView_Callback [-h] [-id <ID>]");
return 0;
}
}
LOGD("=== Init lib");
ASSERT_OK(TYInitLib());
TY_VERSION_INFO ver;
ASSERT_OK(TYLibVersion(&ver));
LOGD(" - lib version: %d.%d.%d", ver.major, ver.minor, ver.patch);
std::vector<TY_DEVICE_BASE_INFO> selected;
//Select a camera
ASSERT_OK(selectDevice(TY_INTERFACE_ALL, ID, IP, 1, selected));
ASSERT(selected.size() > 0);
//Use the first camera by default
TY_DEVICE_BASE_INFO& selectedDev = selected[0];
//const std::string actualBrand = selected[0].modelName;
//Open the interface and the device
ASSERT_OK(TYOpenInterface(selectedDev.iface.id, &hIface));
ASSERT_OK(TYOpenDevice(hIface, selectedDev.id, &hDevice));
//Time-sync setup
LOGD("Set type of time sync mechanism");
ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEVICE, TY_ENUM_TIME_SYNC_TYPE, TY_TIME_SYNC_TYPE_HOST));
LOGD("Wait for time sync ready");
while (1)
{
bool sync_ready;
ASSERT_OK(TYGetBool(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_TIME_SYNC_READY, &sync_ready));
if (sync_ready)
{
break;
}
MSLEEP(10);
}
//Set the camera log level; VERBOSE > DEBUG > INFO > WARNING > ERROR
//ASSERT_OK(TYSetLogLevel(TY_LOG_LEVEL_ERROR));
//ASSERT_OK(TYAppendLogToFile("test_log.txt", TY_LOG_LEVEL_DEBUG)); //write camera logs to a file; use TYRemoveLogFile to stop logging to it
//Use the parameters saved inside the camera
if (!setParameters)
{
std::string js_data;
int ret;
ret = load_parameters_from_storage(hDevice, js_data);//load the parameters saved in the camera
if (ret == TY_STATUS_ERROR)
{
LOGD("no saved parameters in the camera");
setParameters = true;
}
else if (ret != TY_STATUS_OK)
{
LOGD("Failed: error %d(%s)", ret, TYErrorString(ret));
setParameters = true;
}
}
//Enable the color camera
//try to enable color camera
LOGD("Has RGB camera, open RGB cam");
ASSERT_OK(TYEnableComponents(hDevice, TY_COMPONENT_RGB_CAM));
//Set the color camera pixel format and resolution
if (setParameters)
{
LOGD("=== Configure feature, set RGB resolution");
//Method 1: set the pixel format and resolution directly
//ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_BAYER8GB_640x480));
//ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_YUYV_1920x1080));
//Color pixel formats and resolutions differ between camera models; see the camera datasheet
//Method 2: enumerate the supported image modes and pick a resolution by image width, ignoring the pixel format
TY_STATUS status = TY_STATUS_OK;
if (TY_COMPONENT_RGB_CAM)
{
std::vector<TY_ENUM_ENTRY> image_mode_list;
status = get_feature_enum_list(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, image_mode_list);
for (int idx = 0; idx < image_mode_list.size(); idx++)
{
TY_ENUM_ENTRY& entry = image_mode_list[idx];
//try to select a resolution
if (TYImageWidth(entry.value) == 640)
{
LOGD("Select RGB Image Mode: %s", entry.description);
int err = TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, entry.value);
ASSERT(err == TY_STATUS_OK || err == TY_STATUS_NOT_PERMITTED);
break;
}
}
}
}
//Read the color camera calibration data
//TY_STRUCT_CAM_CALIB_DATA holds the intrinsics for the camera's maximum resolution
//TY_STRUCT_CAM_INTRINSIC holds the intrinsics for the current resolution
LOGD("=== Get color intrinsic");
ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_INTRINSIC, &intri_color, sizeof(intri_color)));
LOGD("=== Read color calib data");
ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA
, &cb_data.color_calib, sizeof(cb_data.color_calib)));
//Hardware ISP: only the RGB sensors of some cameras support it, e.g. FM854-E1, FM855-E1, TM265 and TM421.
//Query whether the RGB camera supports auto exposure, auto white balance, auto gain, etc.
//These features cannot be saved to storage, but they are enabled by default.
//*********************These features can stay at their defaults; setting them is optional**************************
bool hasAUTOEXPOSURE, hasAUTOGAIN, hasAUTOAWB;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_EXPOSURE, &hasAUTOEXPOSURE));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_AWB, &hasAUTOAWB));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_GAIN, &hasAUTOGAIN));
if (hasAUTOEXPOSURE && setParameters)
{
ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_EXPOSURE, true));//turn on AEC (auto exposure)
}
if (hasAUTOAWB && setParameters)
{
ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_AWB, true));//turn on AWB (auto white balance)
}
if (hasAUTOGAIN && setParameters)
{
ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_GAIN, true));//turn on AGC (auto gain); only the TM265 supports this feature
}
//*********************These features can stay at their defaults; setting them is optional**************************
//Query the RGB features the camera supports
//*********************These features can be saved to storage; setting them is optional**************************
bool hasRGB_ANALOG_GAIN, hasRGB_R_GAIN, hasRGB_G_GAIN, hasRGB_B_GAIN, hasRGB_EXPOSURE_TIME, hasRGB_AE_TARGET_V;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, &hasRGB_ANALOG_GAIN));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_R_GAIN, &hasRGB_R_GAIN));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_G_GAIN, &hasRGB_G_GAIN));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_B_GAIN, &hasRGB_B_GAIN));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_EXPOSURE_TIME, &hasRGB_EXPOSURE_TIME));
if (hasRGB_ANALOG_GAIN && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, 2));//set the RGB analog gain; only stereo cameras such as the FM854 and FM855 support it
}
if (hasRGB_R_GAIN && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_R_GAIN, 130));//set the RGB digital gain, R channel
}
if (hasRGB_G_GAIN && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_G_GAIN, 80));//set the RGB digital gain, G channel
}
if (hasRGB_B_GAIN && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_B_GAIN, 150));//set the RGB digital gain, B channel
}
if (hasRGB_EXPOSURE_TIME && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_EXPOSURE_TIME, 300));//set the RGB exposure time; all cameras with an RGB sensor support it, only the range differs
}
//*********************These features can be saved to storage; setting them is optional**************************
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_AE_TARGET_V, &hasRGB_AE_TARGET_V));
if (hasRGB_AE_TARGET_V && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_AE_TARGET_V, 3500));//set the RGB auto-exposure target brightness, range (0,4000)
}
//Enable the depth camera
//try to enable depth cam
LOGD("=== Configure components, open depth cam");
int32_t componentIDs = TY_COMPONENT_DEPTH_CAM;
ASSERT_OK( TYEnableComponents(hDevice, componentIDs) );
//Set the depth image resolution
if (setParameters)
{
LOGD("=== Configure feature, set depth resolution");
//Method 1: set the resolution directly
//ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_DEPTH16_640x480));
//Method 2: enumerate the supported image modes and pick a resolution by image width
if (TY_COMPONENT_DEPTH_CAM)
{
std::vector<TY_ENUM_ENTRY> image_mode_list;
TY_STATUS status = TY_STATUS_OK;
status = get_feature_enum_list(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, image_mode_list);
for (int idx = 0; idx < image_mode_list.size(); idx++)
{
TY_ENUM_ENTRY &entry = image_mode_list[idx];
//try to select a resolution
if (TYImageWidth(entry.value) == 640)
{
LOGD("Select Depth Image Mode: %s", entry.description);
int err = TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, entry.value);
ASSERT(err == TY_STATUS_OK || err == TY_STATUS_NOT_PERMITTED);
status = TYEnableComponents(hDevice, TY_COMPONENT_DEPTH_CAM);
break;
}
}
}
}
//Read the depth camera intrinsics and calibration data
//TY_STRUCT_CAM_CALIB_DATA holds the intrinsics for the camera's maximum resolution
//TY_STRUCT_CAM_INTRINSIC holds the intrinsics for the current resolution
LOGD("=== Get depth intrinsic");
ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_INTRINSIC, &intri_depth, sizeof(intri_depth)));
LOGD("=== Read depth calib data");
ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA
, &cb_data.depth_calib, sizeof(cb_data.depth_calib)));
//you can set ToF camera features here
//*************************************************
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_DISTORTION, &isTof));//check whether this is a ToF camera
if (isTof && setParameters)
{
//*********************These features can be saved to storage****************************
//Set the frequency channel; in multi-camera setups each camera can use a different channel
int channel = 0;//channel 0, 1, 2, 3, ...
LOGD("Set TOF_CHANNEL %d", channel);
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_CHANNEL, channel));
//Set the image quality mode
int quality = 2;//quality mode: 1, 2, 4; the TM26X only has Basic and Medium modes, while the TL430/TM421 have Medium and High modes
LOGD("Set DEPTH_QUALITY %d", quality);
ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_DEPTH_QUALITY, quality));
//Set the intensity confidence threshold
int modulation = 640; //pixels whose intensity confidence is below this threshold are excluded from depth computation (their depth is set to 0); range (0,65535)
LOGD("Set TOF_MODULATION_THRESHOLD %d", modulation);
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_MODULATION_THRESHOLD, modulation));
//Set the flying-pixel filter threshold
int filter = 0;//flying-pixel filter; the smaller the threshold, the more flying pixels are removed
LOGD("Set FILTER_THRESHOLD %d", filter);
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_FILTER_THRESHOLD, filter));
bool hasJITTER_THRESHOLD = true;
//Set the jitter filter threshold
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_JITTER_THRESHOLD, &hasJITTER_THRESHOLD));
if (hasJITTER_THRESHOLD && setParameters)
{
int jitter = 6;//range (1,10); the larger the value, the less edge-jitter depth data is filtered out; increase it when imaging black or other low-reflectivity materials
LOGD("Set TOF_JITTER_THRESHOLD %d", jitter);
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_JITTER_THRESHOLD, jitter));
}
//Filter settings
//set TY_INT_MAX_SPECKLE_SIZE
bool hasMAX_SPECKLE_SIZE = true;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_SIZE, &hasMAX_SPECKLE_SIZE));
if (hasMAX_SPECKLE_SIZE && setParameters)
{
int speckle_size = 50; //(0,200) speckles smaller than this area are removed
LOGD("Set MAX_SPECKLE_SIZE %d", speckle_size);
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_SIZE, speckle_size));
}
//set TY_INT_MAX_SPECKLE_DIFF
bool hasMAX_SPECKLE_DIFF = true;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_DIFF, &hasMAX_SPECKLE_DIFF));
if (hasMAX_SPECKLE_DIFF && setParameters)
{
int speckle_diff = 200; //(100,500) neighboring pixels whose disparity differs by more than this are treated as noise
LOGD("Set MAX_SPECKLE_DIFF %d", speckle_diff);
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_DIFF, speckle_diff));
}
//*********************These features can be saved to storage****************************
// Set TY_BOOL_TOF_ANTI_INTERFERENCE, e.g. to suppress interference from objects beyond 3 m; adjust anti-interference first. Only the TM26X has this feature.
bool hasANTI_INTERFERENCE = true;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_BOOL_TOF_ANTI_INTERFERENCE, &hasANTI_INTERFERENCE));
if (hasANTI_INTERFERENCE && setParameters)
{
LOGD("Set TY_BOOL_TOF_ANTI_INTERFERENCE ");
ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_DEPTH_CAM, TY_BOOL_TOF_ANTI_INTERFERENCE, true));
}
}
//********************Only stereo cameras have the features below******************************
//Set the analog gain, digital gain and exposure of the left/right IR cameras
//adjust the gain and exposure of Left&Right IR camera
//Query the features supported by the left/right IR cameras
bool hasIR_ANALOG_GAIN, hasIR_GAIN, hasIR_EXPOSURE_TIME, hasIR_HDR;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_ANALOG_GAIN, &hasIR_ANALOG_GAIN));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_GAIN, &hasIR_GAIN));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_EXPOSURE_TIME, &hasIR_EXPOSURE_TIME));
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_BOOL_HDR, &hasIR_HDR));
if (hasIR_ANALOG_GAIN && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_ANALOG_GAIN, 2));//set the left/right IR analog gain
if (!isTof)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_ANALOG_GAIN, 2));
}
}
if (hasIR_GAIN && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_GAIN, 32));//set the left/right IR digital gain
if (!isTof)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_GAIN, 32));
}
}
if (hasIR_EXPOSURE_TIME && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_EXPOSURE_TIME, 500)); //set the IR exposure time
if (!isTof)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_EXPOSURE_TIME, 500));
}
}
if (hasIR_HDR && setParameters)
{
ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_BOOL_HDR, true));
ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_BOOL_HDR, true));//enable HDR
}
//Set the laser power; normally there is no need to change it unless the depth image is over-exposed
//adjust the laser power
bool hasLASER_POWER;
//the laser power feature belongs to the laser component, so query and set it there
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_LASER, TY_INT_LASER_POWER, &hasLASER_POWER));
if (hasLASER_POWER && setParameters)
{
ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_LASER, TY_INT_LASER_POWER, 100));// range(0,100)
}
// ********************Only stereo cameras have the features above******************************
//Query the required frame buffer size
LOGD("=== Prepare image buffer");
uint32_t frameSize;
ASSERT_OK( TYGetFrameBufferSize(hDevice, &frameSize) );
LOGD("- Get size of framebuffer, %d", frameSize);
//Allocate two buffers and enqueue them
LOGD(" - Allocate & enqueue buffers");
char* frameBuffer[2];
frameBuffer[0] = new char[frameSize];
frameBuffer[1] = new char[frameSize];
LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[0], frameSize);
ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[0], frameSize) );
LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[1], frameSize);
ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[1], frameSize) );
//Register the event callback to catch device-offline events
bool device_offline = false;
LOGD("Register event callback");
ASSERT_OK(TYRegisterEventCallback(hDevice, eventCallback, &device_offline));
//Trigger mode setup
bool hasTrigger;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &hasTrigger));
if (hasTrigger)
{
TY_TRIGGER_PARAM trigger;
//LOGD("Disable trigger mode");
//trigger.mode = TY_TRIGGER_MODE_OFF;//continuous capture mode
LOGD("=== enable trigger mode");
trigger.mode = TY_TRIGGER_MODE_SLAVE;//slave mode, triggered by software or hardware signals
ASSERT_OK(TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &trigger, sizeof(trigger)));
bool hasDI0_WORKMODE;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_DI0_WORKMODE, &hasDI0_WORKMODE));
//if (hasDI0_WORKMODE)
//{
//  //debounce for hardware trigger mode
//  TY_DI_WORKMODE di_wm;
//  di_wm.mode = TY_DI_PE_INT;
//  di_wm.int_act = TY_DI_INT_TRIG_CAP;
//  uint32_t time_hw = 10;//in ms; hardware filter: level pulses shorter than this are ignored
//  uint32_t time_sw = 200;//in ms; software filter: with continuous high-frequency triggering, a trigger arriving within this period after the previous one is ignored
//  di_wm.reserved[0] = time_hw | (time_sw << 16);
//  ASSERT_OK(TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_DI0_WORKMODE, &di_wm, sizeof(di_wm)));
//}
//
}
//For GigE cameras, enable packet resend
//for network only
LOGD("=== resend: %d", resend);
if (resend)
{
bool hasResend;
ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_GVSP_RESEND, &hasResend));
if (hasResend)
{
LOGD("=== Open resend");
ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_GVSP_RESEND, true));
}
else
{
LOGD("=== Not support feature TY_BOOL_GVSP_RESEND");
}
}
//Start capturing
LOGD("=== Start capture");
ASSERT_OK( TYStartCapture(hDevice) );
//Initialize the callback data
cb_data.index = 0;
cb_data.hDevice = hDevice;
cb_data.saveOneFramePoint3d = false;
cb_data.fileIndex = 0;
cb_data.intri_depth = &intri_depth;
cb_data.intri_color = &intri_color;
float scale_unit = 1.;
TYGetFloat(hDevice, TY_COMPONENT_DEPTH_CAM, TY_FLOAT_SCALE_UNIT, &scale_unit);
cb_data.scale_unit = scale_unit;
cb_data.isTof = isTof;
//Fetch frames in a loop
LOGD("=== While loop to fetch frame");
TY_FRAME_DATA frame;
bool exit_main = false;
int index = 0;
while(!exit_main)
{
int key = cv::waitKey(1);
switch(key & 0xff)
{
case 0xff:
break;
case 'q':
exit_main = true;
break;
case 's':
cb_data.saveOneFramePoint3d = true;//press 's' in an image window to save one point-cloud frame
break;
default:
LOGD("Pressed key %d", key);
}
auto timeTrigger = std::chrono::steady_clock::now();
//send one software trigger
while (TY_STATUS_BUSY == TYSendSoftTrigger(hDevice));
//fetch a frame; the timeout is set to 10 s
int err = TYFetchFrame(hDevice, &frame, 10000);
if (device_offline)
{
LOGI("Found device offline");
break;
}
if (err != TY_STATUS_OK)
{
LOGD("... Drop one frame");
continue;
}
//read the image timestamp (only after a frame was fetched successfully)
LOGD("=== Time Stamp (%" PRIu64 ")", frame.image[0].timestamp);
time_t tick = (time_t)(frame.image[0].timestamp / 1000000);
struct tm tm;
char s[100];
tm = *localtime(&tick);
strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tm);
int milliseconds = (int)((frame.image[0].timestamp % 1000000) / 1000);
char ms_str[8];
sprintf(ms_str, ".%03d", milliseconds);
strcat(s, ms_str);
LOGD("===Time Stamp %d:%s\n", (int)tick, s);
LOGD("Get frame %d", ++index);
int fps = get_fps();
if (fps > 0)
{
LOGI("***************************fps: %d", fps);
}
frameHandler(&frame, &cb_data);
auto timeGetFrame = std::chrono::steady_clock::now();
auto duration = std::chrono::duration_cast<std::chrono::microseconds>(timeGetFrame - timeTrigger);
LOGI("*******FetchFrame spend Time : %lld", (long long)duration.count());
}
ASSERT_OK( TYStopCapture(hDevice) );
ASSERT_OK( TYCloseDevice(hDevice) );
ASSERT_OK( TYCloseInterface(hIface) );
ASSERT_OK( TYDeinitLib() );
delete[] frameBuffer[0];
delete[] frameBuffer[1];
LOGD("=== Main done!");
return 0;
}
```