From bf7b03a7d26de1d8344823abdabf1a7ed6c46e1f Mon Sep 17 00:00:00 2001 From: Adam Serafin Date: Thu, 7 Sep 2023 11:14:32 +0200 Subject: [PATCH] V280_noetic (#391) --- README.md | 2 +- depthai_bridge/CMakeLists.txt | 7 +- .../depthai_bridge/BridgePublisher.hpp | 21 +- .../include/depthai_bridge/ImageConverter.hpp | 15 +- .../include/depthai_bridge/TFPublisher.hpp | 86 ++++++ .../TrackedFeaturesConverter.hpp | 56 ++++ depthai_bridge/package.xml | 3 + depthai_bridge/src/ImageConverter.cpp | 185 ++++++------ depthai_bridge/src/TFPublisher.cpp | 270 ++++++++++++++++++ .../src/TrackedFeaturesConverter.cpp | 51 ++++ depthai_descriptions/launch/urdf_launch.py | 121 -------- .../urdf/base_descr.urdf.xacro | 21 ++ .../urdf/include/base_macro.urdf.xacro | 78 +++++ .../urdf/include/depthai_macro.urdf.xacro | 186 +++++------- depthai_examples/CMakeLists.txt | 4 +- depthai_examples/launch/rgb_publisher.launch | 43 --- .../src/feature_tracker_publisher.cpp | 100 +++++++ depthai_filters/CMakeLists.txt | 4 +- depthai_filters/config/feature_tracker.yaml | 3 + .../feature_tracker_overlay.hpp | 49 ++++ .../include/depthai_filters/features_3d.hpp | 35 +++ .../launch/example_feature_3d.launch | 18 ++ .../launch/example_feature_tracker.launch | 19 ++ depthai_filters/nodelet_plugins.xml | 10 + depthai_filters/package.xml | 2 +- .../src/feature_tracker_overlay.cpp | 75 +++++ depthai_filters/src/features_3d.cpp | 69 +++++ depthai_ros_driver/CMakeLists.txt | 7 +- depthai_ros_driver/cfg/parameters.cfg | 154 +--------- depthai_ros_driver/config/camera.yaml | 102 +------ .../include/depthai_ros_driver/camera.hpp | 46 ++- .../dai_nodes/base_node.hpp | 36 ++- .../dai_nodes/nn/detection.hpp | 44 ++- .../dai_nodes/nn/spatial_detection.hpp | 4 +- .../dai_nodes/sensors/feature_tracker.hpp | 56 ++++ .../dai_nodes/sensors/mono.hpp | 4 +- .../dai_nodes/sensors/rgb.hpp | 3 +- .../dai_nodes/sensors/sensor_helpers.hpp | 26 +- .../dai_nodes/sensors/sensor_wrapper.hpp | 2 +- .../depthai_ros_driver/dai_nodes/stereo.hpp | 39 ++- .../dai_nodes/sys_logger.hpp | 40 +++ .../param_handlers/base_param_handler.hpp | 57 +++- .../feature_tracker_param_handler.hpp | 33 +++ .../param_handlers/nn_param_handler.hpp | 7 + .../param_handlers/sensor_param_handler.hpp | 3 + .../param_handlers/stereo_param_handler.hpp | 6 +- .../pipeline/base_pipeline.hpp | 1 - .../pipeline/pipeline_generator.hpp | 27 +- .../include/depthai_ros_driver/utils.hpp | 7 + depthai_ros_driver/launch/camera.launch | 25 +- depthai_ros_driver/launch/rgbd_pcl.launch | 2 +- depthai_ros_driver/package.xml | 1 + depthai_ros_driver/src/camera.cpp | 104 +++++-- .../src/dai_nodes/base_node.cpp | 25 +- .../src/dai_nodes/nn/segmentation.cpp | 2 +- .../src/dai_nodes/sensors/feature_tracker.cpp | 82 ++++++ .../src/dai_nodes/sensors/mono.cpp | 32 ++- .../src/dai_nodes/sensors/rgb.cpp | 38 +-- .../src/dai_nodes/sensors/sensor_helpers.cpp | 144 +++------- .../src/dai_nodes/sensors/sensor_wrapper.cpp | 11 + depthai_ros_driver/src/dai_nodes/stereo.cpp | 221 +++++++++----- .../src/dai_nodes/sys_logger.cpp | 85 ++++++ .../param_handlers/camera_param_handler.cpp | 36 ++- .../feature_tracker_param_handler.cpp | 29 ++ .../src/param_handlers/imu_param_handler.cpp | 25 +- .../src/param_handlers/nn_param_handler.cpp | 26 +- .../param_handlers/sensor_param_handler.cpp | 159 +++++++---- .../param_handlers/stereo_param_handler.cpp | 155 ++++++---- .../src/pipeline/base_types.cpp | 17 +- .../src/pipeline/pipeline_generator.cpp | 4 +- depthai_ros_driver/src/utils.cpp | 17 ++ depthai_ros_msgs/CMakeLists.txt 
| 2 + depthai_ros_msgs/msg/TrackedFeature.msg | 10 + depthai_ros_msgs/msg/TrackedFeatures.msg | 2 + 74 files changed, 2419 insertions(+), 1072 deletions(-) create mode 100644 depthai_bridge/include/depthai_bridge/TFPublisher.hpp create mode 100644 depthai_bridge/include/depthai_bridge/TrackedFeaturesConverter.hpp create mode 100644 depthai_bridge/src/TFPublisher.cpp create mode 100644 depthai_bridge/src/TrackedFeaturesConverter.cpp delete mode 100644 depthai_descriptions/launch/urdf_launch.py create mode 100644 depthai_descriptions/urdf/base_descr.urdf.xacro create mode 100644 depthai_descriptions/urdf/include/base_macro.urdf.xacro delete mode 100644 depthai_examples/launch/rgb_publisher.launch create mode 100644 depthai_examples/src/feature_tracker_publisher.cpp create mode 100644 depthai_filters/config/feature_tracker.yaml create mode 100644 depthai_filters/include/depthai_filters/feature_tracker_overlay.hpp create mode 100644 depthai_filters/include/depthai_filters/features_3d.hpp create mode 100644 depthai_filters/launch/example_feature_3d.launch create mode 100644 depthai_filters/launch/example_feature_tracker.launch create mode 100644 depthai_filters/src/feature_tracker_overlay.cpp create mode 100644 depthai_filters/src/features_3d.cpp create mode 100644 depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp create mode 100644 depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sys_logger.hpp create mode 100644 depthai_ros_driver/include/depthai_ros_driver/param_handlers/feature_tracker_param_handler.hpp create mode 100644 depthai_ros_driver/src/dai_nodes/sensors/feature_tracker.cpp create mode 100644 depthai_ros_driver/src/dai_nodes/sys_logger.cpp create mode 100644 depthai_ros_driver/src/param_handlers/feature_tracker_param_handler.cpp create mode 100644 depthai_ros_msgs/msg/TrackedFeature.msg create mode 100644 depthai_ros_msgs/msg/TrackedFeatures.msg diff --git a/README.md b/README.md index 590972ac..7aa9f6ca 100644 --- a/README.md +++ b/README.md @@ -128,7 +128,7 @@ docker run -it -v /dev/:/dev/ --privileged -e DISPLAY -v /tmp/.X11-unix:/tmp/.X1 Will only start `stereo_inertial_node` launch file (you can try different commands). 
### Running docker image on ROS2 ``` bash -docker run -it -v /dev/:/dev/ --privileged -e DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix depthai_ros roslaunch depthai_examples stereo_inertial_node.launch.py +docker run -it -v /dev/:/dev/ --privileged -e DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix depthai_ros ros2 launch depthai_examples stereo_inertial_node.launch.py ``` diff --git a/depthai_bridge/CMakeLists.txt b/depthai_bridge/CMakeLists.txt index 2be1ec1b..5578ed42 100644 --- a/depthai_bridge/CMakeLists.txt +++ b/depthai_bridge/CMakeLists.txt @@ -38,6 +38,9 @@ find_package(catkin REQUIRED COMPONENTS stereo_msgs std_msgs vision_msgs + tf2_ros + urdf + robot_state_publisher ) find_package(Boost REQUIRED) find_package(depthai CONFIG REQUIRED) @@ -48,12 +51,14 @@ FILE(GLOB LIB_SRC "src/ImgDetectionConverter.cpp" "src/SpatialDetectionConverter.cpp" "src/ImuConverter.cpp" +"src/TFPublisher.cpp" +"src/TrackedFeaturesConverter.cpp" ) catkin_package( INCLUDE_DIRS include LIBRARIES ${PROJECT_NAME} - CATKIN_DEPENDS depthai_ros_msgs camera_info_manager roscpp sensor_msgs std_msgs vision_msgs image_transport cv_bridge stereo_msgs + CATKIN_DEPENDS depthai_ros_msgs camera_info_manager roscpp sensor_msgs std_msgs vision_msgs image_transport cv_bridge stereo_msgs tf2_ros urdf robot_state_publisher ) list(APPEND DEPENDENCY_PUBLIC_LIBRARIES ${catkin_LIBRARIES}) diff --git a/depthai_bridge/include/depthai_bridge/BridgePublisher.hpp b/depthai_bridge/include/depthai_bridge/BridgePublisher.hpp index 5ba0e5b4..a08a2f41 100644 --- a/depthai_bridge/include/depthai_bridge/BridgePublisher.hpp +++ b/depthai_bridge/include/depthai_bridge/BridgePublisher.hpp @@ -37,7 +37,8 @@ class BridgePublisher { ConvertFunc converter, int queueSize, std::string cameraParamUri = "", - std::string cameraName = ""); + std::string cameraName = "", + bool lazyPublisher = true); BridgePublisher(std::shared_ptr daiMessageQueue, rosOrigin::NodeHandle nh, @@ -45,7 +46,8 @@ class BridgePublisher { ConvertFunc converter, int queueSize, ImageMsgs::CameraInfo cameraInfoData, - std::string cameraName); + std::string cameraName, + bool lazyPublisher = true); /** * Tag Dispacher function to to overload the Publisher to ImageTransport Publisher @@ -90,6 +92,7 @@ class BridgePublisher { std::unique_ptr _camInfoManager; bool _isCallbackAdded = false; bool _isImageMessage = false; // used to enable camera info manager + bool _lazyPublisher = true; }; template @@ -102,14 +105,16 @@ BridgePublisher::BridgePublisher(std::shared_ptr{}); } @@ -121,14 +126,16 @@ BridgePublisher::BridgePublisher(std::shared_ptr{}); } @@ -219,7 +226,7 @@ void BridgePublisher::publishHelper(std::shared_ptr inDa } mainSubCount = _rosPublisher->getNumSubscribers(); - if(mainSubCount > 0 || infoSubCount > 0) { + if(!_lazyPublisher || (mainSubCount > 0 || infoSubCount > 0)) { _converter(inDataPtr, opMsgs); while(opMsgs.size()) { diff --git a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp index 0937faaa..811983ae 100644 --- a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp +++ b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp @@ -47,10 +47,12 @@ class ImageConverter { _updateRosBaseTimeOnToRosMsg = update; } - void toRosMsgFromBitStream(std::shared_ptr inData, - std::deque& outImageMsgs, - dai::RawImgFrame::Type type, - const sensor_msgs::CameraInfo& info); + void convertFromBitstream(dai::RawImgFrame::Type srcType); + + void addExposureOffset(dai::CameraExposureOffset& offset); + void 
convertDispToDepth(); + + ImageMsgs::Image toRosMsgRawPtr(std::shared_ptr inData, const sensor_msgs::CameraInfo& info = sensor_msgs::CameraInfo()); void toRosMsg(std::shared_ptr inData, std::deque& outImageMsgs); ImagePtr toRosMsgPtr(std::shared_ptr inData); @@ -86,6 +88,11 @@ class ImageConverter { int64_t _totalNsChange{0}; // Whether to update the ROS base time on each message conversion bool _updateRosBaseTimeOnToRosMsg{false}; + dai::RawImgFrame::Type _srcType; + bool _fromBitstream = false; + bool _convertDispToDepth = false; + bool _addExpOffset = false; + dai::CameraExposureOffset _expOffset; }; } // namespace ros diff --git a/depthai_bridge/include/depthai_bridge/TFPublisher.hpp b/depthai_bridge/include/depthai_bridge/TFPublisher.hpp new file mode 100644 index 00000000..b68306b0 --- /dev/null +++ b/depthai_bridge/include/depthai_bridge/TFPublisher.hpp @@ -0,0 +1,86 @@ +#pragma once +#include "depthai-shared/common/CameraFeatures.hpp" +#include "depthai/device/CalibrationHandler.hpp" +#include "geometry_msgs/Quaternion.h" +#include "geometry_msgs/TransformStamped.h" +#include "nlohmann/json.hpp" +#include "robot_state_publisher/robot_state_publisher.h" +#include "ros/node_handle.h" +#include "tf2_ros/static_transform_broadcaster.h" + +namespace dai { +namespace ros { +class TFPublisher { + public: + explicit TFPublisher(::ros::NodeHandle node, + const dai::CalibrationHandler& calHandler, + const std::vector& camFeatures, + const std::string& camName, + const std::string& camModel, + const std::string& baseFrame, + const std::string& parentFrame, + const std::string& camPosX, + const std::string& camPosY, + const std::string& camPosZ, + const std::string& camRoll, + const std::string& camPitch, + const std::string& camYaw, + const std::string& imuFromDescr, + const std::string& customURDFLocation, + const std::string& customXacroArgs); + /* + * @brief Obtain URDF description by running Xacro with provided arguments. + */ + std::string getURDF(); + geometry_msgs::Quaternion quatFromRotM(nlohmann::json rotMatrix); + geometry_msgs::Vector3 transFromExtr(nlohmann::json translation); + + private: + /* + * @brief Converts model name to one of the available model families + */ + void convertModelName(); + /* + * @brief Prepare arguments for xacro command. If custom URDF location is not provided, check if model name is available in depthai_descriptions package. + */ + std::string prepareXacroArgs(); + /* + * @brief Get URDF description and set it as a parameter for robot_state_publisher + */ + void publishDescription(::ros::NodeHandle node); + /* + * @brief Publish camera transforms ("standard" and optical) based on calibration data. + * Frame names are based on socket names and use following convention: [base_frame]_[socket_name]_camera_frame and + * [base_frame]_[socket_name]_camera_optical_frame + */ + void publishCamTransforms(nlohmann::json camData, ::ros::NodeHandle node); + /* + * @brief Publish IMU transform based on calibration data. + * Frame name is based on IMU name and uses following convention: [base_frame]_imu_frame. + * If IMU extrinsics are not set, warning is printed out and imu frame is published with zero translation and rotation. + */ + void publishImuTransform(nlohmann::json json, ::ros::NodeHandle node); + /* + * @brief Check if model STL file is available in depthai_descriptions package. 
+ */ + bool modelNameAvailable(); + std::string getCamSocketName(int socketNum); + std::shared_ptr _tfPub; + std::shared_ptr _rsp; + std::string _camName; + std::string _camModel; + std::string _baseFrame; + std::string _parentFrame; + std::string _camPosX; + std::string _camPosY; + std::string _camPosZ; + std::string _camRoll; + std::string _camPitch; + std::string _camYaw; + std::string _imuFromDescr; + std::string _customURDFLocation; + std::string _customXacroArgs; + std::vector _camFeatures; +}; +} // namespace ros +} // namespace dai \ No newline at end of file diff --git a/depthai_bridge/include/depthai_bridge/TrackedFeaturesConverter.hpp b/depthai_bridge/include/depthai_bridge/TrackedFeaturesConverter.hpp new file mode 100644 index 00000000..fb188b96 --- /dev/null +++ b/depthai_bridge/include/depthai_bridge/TrackedFeaturesConverter.hpp @@ -0,0 +1,56 @@ +#pragma once + +#include + +#include +#include +#include + +#include "depthai/pipeline/datatype/TrackedFeatures.hpp" +#include "ros/time.h" + +namespace dai { + +namespace ros { + +class TrackedFeaturesConverter { + public: + // DetectionConverter() = default; + TrackedFeaturesConverter(std::string frameName, bool getBaseDeviceTimestamp = false); + ~TrackedFeaturesConverter(); + + /** + * @brief Handles cases in which the ROS time shifts forward or backward + * Should be called at regular intervals or on-change of ROS time, depending + * on monitoring. + * + */ + void updateRosBaseTime(); + + /** + * @brief Commands the converter to automatically update the ROS base time on message conversion based on variable + * + * @param update: bool whether to automatically update the ROS base time on message conversion + */ + void setUpdateRosBaseTimeOnToRosMsg(bool update = true) { + _updateRosBaseTimeOnToRosMsg = update; + } + + void toRosMsg(std::shared_ptr inFeatures, std::deque& featureMsgs); + + private: + const std::string _frameName; + std::chrono::time_point _steadyBaseTime; + ::ros::Time _rosBaseTime; + bool _getBaseDeviceTimestamp; + // For handling ROS time shifts and debugging + int64_t _totalNsChange{0}; + // Whether to update the ROS base time on each message conversion + bool _updateRosBaseTimeOnToRosMsg{false}; +}; + +} // namespace ros + +namespace rosBridge = ros; + +} // namespace dai diff --git a/depthai_bridge/package.xml b/depthai_bridge/package.xml index 1b869381..a2a24324 100644 --- a/depthai_bridge/package.xml +++ b/depthai_bridge/package.xml @@ -25,6 +25,9 @@ std_msgs stereo_msgs vision_msgs + tf2_ros + urdf + robot_state_publisher catkin diff --git a/depthai_bridge/src/ImageConverter.cpp b/depthai_bridge/src/ImageConverter.cpp index 1fbd6242..517c8a59 100644 --- a/depthai_bridge/src/ImageConverter.cpp +++ b/depthai_bridge/src/ImageConverter.cpp @@ -38,73 +38,32 @@ void ImageConverter::updateRosBaseTime() { updateBaseTime(_steadyBaseTime, _rosBaseTime, _totalNsChange); } -void ImageConverter::toRosMsgFromBitStream(std::shared_ptr inData, - std::deque& outImageMsgs, - dai::RawImgFrame::Type type, - const sensor_msgs::CameraInfo& info) { - if(_updateRosBaseTimeOnToRosMsg) { - updateRosBaseTime(); - } - std::chrono::_V2::steady_clock::time_point tstamp; - if(_getBaseDeviceTimestamp) - tstamp = inData->getTimestampDevice(); - else - tstamp = inData->getTimestamp(); - ImageMsgs::Image outImageMsg; - StdMsgs::Header header; - header.frame_id = _frameName; - header.stamp = getFrameTime(_rosBaseTime, _steadyBaseTime, tstamp); - std::string encoding; - int decodeFlags; - cv::Mat output; - switch(type) { - case 
dai::RawImgFrame::Type::BGR888i: { - encoding = sensor_msgs::image_encodings::BGR8; - decodeFlags = cv::IMREAD_COLOR; - break; - } - case dai::RawImgFrame::Type::GRAY8: { - encoding = sensor_msgs::image_encodings::MONO8; - decodeFlags = cv::IMREAD_GRAYSCALE; - break; - } - case dai::RawImgFrame::Type::RAW8: { - encoding = sensor_msgs::image_encodings::TYPE_16UC1; - decodeFlags = cv::IMREAD_GRAYSCALE; - break; - } - default: { - throw(std::runtime_error("Converted type not supported!")); - } - } +void ImageConverter::convertFromBitstream(dai::RawImgFrame::Type srcType) { + _fromBitstream = true; + _srcType = srcType; +} - output = cv::imdecode(cv::Mat(inData->getData()), decodeFlags); - - // converting disparity - if(type == dai::RawImgFrame::Type::RAW8) { - auto factor = (info.K[0] * info.P[3]); - cv::Mat depthOut = cv::Mat(cv::Size(output.cols, output.rows), CV_16UC1); - depthOut.forEach([&output, &factor](short& pixel, const int* position) -> void { - auto disp = output.at(position); - if(disp == 0) - pixel = 0; - else - pixel = factor / disp; - }); - output = depthOut.clone(); - } - cv_bridge::CvImage(header, encoding, output).toImageMsg(outImageMsg); - outImageMsgs.push_back(outImageMsg); - return; +void ImageConverter::convertDispToDepth() { + _convertDispToDepth = true; } -void ImageConverter::toRosMsg(std::shared_ptr inData, std::deque& outImageMsgs) { +void ImageConverter::addExposureOffset(dai::CameraExposureOffset& offset) { + _expOffset = offset; + _addExpOffset = true; +} + +ImageMsgs::Image ImageConverter::toRosMsgRawPtr(std::shared_ptr inData, const sensor_msgs::CameraInfo& info) { if(_updateRosBaseTimeOnToRosMsg) { updateRosBaseTime(); } std::chrono::_V2::steady_clock::time_point tstamp; if(_getBaseDeviceTimestamp) - tstamp = inData->getTimestampDevice(); + if(_addExpOffset) + tstamp = inData->getTimestampDevice(_expOffset); + else + tstamp = inData->getTimestampDevice(); + else if(_addExpOffset) + tstamp = inData->getTimestamp(_expOffset); else tstamp = inData->getTimestamp(); ImageMsgs::Image outImageMsg; @@ -113,6 +72,54 @@ void ImageConverter::toRosMsg(std::shared_ptr inData, std::deque< header.stamp = getFrameTime(_rosBaseTime, _steadyBaseTime, tstamp); + if(_fromBitstream) { + std::string encoding; + int decodeFlags; + int channels; + cv::Mat output; + switch(_srcType) { + case dai::RawImgFrame::Type::BGR888i: { + encoding = sensor_msgs::image_encodings::BGR8; + decodeFlags = cv::IMREAD_COLOR; + channels = CV_8UC3; + break; + } + case dai::RawImgFrame::Type::GRAY8: { + encoding = sensor_msgs::image_encodings::MONO8; + decodeFlags = cv::IMREAD_GRAYSCALE; + channels = CV_8UC1; + break; + } + case dai::RawImgFrame::Type::RAW8: { + encoding = sensor_msgs::image_encodings::TYPE_16UC1; + decodeFlags = cv::IMREAD_ANYDEPTH; + channels = CV_16UC1; + break; + } + default: { + std::cout << _frameName << static_cast(_srcType) << std::endl; + throw(std::runtime_error("Converted type not supported!")); + } + } + + output = cv::imdecode(cv::Mat(inData->getData()), decodeFlags); + + // converting disparity + if(_convertDispToDepth) { + auto factor = std::abs(info.P[3]) * 10000; + cv::Mat depthOut = cv::Mat(cv::Size(output.cols, output.rows), CV_16UC1); + depthOut.forEach([&output, &factor](uint16_t& pixel, const int* position) -> void { + auto disp = output.at(position); + if(disp == 0) + pixel = 0; + else + pixel = factor / disp; + }); + output = depthOut.clone(); + } + cv_bridge::CvImage(header, encoding, output).toImageMsg(outImageMsg); + return outImageMsg; + } 
if(planarEncodingEnumMap.find(inData->getType()) != planarEncodingEnumMap.end()) { cv::Mat mat, output; cv::Size size = {0, 0}; @@ -130,7 +137,7 @@ void ImageConverter::toRosMsg(std::shared_ptr inData, std::deque< break; default: - std::runtime_error("Invalid dataType inputs.."); + throw std::runtime_error("Invalid dataType inputs.."); break; } mat = cv::Mat(size, type, inData->getData().data()); @@ -138,22 +145,20 @@ void ImageConverter::toRosMsg(std::shared_ptr inData, std::deque< switch(inData->getType()) { case dai::RawImgFrame::Type::RGB888p: { cv::Size s(inData->getWidth(), inData->getHeight()); - std::vector channels; - // RGB - channels.push_back(cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 2)); - channels.push_back(cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 1)); - channels.push_back(cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 0)); - cv::merge(channels, output); + cv::Mat m1 = cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 2); + cv::Mat m2 = cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 1); + cv::Mat m3 = cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 0); + cv::Mat channels[3] = {m1, m2, m3}; + cv::merge(channels, 3, output); } break; case dai::RawImgFrame::Type::BGR888p: { cv::Size s(inData->getWidth(), inData->getHeight()); - std::vector channels; - // BGR - channels.push_back(cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 0)); - channels.push_back(cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 1)); - channels.push_back(cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 2)); - cv::merge(channels, output); + cv::Mat m1 = cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 0); + cv::Mat m2 = cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 1); + cv::Mat m3 = cv::Mat(s, CV_8UC1, inData->getData().data() + s.area() * 2); + cv::Mat channels[3] = {m1, m2, m3}; + cv::merge(channels, 3, output); } break; case dai::RawImgFrame::Type::YUV420p: @@ -184,18 +189,24 @@ void ImageConverter::toRosMsg(std::shared_ptr inData, std::deque< outImageMsg.is_bigendian = true; size_t size = inData->getData().size(); - outImageMsg.data.resize(size); - unsigned char* imageMsgDataPtr = reinterpret_cast(&outImageMsg.data[0]); - unsigned char* daiImgData = reinterpret_cast(inData->getData().data()); - - // TODO(Sachin): Try using assign since it is a vector - // img->data.assign(packet.data->cbegin(), packet.data->cend()); - memcpy(imageMsgDataPtr, daiImgData, size); + outImageMsg.data.reserve(size); + outImageMsg.data = std::move(inData->getData()); } + return outImageMsg; +} + +void ImageConverter::toRosMsg(std::shared_ptr inData, std::deque& outImageMsgs) { + auto outImageMsg = toRosMsgRawPtr(inData); outImageMsgs.push_back(outImageMsg); return; } +ImagePtr ImageConverter::toRosMsgPtr(std::shared_ptr inData) { + auto msg = toRosMsgRawPtr(inData); + ImagePtr ptr = boost::make_shared(msg); + return ptr; +} + // TODO(sachin): Not tested void ImageConverter::toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outData) { std::unordered_map::iterator revEncodingIter; @@ -204,7 +215,7 @@ void ImageConverter::toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outD return pair.second == inMsg.encoding; }); if(revEncodingIter == encodingEnumMap.end()) - std::runtime_error( + throw std::runtime_error( "Unable to find DAI encoding for the corresponding " "sensor_msgs::image.encoding stream"); @@ -243,14 +254,6 @@ void ImageConverter::toDaiMsg(const ImageMsgs::Image& inMsg, dai::ImgFrame& outD 
outData.setType(revEncodingIter->first); } -ImagePtr ImageConverter::toRosMsgPtr(std::shared_ptr inData) { - std::deque msgQueue; - toRosMsg(inData, msgQueue); - auto msg = msgQueue.front(); - ImagePtr ptr = boost::make_shared(msg); - return ptr; -} - void ImageConverter::planarToInterleaved(const std::vector& srcData, std::vector& destData, int w, int h, int numPlanes, int bpp) { if(numPlanes == 3) { // optimization (cache) @@ -267,7 +270,7 @@ void ImageConverter::planarToInterleaved(const std::vector& srcData, st destData[i * 3 + 2] = r; } } else { - std::runtime_error( + throw std::runtime_error( "If you encounter the scenario where you need this " "please create an issue on github"); } @@ -287,7 +290,7 @@ void ImageConverter::interleavedToPlanar(const std::vector& srcData, st destData[i + w * h * 2] = r; } } else { - std::runtime_error( + throw std::runtime_error( "If you encounter the scenario where you need this " "please create an issue on github"); } @@ -301,7 +304,7 @@ cv::Mat ImageConverter::rosMsgtoCvMat(ImageMsgs::Image& inMsg) { cv::cvtColor(nv_frame, rgb, cv::COLOR_YUV2BGR_NV12); return rgb; } else { - std::runtime_error("This frature is still WIP"); + throw std::runtime_error("This feature is still WIP"); return rgb; } } diff --git a/depthai_bridge/src/TFPublisher.cpp b/depthai_bridge/src/TFPublisher.cpp new file mode 100644 index 00000000..6c2c9d96 --- /dev/null +++ b/depthai_bridge/src/TFPublisher.cpp @@ -0,0 +1,270 @@ +#include "depthai_bridge/TFPublisher.hpp" + +#include +#include +#include +#include + +#include +#include +#include + +#include "geometry_msgs/Quaternion.h" +#include "geometry_msgs/TransformStamped.h" +#include "kdl/tree.hpp" +#include "kdl_parser/kdl_parser.hpp" +#include "nlohmann/json.hpp" +#include "ros/node_handle.h" +#include "ros/package.h" +#include "tf2/LinearMath/Matrix3x3.h" +#include "tf2/LinearMath/Quaternion.h" +#include "tf2_geometry_msgs/tf2_geometry_msgs.h" +#include "urdf/model.h" + +namespace dai { +namespace ros { +TFPublisher::TFPublisher(::ros::NodeHandle node, + const dai::CalibrationHandler& calHandler, + const std::vector& camFeatures, + const std::string& camName, + const std::string& camModel, + const std::string& baseFrame, + const std::string& parentFrame, + const std::string& camPosX, + const std::string& camPosY, + const std::string& camPosZ, + const std::string& camRoll, + const std::string& camPitch, + const std::string& camYaw, + const std::string& imuFromDescr, + const std::string& customURDFLocation, + const std::string& customXacroArgs) + : _camName(camName), + _camModel(camModel), + _baseFrame(baseFrame), + _parentFrame(parentFrame), + _camPosX(camPosX), + _camPosY(camPosY), + _camPosZ(camPosZ), + _camRoll(camRoll), + _camPitch(camPitch), + _camYaw(camYaw), + _camFeatures(camFeatures), + _imuFromDescr(imuFromDescr), + _customURDFLocation(customURDFLocation), + _customXacroArgs(customXacroArgs) { + _tfPub = std::make_shared(); + + auto json = calHandler.eepromToJson(); + auto camData = json["cameraData"]; + publishDescription(node); + publishCamTransforms(camData, node); + if(_imuFromDescr != "true") { + publishImuTransform(json, node); + } +} + +void TFPublisher::publishDescription(::ros::NodeHandle node) { + auto urdf = getURDF(); + urdf::Model model; + model.initString(urdf); + KDL::Tree tree; + if(!kdl_parser::treeFromUrdfModel(model, tree)) { + ROS_ERROR("Failed to extract kdl tree from xml robot description"); + throw std::runtime_error("Failed to extract kdl tree from xml robot description"); + } + _rsp =
std::make_shared(tree, model); + _rsp->publishFixedTransforms(true); + node.setParam("robot_description", urdf); + ROS_INFO("Published URDF"); +} + +void TFPublisher::publishCamTransforms(nlohmann::json camData, ::ros::NodeHandle node) { + for(auto& cam : camData) { + geometry_msgs::TransformStamped ts; + geometry_msgs::TransformStamped opticalTS; + ts.header.stamp = ::ros::Time::now(); + opticalTS.header.stamp = ts.header.stamp; + auto extrinsics = cam[1]["extrinsics"]; + + ts.transform.rotation = quatFromRotM(extrinsics["rotationMatrix"]); + ts.transform.translation = transFromExtr(extrinsics["translation"]); + + std::string name = getCamSocketName(cam[0]); + ts.child_frame_id = _baseFrame + std::string("_") + name + std::string("_camera_frame"); + // check if the camera is at the end of the chain + if(extrinsics["toCameraSocket"] != -1) { + ts.header.frame_id = _baseFrame + std::string("_") + getCamSocketName(extrinsics["toCameraSocket"].get()) + std::string("_camera_frame"); + } else { + ts.header.frame_id = _baseFrame; + ts.transform.rotation.w = 1.0; + ts.transform.rotation.x = 0.0; + ts.transform.rotation.y = 0.0; + ts.transform.rotation.z = 0.0; + } + // rotate optical transform + opticalTS.child_frame_id = _baseFrame + std::string("_") + name + std::string("_camera_optical_frame"); + opticalTS.header.frame_id = ts.child_frame_id; + opticalTS.transform.rotation.w = 0.5; + opticalTS.transform.rotation.x = -0.5; + opticalTS.transform.rotation.y = 0.5; + opticalTS.transform.rotation.z = -0.5; + _tfPub->sendTransform(ts); + _tfPub->sendTransform(opticalTS); + } +} +void TFPublisher::publishImuTransform(nlohmann::json json, ::ros::NodeHandle node) { + geometry_msgs::TransformStamped ts; + ts.header.stamp = ::ros::Time::now(); + auto imuExtr = json["imuExtrinsics"]; + if(imuExtr["toCameraSocket"] != -1) { + ts.header.frame_id = _baseFrame + std::string("_") + getCamSocketName(imuExtr["toCameraSocket"].get()) + std::string("_camera_frame"); + } else { + ts.header.frame_id = _baseFrame; + } + ts.child_frame_id = _baseFrame + std::string("_imu_frame"); + + ts.transform.rotation = quatFromRotM(imuExtr["rotationMatrix"]); + ts.transform.translation = transFromExtr(imuExtr["translation"]); + bool zeroTrans = ts.transform.translation.x == 0 && ts.transform.translation.y == 0 && ts.transform.translation.z == 0; + bool zeroRot = ts.transform.rotation.w == 1 && ts.transform.rotation.x == 0 && ts.transform.rotation.y == 0 && ts.transform.rotation.z == 0; + if(zeroTrans || zeroRot) { + ROS_WARN("IMU extrinsics appear to be default.
Check if the IMU is calibrated."); + ts.transform.rotation.w = 1.0; + ts.transform.rotation.x = 0.0; + } + _tfPub->sendTransform(ts); +} + +std::string TFPublisher::getCamSocketName(int socketNum) { + std::string name; + for(auto& cam : _camFeatures) { + if(cam.socket == static_cast(socketNum)) { + if(cam.name == "color" || cam.name == "center") { + name = "rgb"; + } else { + name = cam.name; + } + return name; + } + } + throw std::runtime_error("Camera socket not found"); +} + +geometry_msgs::Vector3 TFPublisher::transFromExtr(nlohmann::json translation) { + geometry_msgs::Vector3 trans; + // optical coordinates to ROS + trans.x = translation["y"].get() / -100.0; + trans.y = translation["x"].get() / -100.0; + trans.z = translation["z"].get() / 100.0; + return trans; +} +geometry_msgs::Quaternion TFPublisher::quatFromRotM(nlohmann::json rotMatrix) { + tf2::Matrix3x3 m(rotMatrix[0][0], + rotMatrix[0][1], + rotMatrix[0][2], + + rotMatrix[1][0], + rotMatrix[1][1], + rotMatrix[1][2], + + rotMatrix[2][0], + rotMatrix[2][1], + rotMatrix[2][2]); + + tf2::Quaternion q; + m.getRotation(q); + geometry_msgs::Quaternion msg_quat = tf2::toMsg(q); + return msg_quat; +} + +bool TFPublisher::modelNameAvailable() { + std::string path = ::ros::package::getPath("depthai_descriptions") + "/urdf/models/"; + DIR* dir; + struct dirent* ent; + convertModelName(); + if((dir = opendir(path.c_str())) != NULL) { + while((ent = readdir(dir)) != NULL) { + auto name = std::string(ent->d_name); + ROS_DEBUG("Found model: %s", name.c_str()); + if(name == _camModel + ".stl") { + return true; + } + } + closedir(dir); + } else { + throw std::runtime_error("Could not open depthai_descriptions package directory"); + } + return false; +} + +std::string TFPublisher::prepareXacroArgs() { + if(!_customURDFLocation.empty() || !modelNameAvailable()) { + ROS_ERROR( + "Model name %s not found in depthai_descriptions package. If camera model is autodetected, please notify developers. 
Using default model: OAK-D", + _camModel.c_str()); + _camModel = "OAK-D"; + } + + std::string xacroArgs = "camera_name:=" + _camName; + xacroArgs += " camera_model:=" + _camModel; + xacroArgs += " base_frame:=" + _baseFrame; + xacroArgs += " parent_frame:=" + _parentFrame; + xacroArgs += " cam_pos_x:=" + _camPosX; + xacroArgs += " cam_pos_y:=" + _camPosY; + xacroArgs += " cam_pos_z:=" + _camPosZ; + xacroArgs += " cam_roll:=" + _camRoll; + xacroArgs += " cam_pitch:=" + _camPitch; + xacroArgs += " cam_yaw:=" + _camYaw; + xacroArgs += " has_imu:=" + _imuFromDescr; + return xacroArgs; +} + +void TFPublisher::convertModelName() { + if(_camModel.find("OAK-D-PRO-POE") != std::string::npos || _camModel.find("OAK-D-PRO-W-POE") != std::string::npos + || _camModel.find("OAK-D-S2-POE") != std::string::npos) { + _camModel = "OAK-D-POE"; + } else if(_camModel.find("OAK-D-LITE") != std::string::npos) { + _camModel = "OAK-D-PRO"; + } else if(_camModel.find("OAK-D-S2") != std::string::npos) { + _camModel = "OAK-D-PRO"; + } else if(_camModel.find("OAK-D-PRO-W") != std::string::npos) { + _camModel = "OAK-D-PRO"; + } else if(_camModel.find("OAK-D-PRO") != std::string::npos) { + _camModel = "OAK-D-PRO"; + } else if(_camModel.find("OAK-D-POE")) { + _camModel = "OAK-D-POE"; + } else if(_camModel.find("OAK-D") != std::string::npos) { + _camModel = "OAK-D"; + } else { + ROS_WARN("Unable to match model name: %s to available model family.", _camModel.c_str()); + } +} + +std::string TFPublisher::getURDF() { + std::string args, path; + if(_customXacroArgs.empty()) { + args = prepareXacroArgs(); + } else { + args = _customXacroArgs; + } + if(_customURDFLocation.empty()) { + path = ::ros::package::getPath("depthai_descriptions") + "/urdf/base_descr.urdf.xacro "; + } else { + path = _customURDFLocation + " "; + } + std::string cmd = "xacro " + path + args; + ROS_DEBUG("Xacro command: %s", cmd.c_str()); + std::array buffer; + std::string result; + std::unique_ptr pipe(popen(cmd.c_str(), "r"), pclose); + if(!pipe) { + throw std::runtime_error("popen() failed!"); + } + while(fgets(buffer.data(), buffer.size(), pipe.get()) != nullptr) { + result += buffer.data(); + } + return result; +} +} // namespace ros +} // namespace dai \ No newline at end of file diff --git a/depthai_bridge/src/TrackedFeaturesConverter.cpp b/depthai_bridge/src/TrackedFeaturesConverter.cpp new file mode 100644 index 00000000..75f7ef38 --- /dev/null +++ b/depthai_bridge/src/TrackedFeaturesConverter.cpp @@ -0,0 +1,51 @@ +#include "depthai_bridge/TrackedFeaturesConverter.hpp" + +#include "depthai_bridge/depthaiUtility.hpp" + +namespace dai { + +namespace ros { + +TrackedFeaturesConverter::TrackedFeaturesConverter(std::string frameName, bool getBaseDeviceTimestamp) + : _frameName(frameName), _steadyBaseTime(std::chrono::steady_clock::now()), _getBaseDeviceTimestamp(getBaseDeviceTimestamp) { + _rosBaseTime = ::ros::Time::now(); +} + +TrackedFeaturesConverter::~TrackedFeaturesConverter() = default; + +void TrackedFeaturesConverter::updateRosBaseTime() { + updateBaseTime(_steadyBaseTime, _rosBaseTime, _totalNsChange); +} + +void TrackedFeaturesConverter::toRosMsg(std::shared_ptr inFeatures, std::deque& featureMsgs) { + if(_updateRosBaseTimeOnToRosMsg) { + updateRosBaseTime(); + } + std::chrono::_V2::steady_clock::time_point tstamp; + if(_getBaseDeviceTimestamp) + tstamp = inFeatures->getTimestampDevice(); + else + tstamp = inFeatures->getTimestamp(); + + depthai_ros_msgs::TrackedFeatures msg; + + msg.header.stamp = getFrameTime(_rosBaseTime, _steadyBaseTime, 
tstamp); + msg.header.frame_id = _frameName; + msg.features.reserve(inFeatures->trackedFeatures.size()); + + for(const auto& feature : inFeatures->trackedFeatures) { + depthai_ros_msgs::TrackedFeature ft; + ft.header = msg.header; + ft.position.x = feature.position.x; + ft.position.y = feature.position.y; + ft.age = feature.age; + ft.id = feature.id; + ft.harris_score = feature.harrisScore; + ft.tracking_error = feature.trackingError; + msg.features.emplace_back(ft); + } + featureMsgs.push_back(msg); +} + +} // namespace ros +} // namespace dai \ No newline at end of file diff --git a/depthai_descriptions/launch/urdf_launch.py b/depthai_descriptions/launch/urdf_launch.py deleted file mode 100644 index 0e47fc66..00000000 --- a/depthai_descriptions/launch/urdf_launch.py +++ /dev/null @@ -1,121 +0,0 @@ -from launch import LaunchDescription -from launch.actions import DeclareLaunchArgument -from launch_ros.actions import Node -from launch.substitutions import LaunchConfiguration, Command -from ament_index_python.packages import get_package_share_directory -import os - -from launch import LaunchDescription -from launch.actions import DeclareLaunchArgument -from launch_ros.actions import Node -from launch.substitutions import LaunchConfiguration, Command -from ament_index_python.packages import get_package_share_directory -import os - -def generate_launch_description(): - bringup_dir = get_package_share_directory('depthai_bridge') - xacro_path = os.path.join(bringup_dir, 'urdf', 'depthai_descr.urdf.xacro') - - camera_model = LaunchConfiguration('camera_model', default = 'OAK-D') - tf_prefix = LaunchConfiguration('tf_prefix', default = 'oak') - base_frame = LaunchConfiguration('base_frame', default = 'oak-d_frame') - parent_frame = LaunchConfiguration('parent_frame', default = 'oak-d-base-frame') - cam_pos_x = LaunchConfiguration('cam_pos_x', default = '0.0') - cam_pos_y = LaunchConfiguration('cam_pos_y', default = '0.0') - cam_pos_z = LaunchConfiguration('cam_pos_z', default = '0.0') - cam_roll = LaunchConfiguration('cam_roll', default = '1.5708') - cam_pitch = LaunchConfiguration('cam_pitch', default = '0.0') - cam_yaw = LaunchConfiguration('cam_yaw', default = '1.5708') - namespace = LaunchConfiguration('namespace', default = '') - - declare_namespace_cmd = DeclareLaunchArgument( - 'namespace', - default_value=namespace, - description='Specifies the namespace of the robot state publisher node. Default value will be ""') - - declare_camera_model_cmd = DeclareLaunchArgument( - 'camera_model', - default_value=camera_model, - description='The model of the camera. Using a wrong camera model can disable camera features. Valid models: `OAK-D, OAK-D-LITE`.') - - declare_tf_prefix_cmd = DeclareLaunchArgument( - 'tf_prefix', - default_value=tf_prefix, - description='The name of the camera.
It can be different from the camera model and it will be used as node `namespace`.') - - declare_base_frame_cmd = DeclareLaunchArgument( - 'base_frame', - default_value=base_frame, - description='Name of the base link.') - - declare_parent_frame_cmd = DeclareLaunchArgument( - 'parent_frame', - default_value=parent_frame, - description='Name of the parent link from other a robot TF for example that can be connected to the base of the OAK.') - - declare_pos_x_cmd = DeclareLaunchArgument( - 'cam_pos_x', - default_value=cam_pos_x, - description='Position X of the camera with respect to the base frame.') - - declare_pos_y_cmd = DeclareLaunchArgument( - 'cam_pos_y', - default_value=cam_pos_y, - description='Position Y of the camera with respect to the base frame.') - - declare_pos_z_cmd = DeclareLaunchArgument( - 'cam_pos_z', - default_value=cam_pos_z, - description='Position Z of the camera with respect to the base frame.') - - declare_roll_cmd = DeclareLaunchArgument( - 'cam_roll', - default_value=cam_roll, - description='Roll orientation of the camera with respect to the base frame.') - - declare_pitch_cmd = DeclareLaunchArgument( - 'cam_pitch', - default_value=cam_pitch, - description='Pitch orientation of the camera with respect to the base frame.') - - declare_yaw_cmd = DeclareLaunchArgument( - 'cam_yaw', - default_value=cam_yaw, - description='Yaw orientation of the camera with respect to the base frame.') - - rsp_node = Node( - package='robot_state_publisher', - executable='robot_state_publisher', - name='oak_state_publisher', - namespace=namespace, - parameters=[{'robot_description': Command( - [ - 'xacro', ' ', xacro_path, ' ', - 'camera_name:=', tf_prefix, ' ', - 'camera_model:=', camera_model, ' ', - 'base_frame:=', base_frame, ' ', - 'parent_frame:=', parent_frame, ' ', - 'cam_pos_x:=', cam_pos_x, ' ', - 'cam_pos_y:=', cam_pos_y, ' ', - 'cam_pos_z:=', cam_pos_z, ' ', - 'cam_roll:=', cam_roll, ' ', - 'cam_pitch:=', cam_pitch, ' ', - 'cam_yaw:=', cam_yaw - ])}] - ) - - ld = LaunchDescription() - ld.add_action(declare_tf_prefix_cmd) - ld.add_action(declare_camera_model_cmd) - ld.add_action(declare_base_frame_cmd) - ld.add_action(declare_parent_frame_cmd) - ld.add_action(declare_pos_x_cmd) - ld.add_action(declare_pos_y_cmd) - ld.add_action(declare_pos_z_cmd) - ld.add_action(declare_roll_cmd) - ld.add_action(declare_pitch_cmd) - ld.add_action(declare_yaw_cmd) - ld.add_action(declare_namespace_cmd) - - ld.add_action(rsp_node) - return ld \ No newline at end of file diff --git a/depthai_descriptions/urdf/base_descr.urdf.xacro b/depthai_descriptions/urdf/base_descr.urdf.xacro new file mode 100644 index 00000000..e5d04601 --- /dev/null +++ b/depthai_descriptions/urdf/base_descr.urdf.xacro @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/depthai_descriptions/urdf/include/base_macro.urdf.xacro b/depthai_descriptions/urdf/include/base_macro.urdf.xacro new file mode 100644 index 00000000..e46e3bca --- /dev/null +++ b/depthai_descriptions/urdf/include/base_macro.urdf.xacro @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/depthai_descriptions/urdf/include/depthai_macro.urdf.xacro b/depthai_descriptions/urdf/include/depthai_macro.urdf.xacro index fc1c9e6f..d16b7f43 100644 --- a/depthai_descriptions/urdf/include/depthai_macro.urdf.xacro +++ b/depthai_descriptions/urdf/include/depthai_macro.urdf.xacro @@ -1,120 +1,76 @@ - 
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + cam_roll cam_pitch cam_yaw r:=0.8 g:=0.8 b:=0.8 a:=0.8 "> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + \ No newline at end of file diff --git a/depthai_examples/CMakeLists.txt b/depthai_examples/CMakeLists.txt index 61562fa1..fb180647 100644 --- a/depthai_examples/CMakeLists.txt +++ b/depthai_examples/CMakeLists.txt @@ -108,9 +108,10 @@ dai_add_node(mobilenet_node src/mobilenet_publisher.cpp) dai_add_node(rgb_stereo_node src/rgb_stereo_node.cpp) dai_add_node(rgb_subscriber_node src/rgb_video_subscriber.cpp) dai_add_node(stereo_inertial_node src/stereo_inertial_publisher.cpp) - dai_add_node(stereo_node src/stereo_publisher.cpp) dai_add_node(yolov4_spatial_node src/yolov4_spatial_publisher.cpp) +dai_add_node(feature_tracker src/feature_tracker_publisher.cpp) + target_compile_definitions(mobilenet_node PRIVATE BLOB_NAME="${mobilenet_blob_name}") target_compile_definitions(yolov4_spatial_node PRIVATE BLOB_NAME="${tiny_yolo_v4_blob_name}") @@ -131,6 +132,7 @@ install(TARGETS stereo_inertial_node stereo_node yolov4_spatial_node + feature_tracker LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} ) diff --git a/depthai_examples/launch/rgb_publisher.launch b/depthai_examples/launch/rgb_publisher.launch deleted file mode 100644 index 5c6499f8..00000000 --- a/depthai_examples/launch/rgb_publisher.launch +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -rgb_subscriber_node - diff --git a/depthai_examples/src/feature_tracker_publisher.cpp b/depthai_examples/src/feature_tracker_publisher.cpp new file mode 100644 index 00000000..5d3fbd5b --- /dev/null +++ b/depthai_examples/src/feature_tracker_publisher.cpp @@ -0,0 +1,100 @@ +#include +#include +#include +#include + +#include "depthai_ros_msgs/TrackedFeatures.h" +#include "ros/ros.h" + +// Inludes common necessary includes for development using depthai library +#include "depthai/device/DataQueue.hpp" +#include "depthai/device/Device.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai/pipeline/node/ColorCamera.hpp" +#include "depthai/pipeline/node/FeatureTracker.hpp" +#include "depthai/pipeline/node/MonoCamera.hpp" +#include "depthai/pipeline/node/StereoDepth.hpp" +#include "depthai/pipeline/node/XLinkIn.hpp" +#include "depthai/pipeline/node/XLinkOut.hpp" +#include "depthai_bridge/BridgePublisher.hpp" +#include "depthai_bridge/ImageConverter.hpp" +#include "depthai_bridge/TrackedFeaturesConverter.hpp" + +int main(int argc, char** argv) { + ros::init(argc, argv, "feature_tracker_node"); + ros::NodeHandle pnh("~"); + + dai::Pipeline pipeline; + + // Define sources and outputs + auto monoLeft = pipeline.create(); + auto monoRight = pipeline.create(); + auto featureTrackerLeft = pipeline.create(); + auto featureTrackerRight = pipeline.create(); + + auto xoutTrackedFeaturesLeft = pipeline.create(); + auto xoutTrackedFeaturesRight = pipeline.create(); + + xoutTrackedFeaturesLeft->setStreamName("trackedFeaturesLeft"); + xoutTrackedFeaturesRight->setStreamName("trackedFeaturesRight"); + + // Properties + monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P); + monoLeft->setCamera("left"); + 
monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_720_P); + monoRight->setCamera("right"); + + // Linking + monoLeft->out.link(featureTrackerLeft->inputImage); + featureTrackerLeft->outputFeatures.link(xoutTrackedFeaturesLeft->input); + + monoRight->out.link(featureTrackerRight->inputImage); + featureTrackerRight->outputFeatures.link(xoutTrackedFeaturesRight->input); + + // By default the least amount of resources is allocated + // increasing it improves performance when optical flow is enabled + auto numShaves = 2; + auto numMemorySlices = 2; + featureTrackerLeft->setHardwareResources(numShaves, numMemorySlices); + featureTrackerRight->setHardwareResources(numShaves, numMemorySlices); + + auto featureTrackerConfig = featureTrackerRight->initialConfig.get(); + + std::shared_ptr device; + std::vector availableDevices = dai::Device::getAllAvailableDevices(); + + std::cout << "Listing available devices..." << std::endl; + device = std::make_shared(pipeline); + auto outputFeaturesLeftQueue = device->getOutputQueue("trackedFeaturesLeft", 8, false); + auto outputFeaturesRightQueue = device->getOutputQueue("trackedFeaturesRight", 8, false); + std::string tfPrefix = "oak"; + dai::rosBridge::TrackedFeaturesConverter leftConverter(tfPrefix + "_left_camera_optical_frame", true); + + dai::rosBridge::TrackedFeaturesConverter rightConverter(tfPrefix + "_right_camera_optical_frame", true); + + dai::rosBridge::BridgePublisher featuresPubL( + outputFeaturesLeftQueue, + pnh, + std::string("features_left"), + std::bind(&dai::rosBridge::TrackedFeaturesConverter::toRosMsg, &leftConverter, std::placeholders::_1, std::placeholders::_2), + 30, + "", + "features_left"); + + featuresPubL.addPublisherCallback(); + + dai::rosBridge::BridgePublisher featuresPubR( + outputFeaturesRightQueue, + pnh, + std::string("features_right"), + std::bind(&dai::rosBridge::TrackedFeaturesConverter::toRosMsg, &rightConverter, std::placeholders::_1, std::placeholders::_2), + 30, + "", + "features_right"); + + featuresPubR.addPublisherCallback(); + std::cout << "Ready."
<< std::endl; + ros::spin(); + + return 0; +} diff --git a/depthai_filters/CMakeLists.txt b/depthai_filters/CMakeLists.txt index c5a27a9c..52309bf4 100644 --- a/depthai_filters/CMakeLists.txt +++ b/depthai_filters/CMakeLists.txt @@ -22,7 +22,7 @@ endif() ## Find catkin macros and libraries ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) ## is used, also find other catkin packages -find_package(catkin REQUIRED COMPONENTS roscpp dynamic_reconfigure message_filters sensor_msgs vision_msgs visualization_msgs cv_bridge image_transport nodelet) +find_package(catkin REQUIRED COMPONENTS roscpp dynamic_reconfigure message_filters sensor_msgs vision_msgs visualization_msgs cv_bridge image_transport depthai_ros_msgs nodelet) ## Generate dynamic reconfigure parameters in the 'cfg' folder @@ -56,6 +56,8 @@ add_library(${PROJECT_NAME} SHARED src/segmentation_overlay.cpp src/wls_filter.cpp src/spatial_bb.cpp + src/feature_tracker_overlay.cpp + src/features_3d.cpp src/utils.cpp ) diff --git a/depthai_filters/config/feature_tracker.yaml b/depthai_filters/config/feature_tracker.yaml new file mode 100644 index 00000000..cedb4fb8 --- /dev/null +++ b/depthai_filters/config/feature_tracker.yaml @@ -0,0 +1,3 @@ +/oak: + camera_i_nn_type: none + rgb_i_enable_feature_tracker: true \ No newline at end of file diff --git a/depthai_filters/include/depthai_filters/feature_tracker_overlay.hpp b/depthai_filters/include/depthai_filters/feature_tracker_overlay.hpp new file mode 100644 index 00000000..38f22697 --- /dev/null +++ b/depthai_filters/include/depthai_filters/feature_tracker_overlay.hpp @@ -0,0 +1,49 @@ +#pragma once + +#include +#include + +#include "cv_bridge/cv_bridge.h" +#include "depthai_ros_msgs/TrackedFeatures.h" +#include "geometry_msgs/Point.h" +#include "message_filters/subscriber.h" +#include "message_filters/sync_policies/approximate_time.h" +#include "message_filters/synchronizer.h" +#include "nodelet/nodelet.h" +#include "ros/ros.h" +#include "sensor_msgs/Image.h" + +namespace depthai_filters { + +class FeatureTrackerOverlay : public nodelet::Nodelet { + public: + void onInit() override; + + void overlayCB(const sensor_msgs::ImageConstPtr& img, const depthai_ros_msgs::TrackedFeaturesConstPtr& detections); + + message_filters::Subscriber imgSub; + message_filters::Subscriber featureSub; + + typedef message_filters::sync_policies::ApproximateTime syncPolicy; + std::unique_ptr> sync; + ros::Publisher overlayPub; + + using featureIdType = decltype(geometry_msgs::Point::x); + + private: + void trackFeaturePath(std::vector& features); + + void drawFeatures(cv::Mat& img); + + int circleRadius = 2; + int maxTrackedFeaturesPathLength = 30; + + cv::Scalar lineColor = cv::Scalar(200, 0, 200); + cv::Scalar pointColor = cv::Scalar(0, 0, 255); + + int trackedFeaturesPathLength = 10; + std::unordered_set trackedIDs; + std::unordered_map> trackedFeaturesPath; +}; + +} // namespace depthai_filters \ No newline at end of file diff --git a/depthai_filters/include/depthai_filters/features_3d.hpp b/depthai_filters/include/depthai_filters/features_3d.hpp new file mode 100644 index 00000000..a45298f8 --- /dev/null +++ b/depthai_filters/include/depthai_filters/features_3d.hpp @@ -0,0 +1,35 @@ +#pragma once + +#include "depthai_ros_msgs/TrackedFeatures.h" +#include "message_filters/subscriber.h" +#include "message_filters/sync_policies/approximate_time.h" +#include "message_filters/synchronizer.h" +#include "nodelet/nodelet.h" +#include "ros/ros.h" +#include "sensor_msgs/CameraInfo.h" +#include 
"sensor_msgs/Image.h" +#include "sensor_msgs/PointCloud2.h" +#include "visualization_msgs/MarkerArray.h" + +namespace depthai_filters { +class Features3D : public nodelet::Nodelet { + public: + void onInit(); + + void overlayCB(const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& info, + const depthai_ros_msgs::TrackedFeaturesConstPtr& features); + + message_filters::Subscriber depthSub; + message_filters::Subscriber featureSub; + message_filters::Subscriber infoSub; + + typedef message_filters::sync_policies::ApproximateTime syncPolicy; + std::unique_ptr> sync; + ros::Publisher overlayPub; + ros::Publisher pclPub; + float getDepthAt(int x, int y, const sensor_msgs::ImageConstPtr& depth_image); + bool desqueeze = false; +}; + +} // namespace depthai_filters \ No newline at end of file diff --git a/depthai_filters/launch/example_feature_3d.launch b/depthai_filters/launch/example_feature_3d.launch new file mode 100644 index 00000000..763d4449 --- /dev/null +++ b/depthai_filters/launch/example_feature_3d.launch @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/depthai_filters/launch/example_feature_tracker.launch b/depthai_filters/launch/example_feature_tracker.launch new file mode 100644 index 00000000..49e709cc --- /dev/null +++ b/depthai_filters/launch/example_feature_tracker.launch @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/depthai_filters/nodelet_plugins.xml b/depthai_filters/nodelet_plugins.xml index 4146322e..608edda1 100644 --- a/depthai_filters/nodelet_plugins.xml +++ b/depthai_filters/nodelet_plugins.xml @@ -19,4 +19,14 @@ Spatial Bounding Box publisher. + + + 2D Overlay for tracked features + + + + + 3D Features (PCL) publisher + + \ No newline at end of file diff --git a/depthai_filters/package.xml b/depthai_filters/package.xml index d63ae397..0f590b22 100644 --- a/depthai_filters/package.xml +++ b/depthai_filters/package.xml @@ -21,7 +21,7 @@ nodelet libopencv-dev visualization_msgs - + depthai_ros_msgs diff --git a/depthai_filters/src/feature_tracker_overlay.cpp b/depthai_filters/src/feature_tracker_overlay.cpp new file mode 100644 index 00000000..026b01c3 --- /dev/null +++ b/depthai_filters/src/feature_tracker_overlay.cpp @@ -0,0 +1,75 @@ +#include "depthai_filters/feature_tracker_overlay.hpp" + +#include "cv_bridge/cv_bridge.h" +#include "depthai_filters/utils.hpp" + +namespace depthai_filters { + +void FeatureTrackerOverlay::onInit() { + auto pNH = getPrivateNodeHandle(); + imgSub.subscribe(pNH, "/rgb/preview/image_raw", 1); + featureSub.subscribe(pNH, "/feature_tracker/tracked_features", 1); + sync = std::make_unique>(syncPolicy(10), imgSub, featureSub); + sync->registerCallback(std::bind(&FeatureTrackerOverlay::overlayCB, this, std::placeholders::_1, std::placeholders::_2)); + overlayPub = pNH.advertise("overlay", 10); +} + +void FeatureTrackerOverlay::overlayCB(const sensor_msgs::ImageConstPtr& img, const depthai_ros_msgs::TrackedFeaturesConstPtr& features) { + cv::Mat imgMat = utils::msgToMat(img, sensor_msgs::image_encodings::BGR8); + std::vector f = features->features; + trackFeaturePath(f); + drawFeatures(imgMat); + sensor_msgs::Image outMsg; + cv_bridge::CvImage(img->header, sensor_msgs::image_encodings::BGR8, imgMat).toImageMsg(outMsg); + + overlayPub.publish(outMsg); +} + +void FeatureTrackerOverlay::trackFeaturePath(std::vector& features) { + std::unordered_set newTrackedIDs; + for(auto& currentFeature : features) { + auto currentID = 
currentFeature.id; + newTrackedIDs.insert(currentID); + + if(!trackedFeaturesPath.count(currentID)) { + trackedFeaturesPath.insert({currentID, std::deque()}); + } + std::deque& path = trackedFeaturesPath.at(currentID); + + path.push_back(currentFeature.position); + while(path.size() > std::max(1, trackedFeaturesPathLength)) { + path.pop_front(); + } + } + + std::unordered_set featuresToRemove; + for(auto& oldId : trackedIDs) { + if(!newTrackedIDs.count(oldId)) { + featuresToRemove.insert(oldId); + } + } + + for(auto& id : featuresToRemove) { + trackedFeaturesPath.erase(id); + } + + trackedIDs = newTrackedIDs; +} +void FeatureTrackerOverlay::drawFeatures(cv::Mat& img) { + for(auto& featurePath : trackedFeaturesPath) { + std::deque& path = featurePath.second; + unsigned int j = 0; + for(j = 0; j < path.size() - 1; j++) { + auto src = cv::Point(path[j].x, path[j].y); + auto dst = cv::Point(path[j + 1].x, path[j + 1].y); + cv::line(img, src, dst, lineColor, 1, cv::LINE_AA, 0); + } + + cv::circle(img, cv::Point(path[j].x, path[j].y), circleRadius, pointColor, -1, cv::LINE_AA, 0); + } +} +} // namespace depthai_filters +#include + +// watch the capitalization carefully +PLUGINLIB_EXPORT_CLASS(depthai_filters::FeatureTrackerOverlay, nodelet::Nodelet) \ No newline at end of file diff --git a/depthai_filters/src/features_3d.cpp b/depthai_filters/src/features_3d.cpp new file mode 100644 index 00000000..af06acba --- /dev/null +++ b/depthai_filters/src/features_3d.cpp @@ -0,0 +1,69 @@ +#include "depthai_filters/features_3d.hpp" + +#include "cv_bridge/cv_bridge.h" +#include "depthai_filters/utils.hpp" +#include "geometry_msgs/Point32.h" +#include "opencv2/opencv.hpp" +#include "sensor_msgs/point_cloud2_iterator.h" + +namespace depthai_filters { + +void Features3D::onInit() { + auto pNH = getPrivateNodeHandle(); + depthSub.subscribe(pNH, "/stereo/image_raw", 1); + infoSub.subscribe(pNH, "/stereo/camera_info", 1); + featureSub.subscribe(pNH, "/feature_tracker/tracked_features", 1); + sync = std::make_unique>(syncPolicy(10), depthSub, infoSub, featureSub); + sync->registerCallback(std::bind(&Features3D::overlayCB, this, std::placeholders::_1, std::placeholders::_2, std::placeholders::_3)); + pclPub = pNH.advertise("features", 10); + overlayPub = pNH.advertise("overlay", 10); + pNH.getParam("desqueeze", desqueeze); +} +float Features3D::getDepthAt(int x, int y, const sensor_msgs::ImageConstPtr& depth_image) { + // Assuming depth image is of encoding type 16UC1 (16-bit depth, unsigned) + int row_step = depth_image->step; // bytes per row + int pixel_index = y * row_step + x * 2; // 2 bytes per pixel for 16-bit image + + // Get the 16-bit depth value + uint16_t depth_raw = *(uint16_t*)(&depth_image->data[pixel_index]); + + // Convert to meters or appropriate unit (this depends on your depth sensor configuration) + // Often, raw values are in millimeters, so convert to meters. 
diff --git a/depthai_ros_driver/CMakeLists.txt b/depthai_ros_driver/CMakeLists.txt
index 75470100..4737c50c 100644
--- a/depthai_ros_driver/CMakeLists.txt
+++ b/depthai_ros_driver/CMakeLists.txt
@@ -49,6 +49,8 @@ find_package(catkin REQUIRED COMPONENTS
   message_filters
   image_transport
   nodelet
+  diagnostic_updater
+  diagnostic_msgs
 )
 find_package(Threads REQUIRED)
@@ -74,7 +76,7 @@ catkin_package(
   ${SENSOR_LIB_NAME}
   ${NN_LIB_NAME}
   ${COMMON_LIB_NAME}
-  CATKIN_DEPENDS roscpp sensor_msgs std_msgs camera_info_manager depthai_bridge vision_msgs cv_bridge message_filters image_transport
+  CATKIN_DEPENDS roscpp sensor_msgs std_msgs camera_info_manager depthai_bridge vision_msgs cv_bridge message_filters image_transport diagnostic_msgs diagnostic_updater
   DEPENDS
   OpenCV
 )
@@ -84,11 +86,13 @@ add_library(
   ${COMMON_LIB_NAME} SHARED
   src/utils.cpp
   src/dai_nodes/base_node.cpp
+  src/dai_nodes/sys_logger.cpp
   src/dai_nodes/sensors/sensor_helpers.cpp # TODO: Figure out different place for this
   src/param_handlers/camera_param_handler.cpp
   src/param_handlers/imu_param_handler.cpp
   src/param_handlers/nn_param_handler.cpp
   src/param_handlers/sensor_param_handler.cpp
+  src/param_handlers/feature_tracker_param_handler.cpp
   src/param_handlers/stereo_param_handler.cpp
 )
@@ -110,6 +114,7 @@ add_library(
   src/dai_nodes/sensors/imu.cpp
   src/dai_nodes/sensors/rgb.cpp
   src/dai_nodes/sensors/mono.cpp
+  src/dai_nodes/sensors/feature_tracker.cpp
   src/dai_nodes/sensors/sensor_wrapper.cpp
   src/dai_nodes/stereo.cpp
 )
diff --git a/depthai_ros_driver/cfg/parameters.cfg b/depthai_ros_driver/cfg/parameters.cfg
index 4d46e9c0..fceef23c 100755
--- a/depthai_ros_driver/cfg/parameters.cfg
+++ b/depthai_ros_driver/cfg/parameters.cfg
@@ -9,33 +9,11 @@ camera = gen.add_group("camera")
 #       Name    Type    Reconfiguration level    Description    Default    Min    Max
-camera.add("camera_i_enable_imu", bool_t, 0,"Enabling imu", True)
+
 camera.add("camera_i_enable_ir", bool_t, 0, "Enabling IR", True)
 camera.add("camera_i_floodlight_brightness", double_t, 0, "Floodlight brightness", 0, 0, 1500)
-camera.add("camera_i_ip", str_t, 0, 
"IP", "") camera.add("camera_i_laser_dot_brightness", double_t, 0,"Laser dot brightness", 800, 0, 1200) -camera.add("camera_i_mx_id", str_t, 0,"MXID", "") -camera.add("camera_i_usb_port_id", str_t, 0,"USB port ID", "") -camera.add("camera_i_nn_type", str_t, 0, "NN Type", "spatial") -camera.add("camera_i_pipeline_type", str_t, 0, "Pipeline type", "RGBD") -camera.add("camera_i_usb_speed", str_t, 0, "USB Speed", "SUPER_PLUS") -camera.add("camera_i_pipeline_dump", bool_t, 0, "Dump pipeline info at the beginning", False) -camera.add("camera_i_calibration_dump", bool_t, 0, "Dump calibration info at the beginning", False) -camera.add("camera_i_external_calibration_path", str_t, 0, "External calibration path", "") - -camera.add("left_i_board_socket_id", int_t, 0, "Sensor board socket id", 1) -camera.add("left_i_calibration_file", str_t, 0, "Path to calibration file", "") -camera.add("left_i_fps", double_t, 0, "FPS", 30.0) -camera.add("left_i_height", int_t, 0, "Image height", 720) camera.add("left_r_keep_preview_aspect_ratio", bool_t, 0, "Keep preview aspect ratio", True) -camera.add("left_i_low_bandwidth", bool_t, 0, "Use encoding for data", False) -camera.add("left_i_low_bandwidth_quality", int_t, 0, "Quality when using low-bandwidth mode", 50, 1, 100) -camera.add("left_i_max_q_size", int_t, 0, "Internal queue size", 30) -camera.add("left_i_preview_size", int_t, 0, "Preview size", 416) -camera.add("left_i_publish_topic", bool_t, 0, "Enable ROS topic", False) -camera.add("left_i_resolution", str_t, 0, "Sensor resolution", "720") -camera.add("left_i_set_isp_scale", bool_t, 0, "Set ISP scale", False) -camera.add("left_i_width", int_t, 0, "Image height", 1280) camera.add("left_r_exposure", int_t, 0, "Sensor exposure", 1000, 1, 33000) camera.add("left_r_focus", int_t, 0, "Focus", 1, 0, 255) camera.add("left_r_iso", int_t, 0, "Sensor iso", 800, 100, 1600) @@ -43,27 +21,8 @@ camera.add("left_r_set_man_exposure", bool_t, 0, "Enable manual exposure", False camera.add("left_r_set_man_focus", bool_t, 0, "Enable manual focus", False) camera.add("left_r_set_man_whitebalance", bool_t, 0, "Enable manual whitebalance", False) camera.add("left_r_whitebalance", int_t, 0, "Whitebalance", 3300, 1000, 12000) -camera.add("left_i_simulate_from_topic", bool_t, 0, "Use external topic instead of this node in the pipeline", False) -camera.add("left_i_disable_node", bool_t, 0, "Disable device node when using external input", False) -camera.add("left_i_get_base_device_timestamp", bool_t, 0, "Use base device timestamp", False) -camera.add("left_i_isp_num", int_t, 0, "ISP scale numerator", 2) -camera.add("left_i_isp_den", int_t, 0, "ISP scale denominator", 3) -camera.add("left_i_output_isp", bool_t, 0, "Output ISP instead of video", True) -camera.add("left_i_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) -camera.add("right_i_board_socket_id", int_t, 0, "Sensor board socket id", 2) -camera.add("right_i_calibration_file", str_t, 0, "Path to calibration file", "") -camera.add("right_i_fps", double_t, 0, "FPS", 30.0) -camera.add("right_i_height", int_t, 0, "Image height", 720) camera.add("right_r_keep_preview_aspect_ratio", bool_t, 0, "Keep preview aspect ratio", True) -camera.add("right_i_low_bandwidth", bool_t, 0, "Use encoding for data", False) -camera.add("right_i_low_bandwidth_quality", int_t, 0, "Quality when using low-bandwidth mode", 50, 1, 100) -camera.add("right_i_max_q_size", int_t, 0, "Internal queue size", 30) -camera.add("right_i_preview_size", int_t, 0, "Preview 
size", 416) -camera.add("right_i_publish_topic", bool_t, 0, "Enable ROS topic", False) -camera.add("right_i_resolution", str_t, 0, "Sensor resolution", "720") -camera.add("right_i_set_isp_scale", bool_t, 0, "Set ISP scale", False) -camera.add("right_i_width", int_t, 0, "Image height", 1280) camera.add("right_r_exposure", int_t, 0, "Sensor exposure", 1000, 1, 33000) camera.add("right_r_focus", int_t, 0, "Focus", 1, 0, 255) camera.add("right_r_iso", int_t, 0, "Sensor iso", 800, 100, 1600) @@ -71,30 +30,8 @@ camera.add("right_r_set_man_exposure", bool_t, 0, "Enable manual exposure", Fals camera.add("right_r_set_man_focus", bool_t, 0, "Enable manual focus", False) camera.add("right_r_set_man_whitebalance", bool_t, 0, "Enable manual whitebalance", False) camera.add("right_r_whitebalance", int_t, 0, "Whitebalance", 3300, 1000, 12000) -camera.add("right_i_simulate_from_topic", bool_t, 0, "Use external topic instead of this node in the pipeline", False) -camera.add("right_i_disable_node", bool_t, 0, "Disable device node when using external input", False) -camera.add("right_i_get_base_device_timestamp", bool_t, 0, "Use base device timestamp", False) -camera.add("right_i_isp_num", int_t, 0, "ISP scale numerator", 2) -camera.add("right_i_isp_den", int_t, 0, "ISP scale denominator", 3) -camera.add("right_i_output_isp", bool_t, 0, "Output ISP instead of video", False) -camera.add("right_i_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) - -camera.add("rgb_i_board_socket_id", int_t, 0, "Sensor board socket id", 0) -camera.add("rgb_i_calibration_file", str_t, 0, "Path to calibration file", "") -camera.add("rgb_i_fps", double_t, 0, "FPS", 60.0) -camera.add("rgb_i_height", int_t, 0, "Image height", 720) -camera.add("rgb_i_interleaved", bool_t, 0, "Is frame interleaved or planar", False) camera.add("rgb_r_keep_preview_aspect_ratio", bool_t, 0, "Keep preview aspect ratio", True) -camera.add("rgb_i_low_bandwidth", bool_t, 0, "Use encoding for data", False) -camera.add("rgb_i_low_bandwidth_quality", int_t, 0, "Quality when using low-bandwidth mode", 50, 1, 100) -camera.add("rgb_i_max_q_size", int_t, 0, "Internal queue size", 30) -camera.add("rgb_i_preview_size", int_t, 0, "Preview size", 300) -camera.add("rgb_i_publish_topic", bool_t, 0, "Enable ROS topic", True) -camera.add("rgb_i_enable_preview", bool_t, 0, "Enable ROS topic", False) -camera.add("rgb_i_resolution", str_t, 0, "Sensor resolution", "1080") -camera.add("rgb_i_set_isp_scale", bool_t, 0, "Set ISP scale", True) -camera.add("rgb_i_width", int_t, 0, "Image height", 1280) camera.add("rgb_r_exposure", int_t, 0, "Sensor exposure", 1000, 1, 33000) camera.add("rgb_r_focus", int_t, 0, "Focus", 1, 0, 255) camera.add("rgb_r_iso", int_t, 0, "Sensor iso", 800, 100, 1600) @@ -102,94 +39,5 @@ camera.add("rgb_r_set_man_exposure", bool_t, 0, "Enable manual exposure", False) camera.add("rgb_r_set_man_focus", bool_t, 0, "Enable manual focus", False) camera.add("rgb_r_set_man_whitebalance", bool_t, 0, "Enable manual whitebalance", False) camera.add("rgb_r_whitebalance", int_t, 0, "Whitebalance", 3300, 1000, 12000) -camera.add("rgb_i_simulate_from_topic", bool_t, 0, "Use external topic instead of this node in the pipeline", False) -camera.add("rgb_i_disable_node", bool_t, 0, "Disable device node when using external input", False) -camera.add("rgb_i_get_base_device_timestamp", bool_t, 0, "Use base device timestamp", False) -camera.add("rgb_i_isp_num", int_t, 0, "ISP scale numerator", 2) -camera.add("rgb_i_isp_den", 
int_t, 0, "ISP scale denominator", 3) -camera.add("rgb_i_output_isp", bool_t, 0, "Output ISP instead of video", False) -camera.add("rgb_i_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) - - -camera.add("stereo_i_align_depth", bool_t, 0, "Whether to align depth to RGB", True) -camera.add("stereo_i_calibration_file", str_t, 0, "Path to calibration file", "") -camera.add("stereo_i_bilateral_sigma", int_t, 0, "Bilateral sigma", 0) -camera.add("stereo_i_board_socket_id", int_t, 0, "Board socket id, depends on alignment", 0) -camera.add("stereo_i_decimation_filter_decimation_factor", int_t, 0, "Decimation factor", 1) -camera.add("stereo_i_decimation_filter_decimation_mode", str_t, 0, "Decimation mode", "PIXEL_SKIPPING") -camera.add("stereo_i_depth_filter_size", int_t, 0, "Depth filter size", 5) -camera.add("stereo_i_depth_preset", str_t, 0, "Depth preset", "HIGH_ACCURACY") -camera.add("stereo_i_enable_brightness_filter", bool_t, 0, "Enable brightness filter", False) -camera.add("stereo_i_brightness_filter_min_brightness", int_t, 0, "Brightness filter min", 0) -camera.add("stereo_i_brightness_filter_max_brightness", int_t, 0, "Brightness filter max", 256) -camera.add("stereo_i_enable_companding", bool_t, 0, "Enable companding", False) -camera.add("stereo_i_enable_decimation_filter", bool_t, 0, "Enable decimation filter", False) -camera.add("stereo_i_enable_distortion_correction", bool_t, 0, "Enable distortion correction", True) -camera.add("stereo_i_enable_speckle_filter", bool_t, 0, "Enable speckle filter", False) -camera.add("stereo_i_speckle_filter_max_range", int_t, 0, "Speckle filter max range", 50) -camera.add("stereo_i_enable_spatial_filter", bool_t, 0, "Enable spatial filter", False) -camera.add("stereo_i_enable_temporal_filter", bool_t, 0, "Enable temporal filter", False) -camera.add("stereo_i_enable_threshold_filter", bool_t, 0, "Enable threshold filter", False) -camera.add("stereo_i_extended_disp", bool_t, 0, "Enable extended disparity", False) -camera.add("stereo_i_height", int_t, 0, "Output image height", 720) -camera.add("stereo_i_low_bandwidth", bool_t, 0, "Use encoding for data", False) -camera.add("stereo_i_low_bandwidth_quality", int_t, 0, "Quality when using low-bandwidth mode", 50, 1, 100) -camera.add("stereo_i_lr_check", bool_t, 0, "Enable left-right check", True) -camera.add("stereo_i_lrc_threshold", int_t, 0, "Left-right check threshold", 10) -camera.add("stereo_i_max_q_size", int_t, 0, "Internal queue size", 30) -camera.add("stereo_i_publish_left_rect", bool_t, 0, "Publish rectified left", False) -camera.add("stereo_i_left_rect_low_bandwidth", bool_t, 0, "Low bandwidth for rectified left", False) -camera.add("stereo_i_left_rect_low_bandwidth_quality", int_t, 0, "Low bandwidth left quality", 50) -camera.add("stereo_i_left_rect_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) -camera.add("stereo_i_publish_right_rect", bool_t, 0, "Publish rectified right", False) -camera.add("stereo_i_right_rect_low_bandwidth", bool_t, 0, "Low bandwidth for rectified right", False) -camera.add("stereo_i_right_rect_low_bandwidth_quality", int_t, 0, "Low bandwidth right quality", 50) -camera.add("stereo_i_right_rect_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) -camera.add("stereo_i_publish_topic", bool_t, 0, "Publish stereo topic", True) -camera.add("stereo_i_rectify_edge_fill_color", int_t, 0, "Rectify edge fill color", 0) 
-camera.add("stereo_i_spatial_filter_alpha", double_t, 0, "Spatial filter alpha", 0.5) -camera.add("stereo_i_spatial_filter_delta", int_t, 0, "Spatial filter delta", 20) -camera.add("stereo_i_spatial_filter_hole_filling_radius", int_t, 0, "Spatial filter hole filling radius", 2) -camera.add("stereo_i_spatial_filter_iterations", int_t, 0, "Spatial filter iterations", 1) -camera.add("stereo_i_stereo_conf_threshold", int_t, 0, "Stereo confidence threshold", 255, 0, 255) -camera.add("stereo_i_subpixel", bool_t, 0, "Subpixel mode", False) -camera.add("stereo_i_temporal_filter_alpha", double_t, 0, "Temporal filter alpha", 0.4) -camera.add("stereo_i_temporal_filter_delta", int_t, 0, "Temporal filter delta", 20) -camera.add("stereo_i_temporal_filter_persistency", str_t, 0, "Temporal filter persistency", "VALID_2_IN_LAST_4") -camera.add("stereo_i_threshold_filter_max_range", int_t, 0, "Threshold filter max range", 15000) -camera.add("stereo_i_threshold_filter_min_range", int_t, 0, "Threshold filter min range", 400) -camera.add("stereo_i_width", int_t, 0, "Output image width", 1280) -camera.add("stereo_i_disparity_width", str_t, 0, "Disparity width", "DISPARITY_96") -camera.add("stereo_i_get_base_device_timestamp", bool_t, 0, "Use base device timestamp", False) -camera.add("stereo_i_subpixel_fractional_bits", int_t, 0, "Subpixel fractional bits", 3) -camera.add("stereo_i_set_input_size", bool_t, 0, "Set stereo input size", False) -camera.add("stereo_i_input_width", int_t, 0, "Input image width", 1280) -camera.add("stereo_i_input_height", int_t, 0, "Input image height", 720) -camera.add("stereo_i_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) - -camera.add("nn_i_nn_config_path", str_t, 0, "NN JSON Config path", "depthai_ros_driver/mobilenet") -camera.add("nn_i_disable_resize", bool_t, 0, "Disable ImageManip input", True) -camera.add("nn_i_enable_passthrough", bool_t, 0, "Publish passthrough", False) -camera.add("nn_i_enable_passthrough_depth", bool_t, 0, "Publish passthrough depth", False) -camera.add("nn_i_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) - -camera.add("imu_i_max_q_size", int_t, 0, "Internal queue size", 30) -camera.add("imu_i_acc_freq", int_t, 0, "Accelerometer frequency", 500) -camera.add("imu_i_acc_cov", double_t, 0, "Accelerometer covariance", 0.0) -camera.add("imu_i_batch_report_threshold", int_t, 0, "Batch report size", 5) -camera.add("imu_i_enable_rotation", bool_t, 0, "Whether to enable rotation vector & magnetometer data (available when using IMU_WITH_MAGN/ IMU_WITH_MAGN_SPLIT message type)", False) -camera.add("imu_i_get_base_device_timestamp", bool_t, 0, "Use base device timestamp", False) -camera.add("imu_i_gyro_cov", double_t, 0, "Gyroscope covariance", 0.0) -camera.add("imu_i_gyro_freq", int_t, 0, "Gyroscope frequency", 400) -camera.add("imu_i_mag_cov", double_t, 0, "Magnetometer covariance", 0.0) -camera.add("imu_i_mag_freq", int_t, 0, "Magnetometer frequency", 100) -camera.add("imu_i_max_batch_reports", int_t, 0, "Max reports per batch", 20) -camera.add("imu_i_message_type", str_t, 0, "ROS Publisher type", "IMU") -camera.add("imu_i_rot_cov", double_t, 0, "Rotation covariance", -1.0) -camera.add("imu_i_rot_freq", int_t, 0, "Rotation frequency", 100) -camera.add("imu_i_sync_method", str_t, 0, "Imu sync method", "COPY") -camera.add("imu_i_update_ros_base_time_on_ros_msg", bool_t, 0 ,"Whether to update ROS time with new messages", False) - - exit(gen.generate(PACKAGE, 
"depthai_ros_driver", "parameters")) \ No newline at end of file diff --git a/depthai_ros_driver/config/camera.yaml b/depthai_ros_driver/config/camera.yaml index 566d4e64..457e7abc 100644 --- a/depthai_ros_driver/config/camera.yaml +++ b/depthai_ros_driver/config/camera.yaml @@ -6,106 +6,10 @@ camera_i_laser_dot_brightness: 800.0 camera_i_mx_id: '' camera_i_nn_type: spatial - camera_i_pipeline_type: RGBD + camera_i_pipeline_type: rgbd + camera_i_tf_imu_from_descr: "true" camera_i_usb_port_id: '' camera_i_usb_speed: SUPER_PLUS - imu_i_max_q_size: 30 left_i_board_socket_id: 1 - left_i_calibration_file: '' - left_i_fps: 30.0 - left_i_height: 720 - left_i_low_bandwidth: false - left_i_low_bandwidth_quality: 50 - left_i_max_q_size: 30 - left_i_preview_size: 300 - left_i_publish_topic: false - left_i_resolution: '720' - left_i_set_isp_scale: true - left_i_width: 1280 - left_r_exposure: 1000 - left_r_focus: 1 - left_r_iso: 800 - left_r_keep_preview_aspect_ratio: true - left_r_set_man_exposure: false - left_r_set_man_focus: false - left_r_set_man_whitebalance: false - left_r_whitebalance: 3300 - nn_i_nn_config_path: depthai_ros_driver/mobilenet - rgb_i_board_socket_id: 0 - rgb_i_calibration_file: '' - rgb_i_enable_preview: false - rgb_i_fps: 60.0 - rgb_i_height: 720 - rgb_i_interleaved: false - rgb_i_keep_preview_aspect_ratio: true - rgb_i_low_bandwidth: false - rgb_i_low_bandwidth_quality: 50 - rgb_i_max_q_size: 30 - rgb_i_preview_size: 300 - rgb_i_publish_topic: true - rgb_i_resolution: '1080' - rgb_i_set_isp_scale: true - rgb_i_output_isp: true - rgb_i_width: 1280 - rgb_r_exposure: 1000 - rgb_r_focus: 1 - rgb_r_iso: 800 - rgb_r_keep_preview_aspect_ratio: true - rgb_r_set_man_exposure: false - rgb_r_set_man_focus: false - rgb_r_set_man_whitebalance: false - rgb_r_whitebalance: 3300 - rgb_i_isp_num: 2 - rgb_i_isp_den: 3 right_i_board_socket_id: 2 - right_i_calibration_file: '' - right_i_fps: 30.0 - right_i_height: 720 - right_i_low_bandwidth: false - right_i_low_bandwidth_quality: 50 - right_i_max_q_size: 30 - right_i_preview_size: 300 - right_i_publish_topic: false - right_i_resolution: '720' - right_i_set_isp_scale: true - right_i_width: 1280 - right_r_exposure: 1000 - right_r_focus: 1 - right_r_iso: 800 - right_r_keep_preview_aspect_ratio: true - right_r_set_man_exposure: false - right_r_set_man_focus: false - right_r_set_man_whitebalance: false - right_r_whitebalance: 3300 - stereo_i_align_depth: true - stereo_i_bilateral_sigma: 0 - stereo_i_board_socket_id: 0 - stereo_i_calibration_file: '' - stereo_i_decimation_filter_decimation_factor: 1 - stereo_i_decimation_filter_decimation_mode: PIXEL_SKIPPING - stereo_i_depth_filter_size: 5 - stereo_i_depth_preset: HIGH_ACCURACY - stereo_i_enable_decimation_filter: false - stereo_i_enable_distoartion_correction: false - stereo_i_enable_spatial_filter: false - stereo_i_enable_temporal_filter: false - stereo_i_enable_threshold_filter: false - stereo_i_extended_disp: false - stereo_i_height: 720 - stereo_i_low_bandwidth: false - stereo_i_low_bandwidth_quality: 50 - stereo_i_lr_check: true - stereo_i_lrc_threshold: 10 - stereo_i_max_q_size: 30 - stereo_i_rectify_edge_fill_color: 0 - stereo_i_spatial_filter_alpha: 0.5 - stereo_i_spatial_filter_delta: 20 - stereo_i_spatial_filter_hole_filling_radius: 2 - stereo_i_spatial_filter_iterations: 1 - stereo_i_stereo_conf_threshold: 255 - stereo_i_temporal_filter_alpha: 0.4 - stereo_i_temporal_filter_delta: 20 - stereo_i_temporal_filter_persistency: VALID_2_IN_LAST_4 - stereo_i_threshold_filter_max_range: 15000 - 
stereo_i_threshold_filter_min_range: 400 - stereo_i_width: 1280 \ No newline at end of file + nn_i_nn_config_path: depthai_ros_driver/mobilenet diff --git a/depthai_ros_driver/include/depthai_ros_driver/camera.hpp b/depthai_ros_driver/include/depthai_ros_driver/camera.hpp index 5f59b630..958347c2 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/camera.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/camera.hpp @@ -1,8 +1,10 @@ #pragma once +#include "depthai_bridge/TFPublisher.hpp" #include "depthai_ros_driver/dai_nodes/base_node.hpp" #include "depthai_ros_driver/param_handlers/camera_param_handler.hpp" #include "depthai_ros_driver/parametersConfig.h" +#include "diagnostic_msgs/DiagnosticArray.h" #include "dynamic_reconfigure/server.h" #include "nodelet/nodelet.h" #include "ros/node_handle.h" @@ -18,23 +20,62 @@ using Trigger = std_srvs::Trigger; class Camera : public nodelet::Nodelet { public: void onInit() override; + /* + * @brief Creates the pipeline and starts the device. Also sets up parameter callback and services. + */ void onConfigure(); private: + /* + * @brief Print information about the device type. + */ void getDeviceType(); + /* + * @brief Create the pipeline by using PipelineGenerator. + */ void createPipeline(); - void loadNodes(); + + /* + * @brief Connect either to a first available device or to a device with a specific USB port, MXID or IP. Loops continuously until a device is found. + */ void startDevice(); - void rgbPipeline(); + /* + * @brief Sets up the queues and creates publishers for the nodes in the pipeline. + */ void setupQueues(); + /* + * @brief Sets IR floodlight and dot pattern projector. + */ void setIR(); + /* + * @brief Saves pipeline as a json to a file. + */ void savePipeline(); + /* + * @brief Saves calibration data to a json file. + */ void saveCalib(); + /* + * @brief Loads calibration data from a path. + * @param path Path to the calibration file. + */ void loadCalib(const std::string& path); void parameterCB(parametersConfig& config, uint32_t level); std::shared_ptr> paramServer; std::unique_ptr ph; ros::ServiceServer startSrv, stopSrv, savePipelineSrv, saveCalibSrv; + ros::Subscriber diagSub; + /* + * Closes all the queues, clears the configured BaseNodes, stops the pipeline and resets the device. + */ + void stop(); + /* + * Runs onConfigure(); + */ + void start(); + void restart(); + void diagCB(const diagnostic_msgs::DiagnosticArray::ConstPtr& msg); + bool startCB(Trigger::Request& /*req*/, Trigger::Response& res); bool stopCB(Trigger::Request& /*req*/, Trigger::Response& res); bool saveCalibCB(Trigger::Request& /*req*/, Trigger::Response& res); @@ -49,5 +90,6 @@ class Camera : public nodelet::Nodelet { bool enableIR = false; double floodlightBrighness; double laserDotBrightness; + std::unique_ptr tfPub; }; } // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp index 22a01ee6..979dac3c 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp @@ -20,20 +20,50 @@ namespace depthai_ros_driver { namespace dai_nodes { class BaseNode { public: + /* + * @brief Constructor of the class BaseNode. Creates a node in the pipeline. 
+ * + * @param[in] daiNodeName The dai node name + * @param node The node + * @param pipeline The pipeline + */ BaseNode(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr /*pipeline*/); virtual ~BaseNode(); - virtual void updateParams(parametersConfig& config) = 0; - virtual void link(dai::Node::Input in, int linkType = 0) = 0; - virtual dai::Node::Input getInput(int /*linkType = 0*/); + virtual void updateParams(parametersConfig& config); + virtual void link(dai::Node::Input in, int linkType = 0); + virtual dai::Node::Input getInput(int linkType = 0); virtual void setupQueues(std::shared_ptr device) = 0; + /* + * @brief Sets the names of the queues. + */ virtual void setNames() = 0; + /* + * @brief Link inputs and outputs. + * + * @param pipeline The pipeline + */ virtual void setXinXout(std::shared_ptr pipeline) = 0; virtual void closeQueues() = 0; void setNodeName(const std::string& daiNodeName); void setROSNodePointer(ros::NodeHandle node); + /* + * @brief Gets the ROS node pointer. + * + * @return The ROS node pointer. + */ ros::NodeHandle getROSNode(); + /* + * @brief Gets the name of the node. + * + * @return The name. + */ std::string getName(); + /* + * @brief Append ROS node name to the frameName given. + * + * @param[in] frameName The frame name + */ std::string getTFPrefix(const std::string& frameName = ""); private: diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp index aad3a991..a74c08b1 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp @@ -28,6 +28,14 @@ namespace nn { template class Detection : public BaseNode { public: + /* + * @brief Constructor of the class Detection. Creates a DetectionNetwork node in the pipeline. Also creates an ImageManip node in the pipeline. + * The ImageManip node is used to resize the input frames to the size required by the DetectionNetwork node. + * + * @param[in] daiNodeName The dai node name + * @param node The node + * @param pipeline The pipeline + */ Detection(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline) : BaseNode(daiNodeName, node, pipeline), it(node) { ROS_DEBUG("Creating node %s", daiNodeName.c_str()); setNames(); @@ -40,7 +48,12 @@ class Detection : public BaseNode { setXinXout(pipeline); } ~Detection() = default; - + /* + * @brief Sets up the queues for the DetectionNetwork node and the ImageManip node. Also sets up the publishers for the DetectionNetwork node and the + * ImageManip node. + * + * @param device The device + */ void setupQueues(std::shared_ptr device) override { nnQ = device->getOutputQueue(nnQName, ph->getParam("i_max_q_size"), false); auto tfPrefix = getTFPrefix("rgb"); @@ -67,12 +80,25 @@ class Detection : public BaseNode { infoManager->setCameraInfo(sensor_helpers::getCalibInfo(*imageConverter, device, dai::CameraBoardSocket::CAM_A, width, height)); ptPub = it.advertiseCamera(getName() + "/passthrough/image_raw", 1); - ptQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *imageConverter, ptPub, infoManager)); + ptQ->addCallback(std::bind(sensor_helpers::basicCameraPub, std::placeholders::_1, std::placeholders::_2, *imageConverter, ptPub, infoManager)); } }; + /* + * @brief Links the input of the DetectionNetwork node to the output of the ImageManip node. 
+ * + * @param[in] in The input of the DetectionNetwork node + * @param[in] linkType The link type (not used) + */ void link(dai::Node::Input in, int /*linkType*/) override { detectionNode->out.link(in); }; + /* + * @brief Gets the input of the DetectionNetwork node. + * + * @param[in] linkType The link type (not used) + * + * @return The input of the DetectionNetwork node. + */ dai::Node::Input getInput(int /*linkType*/) override { if(ph->getParam("i_disable_resize")) { return detectionNode->input; @@ -83,6 +109,11 @@ class Detection : public BaseNode { nnQName = getName() + "_nn"; ptQName = getName() + "_pt"; }; + /* + * @brief Sets the XLinkOut for the DetectionNetwork node and the ImageManip node. Additionally enables the passthrough. + * + * @param pipeline The pipeline + */ void setXinXout(std::shared_ptr pipeline) override { xoutNN = pipeline->create(); xoutNN->setStreamName(nnQName); @@ -93,6 +124,9 @@ class Detection : public BaseNode { detectionNode->passthrough.link(xoutPT->input); } }; + /* + * @brief Closes the queues for the DetectionNetwork node and the passthrough. + */ void closeQueues() override { nnQ->close(); if(ph->getParam("i_enable_passthrough")) { @@ -105,6 +139,12 @@ class Detection : public BaseNode { }; private: + /* + * @brief Callback for the DetectionNetwork node. Converts the ImgDetections to Detection2DArray and publishes it. + * + * @param[in] name The name of the stream + * @param[in] data The DAI data + */ void detectionCB(const std::string& /*name*/, const std::shared_ptr& data) { auto inDet = std::dynamic_pointer_cast(data); std::deque deq; diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp index 089db08c..9b249a8b 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp @@ -70,7 +70,7 @@ class SpatialDetection : public BaseNode { ptInfoMan->setCameraInfo(sensor_helpers::getCalibInfo(*ptImageConverter, device, dai::CameraBoardSocket::CAM_A, width, height)); ptPub = it.advertiseCamera(getName() + "/passthrough/image_raw", 1); - ptQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *ptImageConverter, ptPub, ptInfoMan)); + ptQ->addCallback(std::bind(sensor_helpers::basicCameraPub, std::placeholders::_1, std::placeholders::_2, *ptImageConverter, ptPub, ptInfoMan)); } if(ph->getParam("i_enable_passthrough_depth")) { @@ -91,7 +91,7 @@ class SpatialDetection : public BaseNode { ptDepthPub = it.advertiseCamera(getName() + "/passthrough_depth/image_raw", 1); ptDepthQ->addCallback( - std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *ptDepthImageConverter, ptDepthPub, ptDepthInfoMan)); + std::bind(sensor_helpers::basicCameraPub, std::placeholders::_1, std::placeholders::_2, *ptDepthImageConverter, ptDepthPub, ptDepthInfoMan)); } }; void link(dai::Node::Input in, int /*linkType = 0*/) override { diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp new file mode 100644 index 00000000..ed596b0c --- /dev/null +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp @@ -0,0 +1,56 @@ +#pragma once + +#include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include 
"depthai_ros_msgs/TrackedFeatures.h" + +namespace dai { +class Pipeline; +class Device; +class DataOutputQueue; +class ADatatype; +namespace node { +class FeatureTracker; +class XLinkOut; +} // namespace node +namespace ros { +class TrackedFeaturesConverter; +} +} // namespace dai + +namespace ros { +class NodeHandle; +} // namespace ros + +namespace depthai_ros_driver { +namespace param_handlers { +class FeatureTrackerParamHandler; +} +namespace dai_nodes { + +class FeatureTracker : public BaseNode { + public: + explicit FeatureTracker(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline); + ~FeatureTracker(); + void updateParams(parametersConfig& config) override; + void setupQueues(std::shared_ptr device) override; + void link(dai::Node::Input in, int linkType = 0) override; + dai::Node::Input getInput(int linkType = 0) override; + void setNames() override; + void setXinXout(std::shared_ptr pipeline) override; + void closeQueues() override; + void getParentName(const std::string& fullName); + + private: + std::unique_ptr featureConverter; + void featureQCB(const std::string& name, const std::shared_ptr& data); + ros::Publisher featurePub; + std::shared_ptr featureNode; + std::unique_ptr ph; + std::shared_ptr featureQ; + std::shared_ptr xoutFeature; + std::string featureQName; + std::string parentName; +}; + +} // namespace dai_nodes +} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp index b6cb92b2..4a54f15a 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/mono.hpp @@ -4,6 +4,8 @@ #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "image_transport/camera_publisher.h" #include "image_transport/image_transport.h" +#include "sensor_msgs/CameraInfo.h" +#include "sensor_msgs/Image.h" namespace dai { class Pipeline; @@ -55,7 +57,7 @@ class Mono : public BaseNode { private: std::unique_ptr imageConverter; image_transport::ImageTransport it; - image_transport::CameraPublisher monoPub; + image_transport::CameraPublisher monoPubIT; std::shared_ptr infoManager; std::shared_ptr monoCamNode; std::shared_ptr videoEnc; diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp index 413f8d57..24b1eedd 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/rgb.hpp @@ -5,6 +5,7 @@ #include "image_transport/camera_publisher.h" #include "image_transport/image_transport.h" #include "sensor_msgs/CameraInfo.h" +#include "sensor_msgs/Image.h" namespace dai { class Pipeline; @@ -61,7 +62,7 @@ class RGB : public BaseNode { private: std::unique_ptr imageConverter; image_transport::ImageTransport it; - image_transport::CameraPublisher rgbPub, previewPub; + image_transport::CameraPublisher rgbPubIT, previewPubIT; std::shared_ptr infoManager, previewInfoManager; std::shared_ptr colorCamNode; std::shared_ptr videoEnc; diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp index 91cbff7b..85986ee0 100644 --- 
a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp @@ -34,28 +34,32 @@ enum class RGBLinkType { video, isp, preview }; namespace sensor_helpers { struct ImageSensor { std::string name; + std::string defaultResolution; std::vector allowedResolutions; bool color; void getSizeFromResolution(const dai::ColorCameraProperties::SensorResolution& res, int& width, int& height); }; extern std::vector availableSensors; -void imgCB(const std::string& /*name*/, - const std::shared_ptr& data, - dai::ros::ImageConverter& converter, - image_transport::CameraPublisher& pub, - std::shared_ptr infoManager); -void compressedImgCB(const std::string& /*name*/, - const std::shared_ptr& data, - dai::ros::ImageConverter& converter, - image_transport::CameraPublisher& pub, - std::shared_ptr infoManager, - dai::RawImgFrame::Type dataType); +void basicCameraPub(const std::string& /*name*/, + const std::shared_ptr& data, + dai::ros::ImageConverter& converter, + image_transport::CameraPublisher& pub, + std::shared_ptr infoManager); + +void cameraPub(const std::string& /*name*/, + const std::shared_ptr& data, + dai::ros::ImageConverter& converter, + image_transport::CameraPublisher& pub, + std::shared_ptr infoManager, + bool lazyPub = true); + sensor_msgs::CameraInfo getCalibInfo( dai::ros::ImageConverter& converter, std::shared_ptr device, dai::CameraBoardSocket socket, int width = 0, int height = 0); std::shared_ptr createEncoder(std::shared_ptr pipeline, int quality, dai::VideoEncoderProperties::Profile profile = dai::VideoEncoderProperties::Profile::MJPEG); + } // namespace sensor_helpers } // namespace dai_nodes } // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp index 8a15c426..02b04f09 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp @@ -52,7 +52,7 @@ class SensorWrapper : public BaseNode { private: void subCB(const sensor_msgs::Image::ConstPtr& img); - std::unique_ptr sensorNode; + std::unique_ptr sensorNode, featureTrackerNode; std::unique_ptr ph; std::unique_ptr converter; ros::Subscriber sub; diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp index 1d3783d7..d1291116 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp @@ -4,6 +4,7 @@ #include #include +#include "depthai-shared/common/CameraFeatures.hpp" #include "depthai_ros_driver/dai_nodes/base_node.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp" #include "depthai_ros_driver/parametersConfig.h" @@ -14,6 +15,7 @@ class Pipeline; class Device; class DataOutputQueue; class ADatatype; +class ImgFrame; namespace node { class StereoDepth; class XLinkOut; @@ -21,14 +23,10 @@ class VideoEncoder; } // namespace node namespace ros { class ImageConverter; -} +class Timer; +} // namespace ros } // namespace dai -namespace rclcpp { -class Node; -class Parameter; -} // namespace rclcpp - namespace camera_info_manager { class CameraInfoManager; } @@ -43,19 +41,14 @@ namespace link_types { enum class 
StereoLinkType { left, right }; } -struct StereoSensorInfo { - std::string name; - dai::CameraBoardSocket socket; -}; - class Stereo : public BaseNode { public: explicit Stereo(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline, std::shared_ptr device, - StereoSensorInfo leftInfo = StereoSensorInfo{"left", dai::CameraBoardSocket::CAM_B}, - StereoSensorInfo rightInfo = StereoSensorInfo{"right", dai::CameraBoardSocket::CAM_C}); + dai::CameraBoardSocket leftSocket = dai::CameraBoardSocket::CAM_B, + dai::CameraBoardSocket rightSocket = dai::CameraBoardSocket::CAM_C); ~Stereo(); void updateParams(parametersConfig& config) override; void setupQueues(std::shared_ptr device) override; @@ -69,19 +62,35 @@ class Stereo : public BaseNode { void setupStereoQueue(std::shared_ptr device); void setupLeftRectQueue(std::shared_ptr device); void setupRightRectQueue(std::shared_ptr device); + void setupRectQueue(std::shared_ptr device, + dai::CameraFeatures& sensorInfo, + const std::string& queueName, + std::unique_ptr& conv, + std::shared_ptr& im, + std::shared_ptr& q, + image_transport::CameraPublisher& pubIT, + bool isLeft); + /* + * This callback is used to synchronize left and right rectified frames + * It is called every 10ms and it publishes the frames if they are synchronized + * If they are not synchronized, it prints a warning message + */ + void syncTimerCB(); image_transport::ImageTransport it; std::unique_ptr stereoConv, leftRectConv, rightRectConv; - image_transport::CameraPublisher stereoPub, leftRectPub, rightRectPub; + image_transport::CameraPublisher stereoPubIT, leftRectPubIT, rightRectPubIT; std::shared_ptr stereoIM, leftRectIM, rightRectIM; std::shared_ptr stereoCamNode; std::shared_ptr stereoEnc, leftRectEnc, rightRectEnc; std::unique_ptr left; std::unique_ptr right; + std::unique_ptr featureTrackerLeftR, featureTrackerRightR; std::unique_ptr ph; std::shared_ptr stereoQ, leftRectQ, rightRectQ; std::shared_ptr xoutStereo, xoutLeftRect, xoutRightRect; std::string stereoQName, leftRectQName, rightRectQName; - StereoSensorInfo leftSensInfo, rightSensInfo; + dai::CameraFeatures leftSensInfo, rightSensInfo; + std::shared_ptr syncTimer; }; } // namespace dai_nodes diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sys_logger.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sys_logger.hpp new file mode 100644 index 00000000..90c55fe4 --- /dev/null +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sys_logger.hpp @@ -0,0 +1,40 @@ +#include "depthai/pipeline/datatype/SystemInformation.hpp" +#include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include "diagnostic_updater/diagnostic_updater.h" +namespace dai { +class Pipeline; +class Device; +class DataOutputQueue; +class ADatatype; +namespace node { +class SystemLogger; +class XLinkOut; +class VideoEncoder; +} // namespace node +} // namespace dai + +namespace depthai_ros_driver { + +namespace dai_nodes { +class SysLogger : public BaseNode { + public: + SysLogger(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline); + ~SysLogger(); + void setupQueues(std::shared_ptr device) override; + void setNames() override; + void setXinXout(std::shared_ptr pipeline) override; + void closeQueues() override; + + private: + void timerCB(); + std::string sysInfoToString(const dai::SystemInformation& sysInfo); + void produceDiagnostics(diagnostic_updater::DiagnosticStatusWrapper& stat); + std::shared_ptr updater; + std::shared_ptr xoutLogger; + std::shared_ptr 
sysNode; + std::shared_ptr loggerQ; + std::string loggerQName; + ros::Timer timer; +}; +} // namespace dai_nodes +} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp index 4b5318ba..9ce03c78 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/base_param_handler.hpp @@ -5,6 +5,9 @@ namespace depthai_ros_driver { namespace param_handlers { +inline std::pair getRangedIntDescriptor(uint16_t min, uint16_t max) { + return std::make_pair(min, max); +} class BaseParamHandler { public: @@ -25,12 +28,6 @@ class BaseParamHandler { return value; } template - T getOtherNodeParam(const std::string& daiNodeName, const std::string& paramName) { - T value; - baseNode.getParam(getFullParamName(daiNodeName, paramName), value); - return value; - } - template T getParam(const std::string& paramName, T defaultVal) { T value; if(!baseNode.param(getFullParamName(paramName), value, defaultVal)) { @@ -41,6 +38,12 @@ class BaseParamHandler { return value; } template + T getOtherNodeParam(const std::string& daiNodeName, const std::string& paramName) { + T value; + baseNode.getParam(getFullParamName(daiNodeName, paramName), value); + return value; + } + template T getOtherNodeParam(const std::string& daiNodeName, const std::string& paramName, T defaultVal) { T value; if(!baseNode.param(getFullParamName(daiNodeName, paramName), value, defaultVal)) { @@ -56,11 +59,11 @@ class BaseParamHandler { return value; } std::string getFullParamName(const std::string& paramName) { - std::string name = std::string(baseNode.getNamespace()) + "/" + baseName + "_" + paramName; + std::string name = baseName + "_" + paramName; return name; } std::string getFullParamName(const std::string& daiNodeName, const std::string& paramName) { - std::string name = std::string(baseNode.getNamespace()) + "/" + daiNodeName + "_" + paramName; + std::string name = daiNodeName + "_" + paramName; return name; } @@ -68,6 +71,44 @@ class BaseParamHandler { ros::NodeHandle getROSNode() { return baseNode; } + + template + T declareAndLogParam(const std::string& paramName, const std::vector& value, bool override = false) { + std::string fullName = getFullParamName(paramName); + if(override || !baseNode.hasParam(fullName)) { + return setParam(paramName, value); + } else { + return getParam(paramName); + } + } + + template + T declareAndLogParam(const std::string& paramName, T value, bool override = false) { + std::string fullName = getFullParamName(paramName); + if(override || !baseNode.hasParam(fullName)) { + return setParam(paramName, value); + } else { + return getParam(paramName); + } + } + int declareAndLogParam(const std::string& paramName, int value, std::pair int_range, bool override = false) { + std::string fullName = getFullParamName(paramName); + if(override || !baseNode.hasParam(fullName)) { + if(value < int_range.first || value > int_range.second) { + ROS_WARN("Param %s with value %d is out of range [%d, %d]. 
Setting value %d", + fullName.c_str(), + value, + int_range.first, + int_range.second, + int_range.first); + value = int_range.first; + } + return setParam(paramName, value); + } else { + return getParam(paramName); + } + } + template inline void logParam(const std::string& name, T value) { std::stringstream ss; diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/feature_tracker_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/feature_tracker_param_handler.hpp new file mode 100644 index 00000000..817cbe61 --- /dev/null +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/feature_tracker_param_handler.hpp @@ -0,0 +1,33 @@ +#pragma once + +#include +#include +#include + +#include "depthai/pipeline/datatype/CameraControl.hpp" +#include "depthai/pipeline/datatype/FeatureTrackerConfig.hpp" +#include "depthai_ros_driver/param_handlers/base_param_handler.hpp" + +namespace dai { +namespace node { +class FeatureTracker; +} +} // namespace dai + +namespace ros { +class NodeHandle; +} // namespace ros + +namespace depthai_ros_driver { +namespace param_handlers { + +class FeatureTrackerParamHandler : public BaseParamHandler { + public: + explicit FeatureTrackerParamHandler(ros::NodeHandle node, const std::string& name); + ~FeatureTrackerParamHandler(); + void declareParams(std::shared_ptr featureTracker); + dai::CameraControl setRuntimeParams(parametersConfig& config) override; + std::unordered_map motionEstMap; +}; +} // namespace param_handlers +} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/nn_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/nn_param_handler.hpp index 0bd03d98..b13df172 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/nn_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/nn_param_handler.hpp @@ -38,6 +38,13 @@ class NNParamHandler : public BaseParamHandler { std::string getConfigPath(); template void declareParams(std::shared_ptr nn, std::shared_ptr imageManip) { + declareAndLogParam("i_disable_resize", false); + declareAndLogParam("i_enable_passthrough", false); + declareAndLogParam("i_enable_passthrough_depth", false); + declareAndLogParam("i_get_base_device_timestamp", false); + declareAndLogParam("i_update_ros_base_time_on_ros_msg", false); + declareAndLogParam("i_max_q_size", 30); + auto nn_path = getParam("i_nn_config_path"); auto nnPath = getConfigPath(); using json = nlohmann::json; std::ifstream f(nnPath); diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp index a8e591a3..3ce45ac7 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp @@ -28,6 +28,7 @@ class SensorParamHandler : public BaseParamHandler { public: explicit SensorParamHandler(ros::NodeHandle node, const std::string& name); ~SensorParamHandler(); + void declareCommonParams(); void declareParams(std::shared_ptr monoCam, dai::CameraBoardSocket socket, dai_nodes::sensor_helpers::ImageSensor sensor, @@ -41,6 +42,8 @@ class SensorParamHandler : public BaseParamHandler { private: std::unordered_map monoResolutionMap; std::unordered_map rgbResolutionMap; + std::unordered_map fSyncModeMap; + 
std::unordered_map cameraImageOrientationMap; }; } // namespace param_handlers } // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp index 19066ab8..54b3c411 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp @@ -10,6 +10,9 @@ #include "depthai_ros_driver/param_handlers/base_param_handler.hpp" #include "depthai_ros_driver/parametersConfig.h" +namespace dai { +class CameraFeatures; +} namespace ros { class NodeHandle; } // namespace ros @@ -20,8 +23,9 @@ class StereoParamHandler : public BaseParamHandler { public: explicit StereoParamHandler(ros::NodeHandle node, const std::string& name); ~StereoParamHandler(); - void declareParams(std::shared_ptr stereo, const std::string& rightName); + void declareParams(std::shared_ptr stereo, const std::vector& camFeatures); dai::CameraControl setRuntimeParams(parametersConfig& config) override; + void updateSocketsFromParams(dai::CameraBoardSocket& left, dai::CameraBoardSocket& right); private: std::unordered_map depthPresetMap; diff --git a/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_pipeline.hpp b/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_pipeline.hpp index a17e44f4..a780e3dd 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_pipeline.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_pipeline.hpp @@ -50,7 +50,6 @@ class BasePipeline { protected: BasePipeline(){}; - const std::string alphabet = "abcdefghijklmnopqrstuvwxyz"; std::unordered_map nnTypeMap = { {"", NNType::None}, {"NONE", NNType::None}, diff --git a/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp b/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp index b475b84e..1ea947f5 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp @@ -24,12 +24,29 @@ class PipelineGenerator { public: PipelineGenerator(){}; ~PipelineGenerator() = default; + /* + * @brief Validates the pipeline type. If the pipeline type is not valid for the number of sensors, it will be changed to the default type. + * + * @param node The node + * @param[in] type The type + * @param[in] sensorNum The sensor number + * + * @return The validated pipeline type. + */ PipelineType validatePipeline(PipelineType type, int sensorNum); - std::unique_ptr createNN(ros::NodeHandle node, std::shared_ptr pipeline, dai_nodes::BaseNode& daiNode); - std::unique_ptr createSpatialNN(ros::NodeHandle node, - std::shared_ptr pipeline, - dai_nodes::BaseNode& daiNode, - dai_nodes::BaseNode& daiStereoNode); + + /* + * @brief Creates the pipeline by using a plugin. Plugin types need to be of type depthai_ros_driver::pipeline_gen::BasePipeline. + * + * @param node The node + * @param device The device + * @param pipeline The pipeline + * @param[in] pipelineType The pipeline type name (plugin name or one of the default types) + * @param[in] nnType The neural network type (none, rgb, spatial) + * @param[in] enableImu Indicates if IMU is enabled + * + * @return Vector BaseNodes created. 
+     */
+    std::vector<std::unique_ptr<dai_nodes::BaseNode>> createPipeline(ros::NodeHandle node,
+                                                                     std::shared_ptr<dai::Device> device,
+                                                                     std::shared_ptr<dai::Pipeline> pipeline,
diff --git a/depthai_ros_driver/include/depthai_ros_driver/utils.hpp b/depthai_ros_driver/include/depthai_ros_driver/utils.hpp
index b99730ec..a7962aca 100644
--- a/depthai_ros_driver/include/depthai_ros_driver/utils.hpp
+++ b/depthai_ros_driver/include/depthai_ros_driver/utils.hpp
@@ -5,6 +5,12 @@
 #include
 #include
 #include
+#include
+
+namespace dai {
+enum class CameraBoardSocket;
+struct CameraFeatures;
+}  // namespace dai
 
 namespace depthai_ros_driver {
 namespace utils {
@@ -23,5 +29,6 @@ T getValFromMap(const std::string& name, const std::unordered_map<std::string, T>& map) {
+std::string getSocketName(dai::CameraBoardSocket socket, std::vector<dai::CameraFeatures> camFeatures);
 }  // namespace utils
 }  // namespace depthai_ros_driver
\ No newline at end of file
diff --git a/depthai_ros_driver/launch/camera.launch b/depthai_ros_driver/launch/camera.launch
index c34b4966..2d6241f8 100644
--- a/depthai_ros_driver/launch/camera.launch
+++ b/depthai_ros_driver/launch/camera.launch
@@ -1,10 +1,14 @@
@@ -21,9 +25,24 @@
@@ -36,7 +55,7 @@
[launch XML content not recoverable from this copy]
diff --git a/depthai_ros_driver/launch/rgbd_pcl.launch b/depthai_ros_driver/launch/rgbd_pcl.launch
index f9962e5d..e116b26e 100644
--- a/depthai_ros_driver/launch/rgbd_pcl.launch
+++ b/depthai_ros_driver/launch/rgbd_pcl.launch
@@ -5,7 +5,7 @@
[launch XML content not recoverable from this copy]
diff --git a/depthai_ros_driver/package.xml b/depthai_ros_driver/package.xml
index 24e299aa..6f769c16 100644
--- a/depthai_ros_driver/package.xml
+++ b/depthai_ros_driver/package.xml
@@ -33,6 +33,7 @@
   <depend>depthai_examples</depend>
   <depend>nodelet</depend>
   <depend>pluginlib</depend>
+  <depend>diagnostic_updater</depend>
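The camera.cpp changes below subscribe to /diagnostics and restart the device when the sys_logger task (added elsewhere in this patch) reports ERROR; diagnostic_updater names each status "<node name>: <task name>", which is what diagCB() matches against. For reference, a minimal producer of such a status (a sketch; node and message strings are illustrative, not part of this patch):

// Minimal diagnostic_updater producer (illustrative; not part of this patch).
#include <diagnostic_msgs/DiagnosticStatus.h>
#include <diagnostic_updater/diagnostic_updater.h>
#include <ros/ros.h>

int main(int argc, char** argv) {
    ros::init(argc, argv, "sys_logger_demo");
    ros::NodeHandle nh;
    diagnostic_updater::Updater updater;
    updater.setHardwareID("oak");
    // Publishes a status named "<node name>: sys_logger" on /diagnostics.
    updater.add("sys_logger", [](diagnostic_updater::DiagnosticStatusWrapper& stat) {
        stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, "XLink connection lost");
    });
    ros::Timer timer = nh.createTimer(ros::Duration(1.0), [&updater](const ros::TimerEvent&) { updater.update(); });
    ros::spin();
    return 0;
}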
ph->getParam("i_tf_custom_urdf_location"), + ph->getParam("i_tf_custom_xacro_args")); + } + diagSub = pNH.subscribe("/diagnostics", 1, &Camera::diagCB, this); +} + +void Camera::diagCB(const diagnostic_msgs::DiagnosticArray::ConstPtr& msg) { + for(const auto& status : msg->status) { + std::string nodeletName = pNH.getNamespace() + "_nodelet_manager"; + nodeletName.erase(nodeletName.begin()); + if(status.name == nodeletName + std::string(": sys_logger")) { + if(status.level == diagnostic_msgs::DiagnosticStatus::ERROR) { + ROS_ERROR("Camera diagnostics error: %s", status.message.c_str()); + restart(); + } + } + } +} + +void Camera::start() { + ROS_INFO("Starting camera."); + if(!camRunning) { + onConfigure(); + } else { + ROS_INFO("Camera already running!."); + } +} + +void Camera::stop() { + ROS_INFO("Stopping camera."); + if(camRunning) { + for(const auto& node : daiNodes) { + node->closeQueues(); + } + daiNodes.clear(); + device.reset(); + pipeline.reset(); + camRunning = false; + } else { + ROS_INFO("Camera already stopped!"); + } +} + +void Camera::restart() { + ROS_INFO("Restarting camera"); + stop(); + start(); + if(camRunning) { + return; + } else { + ROS_ERROR("Restarting camera failed."); + } } void Camera::saveCalib() { @@ -61,28 +136,12 @@ bool Camera::savePipelineCB(Trigger::Request& /*req*/, Trigger::Response& res) { } bool Camera::startCB(Trigger::Request& /*req*/, Trigger::Response& res) { - ROS_INFO("Starting camera!"); - if(!camRunning) { - onConfigure(); - } else { - ROS_INFO("Camera already running!."); - } + start(); res.success = true; return true; } bool Camera::stopCB(Trigger::Request& /*req*/, Trigger::Response& res) { - ROS_INFO("Stopping camera!"); - if(camRunning) { - for(const auto& node : daiNodes) { - node->closeQueues(); - } - daiNodes.clear(); - device.reset(); - pipeline.reset(); - camRunning = false; - } else { - ROS_INFO("Camera already stopped!"); - } + stop(); res.success = true; return true; } @@ -192,14 +251,13 @@ void Camera::startDevice() { } else if(!usb_id.empty() && info.name == usb_id) { ROS_INFO("Connecting to the camera using USB ID: %s", usb_id.c_str()); if(info.state == X_LINK_UNBOOTED || info.state == X_LINK_BOOTLOADER) { - device = std::make_shared(info); + device = std::make_shared(info, speed); camRunning = true; } else if(info.state == X_LINK_BOOTED) { throw std::runtime_error("Device is already booted in different process."); } } else { - ROS_ERROR("Device info: MXID: %s, Name: %s", info.getMxId().c_str(), info.name.c_str()); - throw std::runtime_error("Unable to connect to the device, check if parameters match with given info."); + ROS_INFO("Ignoring device info: MXID: %s, Name: %s", info.getMxId().c_str(), info.name.c_str()); } } } diff --git a/depthai_ros_driver/src/dai_nodes/base_node.cpp b/depthai_ros_driver/src/dai_nodes/base_node.cpp index be076223..58d1335f 100644 --- a/depthai_ros_driver/src/dai_nodes/base_node.cpp +++ b/depthai_ros_driver/src/dai_nodes/base_node.cpp @@ -28,8 +28,31 @@ std::string BaseNode::getTFPrefix(const std::string& frameName) { prefix.erase(0, 1); return prefix; } -dai::Node::Input BaseNode::getInput(int /*linkType = 0*/) { +dai::Node::Input BaseNode::getInput(int /*linkType*/) { throw(std::runtime_error("getInput() not implemented")); } +void BaseNode::closeQueues() { + throw(std::runtime_error("closeQueues() not implemented")); +} + +void BaseNode::setNames() { + throw(std::runtime_error("setNames() not implemented")); +} + +void BaseNode::setXinXout(std::shared_ptr /*pipeline*/) { + 
throw(std::runtime_error("setXinXout() not implemented")); +} + +void BaseNode::setupQueues(std::shared_ptr /*device*/) { + throw(std::runtime_error("setupQueues() not implemented")); +} + +void BaseNode::link(dai::Node::Input /*in*/, int /*linkType = 0*/) { + throw(std::runtime_error("link() not implemented")); +} + +void BaseNode::updateParams(parametersConfig& /*config*/) { + return; +} } // namespace dai_nodes } // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/src/dai_nodes/nn/segmentation.cpp b/depthai_ros_driver/src/dai_nodes/nn/segmentation.cpp index a798ab91..2e9b975c 100644 --- a/depthai_ros_driver/src/dai_nodes/nn/segmentation.cpp +++ b/depthai_ros_driver/src/dai_nodes/nn/segmentation.cpp @@ -66,7 +66,7 @@ void Segmentation::setupQueues(std::shared_ptr device) { *imageConverter, device, dai::CameraBoardSocket::CAM_A, imageManip->initialConfig.getResizeWidth(), imageManip->initialConfig.getResizeWidth())); ptPub = it.advertiseCamera(getName() + "/passthrough/image_raw", 1); - ptQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *imageConverter, ptPub, infoManager)); + ptQ->addCallback(std::bind(sensor_helpers::basicCameraPub, std::placeholders::_1, std::placeholders::_2, *imageConverter, ptPub, infoManager)); } } diff --git a/depthai_ros_driver/src/dai_nodes/sensors/feature_tracker.cpp b/depthai_ros_driver/src/dai_nodes/sensors/feature_tracker.cpp new file mode 100644 index 00000000..7bb4151c --- /dev/null +++ b/depthai_ros_driver/src/dai_nodes/sensors/feature_tracker.cpp @@ -0,0 +1,82 @@ +#include "depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp" + +#include "depthai/device/DataQueue.hpp" +#include "depthai/device/Device.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai/pipeline/node/FeatureTracker.hpp" +#include "depthai/pipeline/node/XLinkOut.hpp" +#include "depthai_bridge/TrackedFeaturesConverter.hpp" +#include "depthai_ros_driver/param_handlers/feature_tracker_param_handler.hpp" +#include "depthai_ros_driver/utils.hpp" +#include "depthai_ros_msgs/TrackedFeatures.h" +#include "ros/node_handle.h" + +namespace depthai_ros_driver { +namespace dai_nodes { +FeatureTracker::FeatureTracker(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline) + : BaseNode(daiNodeName, node, pipeline) { + ROS_DEBUG("Creating node %s", daiNodeName.c_str()); + getParentName(daiNodeName); + setNames(); + featureNode = pipeline->create(); + ph = std::make_unique(node, daiNodeName); + ph->declareParams(featureNode); + setXinXout(pipeline); + ROS_DEBUG("Node %s created", daiNodeName.c_str()); +} +FeatureTracker::~FeatureTracker() = default; + +void FeatureTracker::getParentName(const std::string& fullName) { + auto endIdx = fullName.find("_"); + parentName = fullName.substr(0, endIdx); +} + +void FeatureTracker::setNames() { + featureQName = parentName + getName() + "_queue"; +} + +void FeatureTracker::setXinXout(std::shared_ptr pipeline) { + xoutFeature = pipeline->create(); + xoutFeature->setStreamName(featureQName); + featureNode->outputFeatures.link(xoutFeature->input); +} + +void FeatureTracker::setupQueues(std::shared_ptr device) { + featureQ = device->getOutputQueue(featureQName, ph->getParam("i_max_q_size"), false); + auto tfPrefix = getTFPrefix(parentName); + featureConverter = std::make_unique(tfPrefix + "_frame", ph->getParam("i_get_base_device_timestamp")); + featureConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); + 
+ featurePub = getROSNode().advertise(getName() + "/tracked_features", 10); + featureQ->addCallback(std::bind(&FeatureTracker::featureQCB, this, std::placeholders::_1, std::placeholders::_2)); +} + +void FeatureTracker::closeQueues() { + featureQ->close(); +} + +void FeatureTracker::featureQCB(const std::string& /*name*/, const std::shared_ptr& data) { + auto featureData = std::dynamic_pointer_cast(data); + std::deque deq; + featureConverter->toRosMsg(featureData, deq); + while(deq.size() > 0) { + auto currMsg = deq.front(); + featurePub.publish(currMsg); + deq.pop_front(); + } +} + +void FeatureTracker::link(dai::Node::Input in, int /*linkType*/) { + featureNode->outputFeatures.link(in); +} + +dai::Node::Input FeatureTracker::getInput(int /*linkType*/) { + return featureNode->inputImage; +} + +void FeatureTracker::updateParams(parametersConfig& config) { + ph->setRuntimeParams(config); +} + +} // namespace dai_nodes +} // namespace depthai_ros_driver diff --git a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp index 07e6b8b3..22d273bb 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp @@ -9,7 +9,9 @@ #include "depthai/pipeline/node/XLinkIn.hpp" #include "depthai/pipeline/node/XLinkOut.hpp" #include "depthai_bridge/ImageConverter.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/sensor_param_handler.hpp" +#include "depthai_ros_driver/utils.hpp" #include "image_transport/camera_publisher.h" #include "image_transport/image_transport.h" #include "ros/node_handle.h" @@ -57,10 +59,18 @@ void Mono::setXinXout(std::shared_ptr pipeline) { void Mono::setupQueues(std::shared_ptr device) { if(ph->getParam("i_publish_topic")) { monoQ = device->getOutputQueue(monoQName, ph->getParam("i_max_q_size"), false); - auto tfPrefix = getTFPrefix(getName()); + auto tfPrefix = getTFPrefix( + utils::getSocketName(static_cast(ph->getParam("i_board_socket_id")), device->getConnectedCameraFeatures())); imageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); - monoPub = it.advertiseCamera(getName() + "/image_raw", 1); + if(ph->getParam("i_low_bandwidth")) { + imageConverter->convertFromBitstream(dai::RawImgFrame::Type::GRAY8); + } + if(ph->getParam("i_add_exposure_offset")) { + auto offset = static_cast(ph->getParam("i_exposure_offset")); + imageConverter->addExposureOffset(offset); + } + monoPubIT = it.advertiseCamera(getName() + "/image_raw", 1); infoManager = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + getName()); if(ph->getParam("i_calibration_file").empty()) { infoManager->setCameraInfo(sensor_helpers::getCalibInfo(*imageConverter, @@ -71,17 +81,13 @@ void Mono::setupQueues(std::shared_ptr device) { } else { infoManager->loadCameraInfo(ph->getParam("i_calibration_file")); } - if(ph->getParam("i_low_bandwidth")) { - monoQ->addCallback(std::bind(sensor_helpers::compressedImgCB, - std::placeholders::_1, - std::placeholders::_2, - *imageConverter, - monoPub, - infoManager, - dai::RawImgFrame::Type::GRAY8)); - } else { - monoQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *imageConverter, monoPub, infoManager)); - } + monoQ->addCallback(std::bind(sensor_helpers::cameraPub, + std::placeholders::_1, + std::placeholders::_2, + *imageConverter, + 
monoPubIT, + infoManager, + ph->getParam("i_enable_lazy_publisher"))); } controlQ = device->getInputQueue(controlQName); } diff --git a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp index 0121c216..344c9903 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp @@ -11,6 +11,7 @@ #include "depthai_bridge/ImageConverter.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/sensor_param_handler.hpp" +#include "depthai_ros_driver/utils.hpp" #include "image_transport/camera_publisher.h" #include "image_transport/image_transport.h" #include "ros/node_handle.h" @@ -21,7 +22,7 @@ RGB::RGB(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline, dai::CameraBoardSocket socket = dai::CameraBoardSocket::CAM_A, - sensor_helpers::ImageSensor sensor = {"IMX378", {"12mp", "4k"}, true}, + sensor_helpers::ImageSensor sensor = {"IMX378", "4k", {"12mp", "4k"}, true}, bool publish = true) : BaseNode(daiNodeName, node, pipeline), it(node) { ROS_DEBUG("Creating node %s", daiNodeName.c_str()); @@ -69,10 +70,18 @@ void RGB::setXinXout(std::shared_ptr pipeline) { void RGB::setupQueues(std::shared_ptr device) { if(ph->getParam("i_publish_topic")) { - auto tfPrefix = getTFPrefix(getName()); + auto tfPrefix = getTFPrefix( + utils::getSocketName(static_cast(ph->getParam("i_board_socket_id")), device->getConnectedCameraFeatures())); infoManager = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + getName()); imageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); + if(ph->getParam("i_low_bandwidth")) { + imageConverter->convertFromBitstream(dai::RawImgFrame::Type::BGR888i); + } + if(ph->getParam("i_add_exposure_offset")) { + auto offset = static_cast(ph->getParam("i_exposure_offset")); + imageConverter->addExposureOffset(offset); + } if(ph->getParam("i_calibration_file").empty()) { infoManager->setCameraInfo(sensor_helpers::getCalibInfo(*imageConverter, device, @@ -82,23 +91,19 @@ void RGB::setupQueues(std::shared_ptr device) { } else { infoManager->loadCameraInfo(ph->getParam("i_calibration_file")); } - rgbPub = it.advertiseCamera(getName() + "/image_raw", 1); + rgbPubIT = it.advertiseCamera(getName() + "/image_raw", 1); colorQ = device->getOutputQueue(ispQName, ph->getParam("i_max_q_size"), false); - if(ph->getParam("i_low_bandwidth")) { - colorQ->addCallback(std::bind(sensor_helpers::compressedImgCB, - std::placeholders::_1, - std::placeholders::_2, - *imageConverter, - rgbPub, - infoManager, - dai::RawImgFrame::Type::BGR888i)); - } else { - colorQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *imageConverter, rgbPub, infoManager)); - } + colorQ->addCallback(std::bind(sensor_helpers::cameraPub, + std::placeholders::_1, + std::placeholders::_2, + *imageConverter, + rgbPubIT, + infoManager, + ph->getParam("i_enable_lazy_publisher"))); } if(ph->getParam("i_enable_preview")) { previewQ = device->getOutputQueue(previewQName, ph->getParam("i_max_q_size"), false); - previewPub = it.advertiseCamera(getName() + "/preview/image_raw", 1); + previewPubIT = it.advertiseCamera(getName() + "/preview/image_raw", 1); previewInfoManager = std::make_shared(ros::NodeHandle(getROSNode(), "/" + previewQName), previewQName); auto tfPrefix = getTFPrefix(getName()); 
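
The callback bound above gates publishing on the subscriber count; the check reduces to a single predicate (shouldPublish is a hypothetical helper name used here for illustration, not part of the driver):

    #include <image_transport/camera_publisher.h>
    #include <ros/ros.h>

    // Publish only while ROS is up and, when lazy publishing is enabled,
    // only if at least one subscriber is connected.
    bool shouldPublish(const image_transport::CameraPublisher& pub, bool lazyPub) {
        return ros::ok() && (!lazyPub || pub.getNumSubscribers() > 0);
    }
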
imageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); @@ -111,7 +116,8 @@ void RGB::setupQueues(std::shared_ptr device) { } else { infoManager->loadCameraInfo(ph->getParam("i_calibration_file")); } - previewQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *imageConverter, previewPub, previewInfoManager)); + previewQ->addCallback( + std::bind(sensor_helpers::basicCameraPub, std::placeholders::_1, std::placeholders::_2, *imageConverter, previewPubIT, previewInfoManager)); }; controlQ = device->getInputQueue(controlQName); } diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp index eed39a0f..2d183c9b 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp @@ -8,114 +8,46 @@ namespace depthai_ros_driver { namespace dai_nodes { namespace sensor_helpers { -void ImageSensor::getSizeFromResolution(const dai::ColorCameraProperties::SensorResolution& res, int& width, int& height) { - switch(res) { - case dai::ColorCameraProperties::SensorResolution::THE_720_P: { - width = 1280; - height = 720; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_800_P: { - width = 1280; - height = 800; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_1080_P: { - width = 1920; - height = 1080; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_4_K: { - width = 3840; - height = 2160; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_12_MP: { - height = 4056; - width = 3040; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_1200_P: { - height = 1920; - width = 1200; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_5_MP: { - height = 2592; - width = 1944; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_13_MP: { - height = 4208; - width = 3120; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_4000X3000: { - height = 4000; - width = 3000; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_5312X6000: { - height = 5312; - width = 6000; - break; - } - case dai::ColorCameraProperties::SensorResolution::THE_48_MP: { - height = 8000; - width = 6000; - break; - } - default: { - throw std::runtime_error("Resolution not supported!"); - } - } -} -std::vector availableSensors = { - {"IMX378", {"12mp", "4k"}, true}, - {"OV9282", {"800P", "720p", "400p"}, false}, - {"OV9782", {"800P", "720p", "400p"}, true}, - {"OV9281", {"800P", "720p", "400p"}, true}, - {"IMX214", {"13mp", "12mp", "4k", "1080p"}, true}, - {"IMX412", {"13mp", "12mp", "4k", "1080p"}, true}, - {"OV7750", {"480P", "400p"}, false}, - {"OV7251", {"480P", "400p"}, false}, - {"IMX477", {"12mp", "4k", "1080p"}, true}, - {"IMX577", {"12mp", "4k", "1080p"}, true}, - {"AR0234", {"1200P"}, true}, - {"IMX582", {"48mp", "12mp", "4k"}, true}, - {"LCM48", {"48mp", "12mp", "4k"}, true}, -}; -void compressedImgCB(const std::string& /*name*/, - const std::shared_ptr& data, - dai::ros::ImageConverter& converter, - image_transport::CameraPublisher& pub, - std::shared_ptr infoManager, - dai::RawImgFrame::Type dataType) { - auto img = std::dynamic_pointer_cast(data); - std::deque deq; - auto info = infoManager->getCameraInfo(); - converter.toRosMsgFromBitStream(img, deq, dataType, info); - while(deq.size() > 0) { - auto currMsg = deq.front(); - info.header = currMsg.header; - 
pub.publish(currMsg, info); - deq.pop_front(); +std::vector availableSensors{{"IMX378", "1080p", {"12mp", "4k", "1080p"}, true}, + {"OV9282", "800p", {"800p", "720p", "400p"}, false}, + {"OV9782", "800p", {"800p", "720p", "400p"}, true}, + {"OV9281", "800p", {"800p", "720p", "400p"}, true}, + {"IMX214", "1080p", {"13mp", "12mp", "4k", "1080p"}, true}, + {"IMX412", "1080p", {"13mp", "12mp", "4k", "1080p"}, true}, + {"OV7750", "480p", {"480p", "400p"}, false}, + {"OV7251", "480p", {"480p", "400p"}, false}, + {"IMX477", "1080p", {"12mp", "4k", "1080p"}, true}, + {"IMX577", "1080p", {"12mp", "4k", "1080p"}, true}, + {"AR0234", "1200p", {"1200p"}, true}, + {"IMX582", "4k", {"48mp", "12mp", "4k"}, true}, + {"LCM48", "4k", {"48mp", "12mp", "4k"}, true}}; + +void basicCameraPub(const std::string& /*name*/, + const std::shared_ptr& data, + dai::ros::ImageConverter& converter, + image_transport::CameraPublisher& pub, + std::shared_ptr infoManager) { + if(ros::ok() && (pub.getNumSubscribers() > 0)) { + auto img = std::dynamic_pointer_cast(data); + auto info = infoManager->getCameraInfo(); + auto rawMsg = converter.toRosMsgRawPtr(img); + info.header = rawMsg.header; + pub.publish(rawMsg, info); } } -void imgCB(const std::string& /*name*/, - const std::shared_ptr& data, - dai::ros::ImageConverter& converter, - image_transport::CameraPublisher& pub, - std::shared_ptr infoManager) { - auto img = std::dynamic_pointer_cast(data); - std::deque deq; - auto info = infoManager->getCameraInfo(); - converter.toRosMsg(img, deq); - while(deq.size() > 0) { - auto currMsg = deq.front(); - info.header = currMsg.header; - pub.publish(currMsg, info); - deq.pop_front(); + +void cameraPub(const std::string& /*name*/, + const std::shared_ptr& data, + dai::ros::ImageConverter& converter, + image_transport::CameraPublisher& pub, + std::shared_ptr infoManager, + bool lazyPub) { + if(ros::ok() && (!lazyPub || pub.getNumSubscribers() > 0)) { + auto img = std::dynamic_pointer_cast(data); + auto info = infoManager->getCameraInfo(); + auto rawMsg = converter.toRosMsgRawPtr(img, info); + info.header = rawMsg.header; + pub.publish(rawMsg, info); } } sensor_msgs::CameraInfo getCalibInfo( diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp index 98e29d36..7b6f1136 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp @@ -4,6 +4,7 @@ #include "depthai/pipeline/Pipeline.hpp" #include "depthai/pipeline/node/XLinkIn.hpp" #include "depthai_bridge/ImageConverter.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp" #include "depthai_ros_driver/dai_nodes/sensors/mono.hpp" #include "depthai_ros_driver/dai_nodes/sensors/rgb.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" @@ -57,6 +58,10 @@ SensorWrapper::SensorWrapper(const std::string& daiNodeName, sensorNode = std::make_unique(daiNodeName, node, pipeline, socket, (*sensorIt), publish); } } + if(ph->getParam("i_enable_feature_tracker")) { + featureTrackerNode = std::make_unique(daiNodeName + std::string("_feature_tracker"), node, pipeline); + sensorNode->link(featureTrackerNode->getInput()); + } ROS_DEBUG("Base node %s created", daiNodeName.c_str()); } SensorWrapper::~SensorWrapper() = default; @@ -88,6 +93,9 @@ void SensorWrapper::setupQueues(std::shared_ptr device) { if(!ph->getParam("i_disable_node")) { sensorNode->setupQueues(device); } + 
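
For reference, the sensor-to-FeatureTracker wiring that SensorWrapper performs here looks roughly like this in plain depthai-core (a standalone sketch under assumed defaults, not driver code):

    #include <depthai/depthai.hpp>

    int main() {
        dai::Pipeline pipeline;
        auto cam = pipeline.create<dai::node::MonoCamera>();
        auto tracker = pipeline.create<dai::node::FeatureTracker>();
        auto xout = pipeline.create<dai::node::XLinkOut>();
        xout->setStreamName("features");
        cam->out.link(tracker->inputImage);          // same link the wrapper makes
        tracker->outputFeatures.link(xout->input);   // same link setXinXout makes
        dai::Device device(pipeline);
        auto q = device.getOutputQueue("features", 8, false);
        auto feats = q->get<dai::TrackedFeatures>();  // blocks until one packet arrives
        return feats ? 0 : 1;
    }
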
if(ph->getParam("i_enable_feature_tracker")) { + featureTrackerNode->setupQueues(device); + } } void SensorWrapper::closeQueues() { if(ph->getParam("i_simulate_from_topic")) { @@ -96,6 +104,9 @@ void SensorWrapper::closeQueues() { if(!ph->getParam("i_disable_node")) { sensorNode->closeQueues(); } + if(ph->getParam("i_enable_feature_tracker")) { + featureTrackerNode->closeQueues(); + } } void SensorWrapper::link(dai::Node::Input in, int linkType) { diff --git a/depthai_ros_driver/src/dai_nodes/stereo.cpp b/depthai_ros_driver/src/dai_nodes/stereo.cpp index 55c2c6cf..08cb902e 100644 --- a/depthai_ros_driver/src/dai_nodes/stereo.cpp +++ b/depthai_ros_driver/src/dai_nodes/stereo.cpp @@ -9,6 +9,7 @@ #include "depthai/pipeline/node/VideoEncoder.hpp" #include "depthai/pipeline/node/XLinkOut.hpp" #include "depthai_bridge/ImageConverter.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/feature_tracker.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp" #include "depthai_ros_driver/param_handlers/stereo_param_handler.hpp" @@ -22,17 +23,27 @@ Stereo::Stereo(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline, std::shared_ptr device, - StereoSensorInfo leftInfo, - StereoSensorInfo rightInfo) - : BaseNode(daiNodeName, node, pipeline), it(node), leftSensInfo(leftInfo), rightSensInfo(rightInfo) { + dai::CameraBoardSocket leftSocket, + dai::CameraBoardSocket rightSocket) + : BaseNode(daiNodeName, node, pipeline), it(node) { ROS_DEBUG("Creating node %s", daiNodeName.c_str()); setNames(); - stereoCamNode = pipeline->create(); - left = std::make_unique(leftInfo.name, node, pipeline, device, leftInfo.socket, false); - right = std::make_unique(rightInfo.name, node, pipeline, device, rightInfo.socket, false); - ph = std::make_unique(node, daiNodeName); - ph->declareParams(stereoCamNode, rightInfo.name); + ph->updateSocketsFromParams(leftSocket, rightSocket); + auto features = device->getConnectedCameraFeatures(); + for(auto f : features) { + if(f.socket == leftSocket) { + leftSensInfo = f; + } else if(f.socket == rightSocket) { + rightSensInfo = f; + } else { + continue; + } + } + stereoCamNode = pipeline->create(); + left = std::make_unique(leftSensInfo.name, node, pipeline, device, leftSensInfo.socket, false); + right = std::make_unique(rightSensInfo.name, node, pipeline, device, rightSensInfo.socket, false); + ph->declareParams(stereoCamNode, features); setXinXout(pipeline); left->link(stereoCamNode->left); right->link(stereoCamNode->right); @@ -49,6 +60,7 @@ void Stereo::setXinXout(std::shared_ptr pipeline) { if(ph->getParam("i_publish_topic")) { xoutStereo = pipeline->create(); xoutStereo->setStreamName(stereoQName); + xoutStereo->input.setBlocking(false); if(ph->getParam("i_low_bandwidth")) { stereoEnc = sensor_helpers::createEncoder(pipeline, ph->getParam("i_low_bandwidth_quality")); stereoCamNode->disparity.link(stereoEnc->input); @@ -61,7 +73,7 @@ void Stereo::setXinXout(std::shared_ptr pipeline) { } } } - if(ph->getParam("i_publish_left_rect")) { + if(ph->getParam("i_publish_left_rect") || ph->getParam("i_publish_synced_rect_pair")) { xoutLeftRect = pipeline->create(); xoutLeftRect->setStreamName(leftRectQName); if(ph->getParam("i_left_rect_low_bandwidth")) { @@ -73,7 +85,7 @@ void Stereo::setXinXout(std::shared_ptr pipeline) { } } - if(ph->getParam("i_publish_right_rect")) { + if(ph->getParam("i_publish_right_rect") || ph->getParam("i_publish_synced_rect_pair")) { xoutRightRect = 
pipeline->create(); xoutRightRect->setStreamName(rightRectQName); if(ph->getParam("i_right_rect_low_bandwidth")) { @@ -84,57 +96,67 @@ void Stereo::setXinXout(std::shared_ptr pipeline) { stereoCamNode->rectifiedRight.link(xoutRightRect->input); } } + + if(ph->getParam("i_left_rect_enable_feature_tracker")) { + featureTrackerLeftR = std::make_unique(leftSensInfo.name + std::string("_rect_feature_tracker"), getROSNode(), pipeline); + + stereoCamNode->rectifiedLeft.link(featureTrackerLeftR->getInput()); + } + + if(ph->getParam("i_right_rect_enable_feature_tracker")) { + featureTrackerRightR = std::make_unique(rightSensInfo.name + std::string("_rect_feature_tracker"), getROSNode(), pipeline); + stereoCamNode->rectifiedRight.link(featureTrackerRightR->getInput()); + } } -void Stereo::setupLeftRectQueue(std::shared_ptr device) { - leftRectQ = device->getOutputQueue(leftRectQName, ph->getOtherNodeParam(leftSensInfo.name, "i_max_q_size"), false); - auto tfPrefix = getTFPrefix(leftSensInfo.name); - leftRectConv = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); - leftRectConv->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_left_rect_update_ros_base_time_on_ros_msg")); - leftRectIM = std::make_shared(ros::NodeHandle(getROSNode(), leftSensInfo.name), "/" + leftSensInfo.name + "/rect"); - auto info = sensor_helpers::getCalibInfo(*leftRectConv, - device, - leftSensInfo.socket, - ph->getOtherNodeParam(leftSensInfo.name, "i_width"), - ph->getOtherNodeParam(leftSensInfo.name, "i_height")); - leftRectIM->setCameraInfo(info); - leftRectPub = it.advertiseCamera(leftSensInfo.name + "/image_rect", 1); - dai::RawImgFrame::Type encType = dai::RawImgFrame::Type::GRAY8; - if(left->getSensorData().color) { - encType = dai::RawImgFrame::Type::BGR888i; - } - if(ph->getParam("i_left_rect_low_bandwidth")) { - leftRectQ->addCallback( - std::bind(sensor_helpers::compressedImgCB, std::placeholders::_1, std::placeholders::_2, *leftRectConv, leftRectPub, leftRectIM, encType)); - } else { - leftRectQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *leftRectConv, leftRectPub, leftRectIM)); +void Stereo::setupRectQueue(std::shared_ptr device, + dai::CameraFeatures& sensorInfo, + const std::string& queueName, + std::unique_ptr& conv, + std::shared_ptr& im, + std::shared_ptr& q, + image_transport::CameraPublisher& pubIT, + bool isLeft) { + auto tfPrefix = getTFPrefix(sensorInfo.name); + conv = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); + conv->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); + bool lowBandwidth = ph->getParam(isLeft ? "i_left_rect_low_bandwidth" : "i_right_rect_low_bandwidth"); + if(lowBandwidth) { + conv->convertFromBitstream(dai::RawImgFrame::Type::GRAY8); + } + bool addOffset = ph->getParam(isLeft ? "i_left_rect_add_exposure_offset" : "i_right_rect_add_exposure_offset"); + if(addOffset) { + auto offset = static_cast(ph->getParam(isLeft ? 
"i_left_rect_exposure_offset" : "i_right_rect_exposure_offset")); + conv->addExposureOffset(offset); + } + im = std::make_shared(ros::NodeHandle(getROSNode(), sensorInfo.name), "/" + sensorInfo.name + "/rect"); + + auto info = sensor_helpers::getCalibInfo( + *conv, device, sensorInfo.socket, ph->getOtherNodeParam(sensorInfo.name, "i_width"), ph->getOtherNodeParam(sensorInfo.name, "i_height")); + for(auto& d : info.D) { + d = 0.0; + } + + im->setCameraInfo(info); + + q = device->getOutputQueue(queueName, ph->getOtherNodeParam(sensorInfo.name, "i_max_q_size"), false); + + // if publish synced pair is set to true then we skip individual publishing of left and right rectified frames + bool addCallback = !ph->getParam("i_publish_synced_rect_pair"); + + pubIT = it.advertiseCamera(sensorInfo.name + "/image_rect", 1); + if(addCallback) { + q->addCallback(std::bind( + sensor_helpers::cameraPub, std::placeholders::_1, std::placeholders::_2, *conv, pubIT, im, ph->getParam("i_enable_lazy_publisher"))); } } +void Stereo::setupLeftRectQueue(std::shared_ptr device) { + setupRectQueue(device, leftSensInfo, leftRectQName, leftRectConv, leftRectIM, leftRectQ, leftRectPubIT, true); +} + void Stereo::setupRightRectQueue(std::shared_ptr device) { - rightRectQ = device->getOutputQueue(rightRectQName, ph->getOtherNodeParam(rightSensInfo.name, "i_max_q_size"), false); - auto tfPrefix = getTFPrefix(rightSensInfo.name); - rightRectConv = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); - rightRectConv->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_right_rect_update_ros_base_time_on_ros_msg")); - rightRectIM = - std::make_shared(ros::NodeHandle(getROSNode(), rightSensInfo.name), "/" + rightSensInfo.name + "/rect"); - auto info = sensor_helpers::getCalibInfo(*rightRectConv, - device, - rightSensInfo.socket, - ph->getOtherNodeParam(rightSensInfo.name, "i_width"), - ph->getOtherNodeParam(rightSensInfo.name, "i_height")); - rightRectIM->setCameraInfo(info); - rightRectPub = it.advertiseCamera(rightSensInfo.name + "/image_rect", 1); - dai::RawImgFrame::Type encType = dai::RawImgFrame::Type::GRAY8; - if(right->getSensorData().color) { - encType = dai::RawImgFrame::Type::BGR888i; - } - if(ph->getParam("i_right_rect_low_bandwidth")) { - rightRectQ->addCallback( - std::bind(sensor_helpers::compressedImgCB, std::placeholders::_1, std::placeholders::_2, *rightRectConv, rightRectPub, rightRectIM, encType)); - } else { - rightRectQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *rightRectConv, rightRectPub, rightRectIM)); - } + setupRectQueue(device, rightSensInfo, rightRectQName, rightRectConv, rightRectIM, rightRectQ, rightRectPubIT, false); } void Stereo::setupStereoQueue(std::shared_ptr device) { @@ -147,7 +169,17 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { } stereoConv = std::make_unique(tfPrefix + "_camera_optical_frame", false); stereoConv->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); - stereoPub = it.advertiseCamera(getName() + "/image_raw", 1); + if(ph->getParam("i_low_bandwidth")) { + stereoConv->convertFromBitstream(dai::RawImgFrame::Type::RAW8); + } + if(!ph->getParam("i_output_disparity")) { + stereoConv->convertDispToDepth(); + } + if(ph->getParam("i_add_exposure_offset")) { + auto offset = static_cast(ph->getParam("i_exposure_offset")); + stereoConv->addExposureOffset(offset); + } + stereoIM = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + 
getName());
        auto info = sensor_helpers::getCalibInfo(*stereoConv,
                                                 device,
@@ -155,28 +187,46 @@
                                                 ph->getParam<int>("i_width"),
                                                 ph->getParam<int>("i_height"));
        auto calibHandler = device->readCalibration();
-        info.P[3] = calibHandler.getBaselineDistance() * 10.0;  // baseline in mm
+
+        // remove distortion since image is rectified
         for(auto& d : info.D) {
             d = 0.0;
         }
+        for(auto& r : info.R) {
+            r = 0.0;
+        }
+        info.R[0] = info.R[4] = info.R[8] = 1.0;
+        info.P[3] = calibHandler.getBaselineDistance(leftSensInfo.socket, rightSensInfo.socket);
         stereoIM->setCameraInfo(info);
-        if(ph->getParam<bool>("i_low_bandwidth")) {
-            if(ph->getParam<bool>("i_output_disparity")) {
-                stereoQ->addCallback(std::bind(sensor_helpers::compressedImgCB,
-                                               std::placeholders::_1,
-                                               std::placeholders::_2,
-                                               *stereoConv,
-                                               stereoPub,
-                                               stereoIM,
-                                               dai::RawImgFrame::Type::GRAY8));
-            } else {
-                // converting disp->depth
-                stereoQ->addCallback(std::bind(
-                    sensor_helpers::compressedImgCB, std::placeholders::_1, std::placeholders::_2, *stereoConv, stereoPub, stereoIM, dai::RawImgFrame::Type::RAW8));
-            }
+        stereoPubIT = it.advertiseCamera(getName() + "/image_raw", 1);
+        stereoQ->addCallback(std::bind(sensor_helpers::cameraPub,
+                                       std::placeholders::_1,
+                                       std::placeholders::_2,
+                                       *stereoConv,
+                                       stereoPubIT,
+                                       stereoIM,
+                                       ph->getParam<bool>("i_enable_lazy_publisher")));
+}
+
+void Stereo::syncTimerCB() {
+    auto left = leftRectQ->get<dai::ImgFrame>();
+    auto right = rightRectQ->get<dai::ImgFrame>();
+    if(left->getSequenceNum() != right->getSequenceNum()) {
+        ROS_WARN("Left and right rectified frames are not synchronized!");
     } else {
-        stereoQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *stereoConv, stereoPub, stereoIM));
+        bool lazyPub = ph->getParam<bool>("i_enable_lazy_publisher");
+        if(ros::ok() && (!lazyPub || leftRectPubIT.getNumSubscribers() > 0 || rightRectPubIT.getNumSubscribers() > 0)) {
+            auto leftInfo = leftRectIM->getCameraInfo();
+            auto leftRawMsg = leftRectConv->toRosMsgRawPtr(left);
+            leftInfo.header = leftRawMsg.header;
+            auto rightInfo = rightRectIM->getCameraInfo();
+            auto rightRawMsg = rightRectConv->toRosMsgRawPtr(right);
+            rightRawMsg.header.stamp = leftRawMsg.header.stamp;
+            rightInfo.header = rightRawMsg.header;
+            leftRectPubIT.publish(leftRawMsg, leftInfo);
+            rightRectPubIT.publish(rightRawMsg, rightInfo);
+        }
     }
 }
@@ -186,12 +236,23 @@ void Stereo::setupQueues(std::shared_ptr<dai::Device> device) {
     if(ph->getParam<bool>("i_publish_topic")) {
         setupStereoQueue(device);
     }
-    if(ph->getParam<bool>("i_publish_left_rect")) {
+    if(ph->getParam<bool>("i_publish_left_rect") || ph->getParam<bool>("i_publish_synced_rect_pair")) {
         setupLeftRectQueue(device);
     }
-    if(ph->getParam<bool>("i_publish_right_rect")) {
+    if(ph->getParam<bool>("i_publish_right_rect") || ph->getParam<bool>("i_publish_synced_rect_pair")) {
         setupRightRectQueue(device);
     }
+    if(ph->getParam<bool>("i_publish_synced_rect_pair")) {
+        int timerPeriod = 1000.0 / ph->getOtherNodeParam<double>(leftSensInfo.name, "i_fps");
+        ROS_INFO("Setting up stereo pair sync timer with period %d ms based on left sensor FPS.", timerPeriod);
+        syncTimer = std::make_shared<ros::Timer>(getROSNode().createTimer(ros::Duration(timerPeriod / 1000.0), std::bind(&Stereo::syncTimerCB, this)));
+    }
+    if(ph->getParam<bool>("i_left_rect_enable_feature_tracker")) {
+        featureTrackerLeftR->setupQueues(device);
+    }
+    if(ph->getParam<bool>("i_right_rect_enable_feature_tracker")) {
+        featureTrackerRightR->setupQueues(device);
+    }
 }
 void Stereo::closeQueues() {
     left->closeQueues();
@@ -199,12 +260,20 @@
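
On the consumer side, the synced rectified pair can be matched with an exact-time synchronizer, since the right image is restamped with the left timestamp above (a sketch; topic names assume an /oak namespace with sensors named left and right):

    #include <boost/bind/bind.hpp>
    #include <message_filters/subscriber.h>
    #include <message_filters/sync_policies/exact_time.h>
    #include <message_filters/synchronizer.h>
    #include <ros/ros.h>
    #include <sensor_msgs/Image.h>

    using SyncPolicy = message_filters::sync_policies::ExactTime<sensor_msgs::Image, sensor_msgs::Image>;

    void pairCB(const sensor_msgs::ImageConstPtr& left, const sensor_msgs::ImageConstPtr& right) {
        ROS_INFO("Synced pair at stamp %f", left->header.stamp.toSec());
    }

    int main(int argc, char** argv) {
        ros::init(argc, argv, "rect_pair_listener");
        ros::NodeHandle nh;
        message_filters::Subscriber<sensor_msgs::Image> leftSub(nh, "/oak/left/image_rect", 5);
        message_filters::Subscriber<sensor_msgs::Image> rightSub(nh, "/oak/right/image_rect", 5);
        message_filters::Synchronizer<SyncPolicy> sync(SyncPolicy(10), leftSub, rightSub);
        sync.registerCallback(boost::bind(&pairCB, boost::placeholders::_1, boost::placeholders::_2));
        ros::spin();
        return 0;
    }
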
if(ph->getParam("i_publish_topic")) { stereoQ->close(); } - if(ph->getParam("i_publish_left_rect")) { + if(ph->getParam("i_publish_left_rect") || ph->getParam("i_publish_synced_rect_pair")) { + syncTimer.reset(); leftRectQ->close(); } - if(ph->getParam("i_publish_right_rect")) { + if(ph->getParam("i_publish_right_rect") || ph->getParam("i_publish_synced_rect_pair")) { + syncTimer.reset(); rightRectQ->close(); } + if(ph->getParam("i_left_rect_enable_feature_tracker")) { + featureTrackerLeftR->closeQueues(); + } + if(ph->getParam("i_right_rect_enable_feature_tracker")) { + featureTrackerRightR->closeQueues(); + } } void Stereo::link(dai::Node::Input in, int /*linkType*/) { diff --git a/depthai_ros_driver/src/dai_nodes/sys_logger.cpp b/depthai_ros_driver/src/dai_nodes/sys_logger.cpp new file mode 100644 index 00000000..fc587d1d --- /dev/null +++ b/depthai_ros_driver/src/dai_nodes/sys_logger.cpp @@ -0,0 +1,85 @@ +#include "depthai_ros_driver/dai_nodes/sys_logger.hpp" + +#include "depthai/device/DataQueue.hpp" +#include "depthai/device/Device.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai/pipeline/datatype/SystemInformation.hpp" +#include "depthai/pipeline/node/SystemLogger.hpp" +#include "depthai/pipeline/node/XLinkOut.hpp" +#include "ros/node_handle.h" + +namespace depthai_ros_driver { +namespace dai_nodes { +SysLogger::SysLogger(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline) : BaseNode(daiNodeName, node, pipeline) { + ROS_DEBUG("Creating node %s", daiNodeName.c_str()); + setNames(); + sysNode = pipeline->create(); + setXinXout(pipeline); + ROS_DEBUG("Node %s created", daiNodeName.c_str()); +} +SysLogger::~SysLogger() = default; + +void SysLogger::setNames() { + loggerQName = getName() + "_queue"; +} + +void SysLogger::setXinXout(std::shared_ptr pipeline) { + xoutLogger = pipeline->create(); + xoutLogger->setStreamName(loggerQName); + sysNode->out.link(xoutLogger->input); +} + +void SysLogger::setupQueues(std::shared_ptr device) { + loggerQ = device->getOutputQueue(loggerQName, 8, false); + updater.reset(new diagnostic_updater::Updater()); + updater->setHardwareID(getROSNode().getNamespace() + std::string("_") + device->getMxId() + std::string("_") + device->getDeviceName()); + updater->add("sys_logger", std::bind(&SysLogger::produceDiagnostics, this, std::placeholders::_1)); + timer = getROSNode().createTimer(ros::Duration(1.0), std::bind(&SysLogger::timerCB, this)); +} +void SysLogger::timerCB() { + updater->update(); +} + +void SysLogger::closeQueues() { + loggerQ->close(); +} + +std::string SysLogger::sysInfoToString(const dai::SystemInformation& sysInfo) { + std::stringstream ss; + ss << "System Information: " << std::endl; + ss << " Leon CSS CPU Usage: " << sysInfo.leonCssCpuUsage.average * 100 << std::endl; + ss << " Leon MSS CPU Usage: " << sysInfo.leonMssCpuUsage.average * 100 << std::endl; + ss << " Ddr Memory Usage: " << sysInfo.ddrMemoryUsage.used / (1024.0f * 1024.0f) << std::endl; + ss << " Ddr Memory Total: " << sysInfo.ddrMemoryUsage.total / (1024.0f * 1024.0f) << std::endl; + ss << " Cmx Memory Usage: " << sysInfo.cmxMemoryUsage.used / (1024.0f * 1024.0f) << std::endl; + ss << " Cmx Memory Total: " << sysInfo.cmxMemoryUsage.total << std::endl; + ss << " Leon CSS Memory Usage: " << sysInfo.leonCssMemoryUsage.used / (1024.0f * 1024.0f) << std::endl; + ss << " Leon CSS Memory Total: " << sysInfo.leonCssMemoryUsage.total / (1024.0f * 1024.0f) << std::endl; + ss << " Leon MSS Memory Usage: " << sysInfo.leonMssMemoryUsage.used / 
(1024.0f * 1024.0f) << std::endl; + ss << " Leon MSS Memory Total: " << sysInfo.leonMssMemoryUsage.total / (1024.0f * 1024.0f) << std::endl; + ss << " Average Chip Temperature: " << sysInfo.chipTemperature.average << std::endl; + ss << " Leon CSS Chip Temperature: " << sysInfo.chipTemperature.css << std::endl; + ss << " Leon MSS Chip Temperature: " << sysInfo.chipTemperature.mss << std::endl; + ss << " UPA Chip Temperature: " << sysInfo.chipTemperature.upa << std::endl; + ss << " DSS Chip Temperature: " << sysInfo.chipTemperature.dss << std::endl; + + return ss.str(); +} + +void SysLogger::produceDiagnostics(diagnostic_updater::DiagnosticStatusWrapper& stat) { + try { + auto logData = loggerQ->tryGet(); + if(logData) { + stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "System Information"); + stat.add("System Information", sysInfoToString(*logData)); + } else { + stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, "No Data"); + } + } catch(const std::exception& e) { + ROS_ERROR("No data on logger queue!"); + stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, e.what()); + } +} + +} // namespace dai_nodes +} // namespace depthai_ros_driver diff --git a/depthai_ros_driver/src/param_handlers/camera_param_handler.cpp b/depthai_ros_driver/src/param_handlers/camera_param_handler.cpp index d66b4626..3490c12a 100644 --- a/depthai_ros_driver/src/param_handlers/camera_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/camera_param_handler.cpp @@ -22,15 +22,33 @@ dai::UsbSpeed CameraParamHandler::getUSBSpeed() { return utils::getValFromMap(getParam("i_usb_speed"), usbSpeedMap); } void CameraParamHandler::declareParams() { - getParam("i_pipeline_type"); - getParam("i_nn_type"); - getParam("i_enable_imu"); - getParam("i_enable_ir"); - getParam("i_usb_speed"); - getParam("i_mx_id"); - getParam("i_ip"); - getParam("i_laser_dot_brightness"); - getParam("i_floodlight_brightness"); + declareAndLogParam("i_pipeline_type", "RGBD"); + declareAndLogParam("i_nn_type", "spatial"); + declareAndLogParam("i_enable_imu", true); + declareAndLogParam("i_enable_ir", true); + declareAndLogParam("i_usb_speed", "SUPER_PLUS"); + declareAndLogParam("i_mx_id", ""); + declareAndLogParam("i_ip", ""); + declareAndLogParam("i_usb_port_id", ""); + declareAndLogParam("i_pipeline_dump", false); + declareAndLogParam("i_calibration_dump", false); + declareAndLogParam("i_external_calibration_path", ""); + declareAndLogParam("i_laser_dot_brightness", 800, getRangedIntDescriptor(0, 1200)); + declareAndLogParam("i_floodlight_brightness", 0, getRangedIntDescriptor(0, 1500)); + declareAndLogParam("i_publish_tf_from_calibration", false); + declareAndLogParam("i_tf_camera_name", "oak"); + declareAndLogParam("i_tf_camera_model", ""); + declareAndLogParam("i_tf_base_frame", "oak"); + declareAndLogParam("i_tf_parent_frame", "oak-d-base-frame"); + declareAndLogParam("i_tf_cam_pos_x", "0.0"); + declareAndLogParam("i_tf_cam_pos_y", "0.0"); + declareAndLogParam("i_tf_cam_pos_z", "0.0"); + declareAndLogParam("i_tf_cam_roll", "0.0"); + declareAndLogParam("i_tf_cam_pitch", "0.0"); + declareAndLogParam("i_tf_cam_yaw", "0.0"); + declareAndLogParam("i_tf_imu_from_descr", "false"); + declareAndLogParam("i_tf_custom_urdf_location", ""); + declareAndLogParam("i_tf_custom_xacro_args", ""); } dai::CameraControl CameraParamHandler::setRuntimeParams(parametersConfig& /*config*/) { dai::CameraControl ctrl; diff --git a/depthai_ros_driver/src/param_handlers/feature_tracker_param_handler.cpp 
b/depthai_ros_driver/src/param_handlers/feature_tracker_param_handler.cpp new file mode 100644 index 00000000..51d8280c --- /dev/null +++ b/depthai_ros_driver/src/param_handlers/feature_tracker_param_handler.cpp @@ -0,0 +1,29 @@ +#include "depthai_ros_driver/param_handlers/feature_tracker_param_handler.hpp" + +#include "depthai/pipeline/node/FeatureTracker.hpp" +#include "depthai_ros_driver/utils.hpp" +#include "ros/node_handle.h" + +namespace depthai_ros_driver { +namespace param_handlers { +FeatureTrackerParamHandler::FeatureTrackerParamHandler(ros::NodeHandle node, const std::string& name) : BaseParamHandler(node, name) {} +FeatureTrackerParamHandler::~FeatureTrackerParamHandler() = default; +void FeatureTrackerParamHandler::declareParams(std::shared_ptr featureTracker) { + declareAndLogParam("i_get_base_device_timestamp", false); + + featureTracker->setHardwareResources(declareAndLogParam("i_num_shaves", 2), declareAndLogParam("i_num_memory_slices", 2)); + motionEstMap = {{"LUCAS_KANADE_OPTICAL_FLOW", dai::FeatureTrackerConfig::MotionEstimator::Type::LUCAS_KANADE_OPTICAL_FLOW}, + {"HW_MOTION_ESTIMATION", dai::FeatureTrackerConfig::MotionEstimator::Type::HW_MOTION_ESTIMATION} + + }; + auto config = featureTracker->initialConfig.get(); + config.motionEstimator.type = (motionEstMap.at(declareAndLogParam("i_motion_estimator", "LUCAS_KANADE_OPTICAL_FLOW"))); + featureTracker->initialConfig.set(config); +} + +dai::CameraControl FeatureTrackerParamHandler::setRuntimeParams(parametersConfig& /*config*/) { + dai::CameraControl ctrl; + return ctrl; +} +} // namespace param_handlers +} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp b/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp index e38a1797..cb1dcd60 100644 --- a/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/imu_param_handler.cpp @@ -16,20 +16,29 @@ void ImuParamHandler::declareParams(std::shared_ptr imu, const s }; imuMessagetTypeMap = { {"IMU", imu::ImuMsgType::IMU}, {"IMU_WITH_MAG", imu::ImuMsgType::IMU_WITH_MAG}, {"IMU_WITH_MAG_SPLIT", imu::ImuMsgType::IMU_WITH_MAG_SPLIT}}; + declareAndLogParam("i_get_base_device_timestamp", false); + declareAndLogParam("i_max_q_size", 30); + declareAndLogParam("i_message_type", "IMU"); + declareAndLogParam("i_sync_method", "LINEAR_INTERPOLATE_ACCEL"); + declareAndLogParam("i_acc_cov", 0.0); + declareAndLogParam("i_gyro_cov", 0.0); + declareAndLogParam("i_rot_cov", -1.0); + declareAndLogParam("i_mag_cov", 0.0); + declareAndLogParam("i_update_ros_base_time_on_ros_msg", false); bool rotationAvailable = imuType == "BNO086"; - if(getParam("i_enable_rotation")) { + if(declareAndLogParam("i_enable_rotation", false)) { if(rotationAvailable) { - imu->enableIMUSensor(dai::IMUSensor::ROTATION_VECTOR, getParam("i_rotation_vec_freq", 400)); - imu->enableIMUSensor(dai::IMUSensor::MAGNETOMETER_CALIBRATED, getParam("i_magnetometer_freq", 100)); + imu->enableIMUSensor(dai::IMUSensor::ROTATION_VECTOR, declareAndLogParam("i_rot_freq", 400)); + imu->enableIMUSensor(dai::IMUSensor::MAGNETOMETER_CALIBRATED, declareAndLogParam("i_mag_freq", 100)); } else { ROS_ERROR("Rotation enabled but not available with current sensor"); - setParam("i_enable_rotation", false); + declareAndLogParam("i_enable_rotation", false, true); } } - imu->enableIMUSensor(dai::IMUSensor::ACCELEROMETER_RAW, getParam("i_acc_freq", 500)); - imu->enableIMUSensor(dai::IMUSensor::GYROSCOPE_RAW, 
getParam("i_gyro_freq", 400)); - imu->setBatchReportThreshold(getParam("i_batch_report_threshold", 5)); - imu->setMaxBatchReports(getParam("i_max_batch_reports", 20)); + imu->enableIMUSensor(dai::IMUSensor::ACCELEROMETER_RAW, declareAndLogParam("i_acc_freq", 400)); + imu->enableIMUSensor(dai::IMUSensor::GYROSCOPE_RAW, declareAndLogParam("i_gyro_freq", 400)); + imu->setBatchReportThreshold(declareAndLogParam("i_batch_report_threshold", 5)); + imu->setMaxBatchReports(declareAndLogParam("i_max_batch_reports", 10)); } dai::ros::ImuSyncMethod ImuParamHandler::getSyncMethod() { diff --git a/depthai_ros_driver/src/param_handlers/nn_param_handler.cpp b/depthai_ros_driver/src/param_handlers/nn_param_handler.cpp index 30eb38b8..a7da1a5b 100644 --- a/depthai_ros_driver/src/param_handlers/nn_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/nn_param_handler.cpp @@ -24,7 +24,9 @@ NNParamHandler::NNParamHandler(ros::NodeHandle node, const std::string& name) : NNParamHandler::~NNParamHandler() = default; std::string NNParamHandler::getConfigPath() { std::string configPath = ros::package::getPath("depthai_ros_driver") + "/config/nn/"; - auto nnPath = getParam("i_nn_config_path"); + std::string default_nn_conf_name = "mobilenet.json"; + std::string default_path = configPath + default_nn_conf_name; + auto nnPath = declareAndLogParam("i_nn_config_path", default_path); if(nnPath == "depthai_ros_driver/yolo") { nnPath = configPath + "yolo.json"; } else if(nnPath == "depthai_ros_driver/segmentation") { @@ -52,6 +54,9 @@ nn::NNFamily NNParamHandler::getNNFamily() { void NNParamHandler::setNNParams(nlohmann::json data, std::shared_ptr /*nn*/) { if(data["mappings"].contains("labels")) { labels = data["mappings"]["labels"].get>(); + if(!labels.empty()) { + declareAndLogParam>("i_label_map", labels); + } } } @@ -72,6 +77,9 @@ void NNParamHandler::setNNParams(nlohmann::json data, std::shared_ptr>(); + if(!labels.empty()) { + declareAndLogParam>("i_label_map", labels); + } } setSpatialParams(nn); } @@ -98,6 +106,9 @@ void NNParamHandler::setNNParams(nlohmann::json data, std::shared_ptr>(); + if(!labels.empty()) { + declareAndLogParam>("i_label_map", labels); + } } if(data["nn_config"].contains("NN_specific_metadata")) { setYoloParams(data, nn); @@ -107,11 +118,12 @@ void NNParamHandler::setNNParams(nlohmann::json data, std::shared_ptr imageManip) { auto blob = dai::OpenVINO::Blob(model_path); auto firstInfo = blob.networkInputs.begin(); - auto inputSize = firstInfo->second.dims[0]; - - if(inputSize > 590) { + auto inputWidth = firstInfo->second.dims[0]; + auto inputHeight = firstInfo->second.dims[1]; + if(inputWidth > 590 || inputHeight > 590) { std::ostringstream stream; - stream << "Current network input size is too large to resize. Please set following parameters: rgb.i_preview_size: " << inputSize; + stream << "Current network input size is too large to resize. Please set following parameters: rgb.i_preview_width: " << inputWidth; + stream << ", rgb.i_preview_height: " << inputHeight; stream << " and nn.i_disable_resize to true"; throw std::runtime_error(stream.str()); } @@ -119,8 +131,8 @@ void NNParamHandler::setImageManip(const std::string& model_path, std::shared_pt imageManip->inputImage.setBlocking(false); imageManip->inputImage.setQueueSize(8); imageManip->setKeepAspectRatio(false); - ROS_INFO("NN input size: %d x %d. Resizing input image in case of different dimensions.", inputSize, inputSize); - imageManip->initialConfig.setResize(inputSize, inputSize); + ROS_INFO("NN input size: %d x %d. 
Resizing input image in case of different dimensions.", inputWidth, inputHeight); + imageManip->initialConfig.setResize(inputWidth, inputHeight); } std::string NNParamHandler::getModelPath(const nlohmann::json& data) { std::string modelPath; diff --git a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp index 046765d3..e343c07b 100644 --- a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp @@ -10,9 +10,40 @@ namespace depthai_ros_driver { namespace param_handlers { -SensorParamHandler::SensorParamHandler(ros::NodeHandle node, const std::string& name) : BaseParamHandler(node, name) {} +SensorParamHandler::SensorParamHandler(ros::NodeHandle node, const std::string& name) : BaseParamHandler(node, name) { + declareCommonParams(); +} SensorParamHandler::~SensorParamHandler() = default; +void SensorParamHandler::declareCommonParams() { + declareAndLogParam("i_max_q_size", 30); + declareAndLogParam("i_low_bandwidth", false); + declareAndLogParam("i_low_bandwidth_quality", 50); + declareAndLogParam("i_calibration_file", ""); + declareAndLogParam("i_simulate_from_topic", false); + declareAndLogParam("i_simulated_topic_name", ""); + declareAndLogParam("i_disable_node", false); + declareAndLogParam("i_get_base_device_timestamp", false); + declareAndLogParam("i_board_socket_id", 0); + declareAndLogParam("i_update_ros_base_time_on_ros_msg", false); + declareAndLogParam("i_enable_feature_tracker", false); + declareAndLogParam("i_enable_lazy_publisher", true); + declareAndLogParam("i_add_exposure_offset", false); + declareAndLogParam("i_exposure_offset", 0); + fSyncModeMap = { + {"OFF", dai::CameraControl::FrameSyncMode::OFF}, + {"OUTPUT", dai::CameraControl::FrameSyncMode::OUTPUT}, + {"INPUT", dai::CameraControl::FrameSyncMode::INPUT}, + }; + cameraImageOrientationMap = { + {"NORMAL", dai::CameraImageOrientation::NORMAL}, + {"ROTATE_180_DEG", dai::CameraImageOrientation::ROTATE_180_DEG}, + {"AUTO", dai::CameraImageOrientation::AUTO}, + {"HORIZONTAL_MIRROR", dai::CameraImageOrientation::HORIZONTAL_MIRROR}, + {"VERTICAL_FLIP", dai::CameraImageOrientation::VERTICAL_FLIP}, + }; +} + void SensorParamHandler::declareParams(std::shared_ptr monoCam, dai::CameraBoardSocket socket, dai_nodes::sensor_helpers::ImageSensor, @@ -24,63 +55,76 @@ void SensorParamHandler::declareParams(std::shared_ptr mo {"800", dai::MonoCameraProperties::SensorResolution::THE_800_P}, {"1200", dai::MonoCameraProperties::SensorResolution::THE_1200_P}, }; - getParam("i_max_q_size", 30); - getParam("i_publish_topic", publish); - getParam("i_board_socket_id", static_cast(socket)); - + declareAndLogParam("i_board_socket_id", static_cast(socket), true); monoCam->setBoardSocket(socket); - monoCam->setFps(getParam("i_fps", 30.0)); - - monoCam->setResolution(utils::getValFromMap(getParam("i_resolution", "720"), monoResolutionMap)); - getParam("i_width"); - getParam("i_height"); - size_t iso = getParam("r_iso", 800); - size_t exposure = getParam("r_exposure", 1000); - - if(getParam("r_set_man_exposure", false)) { + monoCam->setFps(declareAndLogParam("i_fps", 30.0)); + declareAndLogParam("i_publish_topic", publish); + monoCam->setResolution(utils::getValFromMap(declareAndLogParam("i_resolution", "720"), monoResolutionMap)); + declareAndLogParam("i_width", monoCam->getResolutionWidth()); + declareAndLogParam("i_height", monoCam->getResolutionHeight()); + size_t iso = 
declareAndLogParam("r_iso", 800, getRangedIntDescriptor(100, 1600)); + size_t exposure = declareAndLogParam("r_exposure", 1000, getRangedIntDescriptor(1, 33000)); + + if(declareAndLogParam("r_set_man_exposure", false)) { monoCam->initialControl.setManualExposure(exposure, iso); } + if(declareAndLogParam("i_fsync_continuous", false)) { + monoCam->initialControl.setFrameSyncMode(utils::getValFromMap(declareAndLogParam("i_fsync_mode", "INPUT"), fSyncModeMap)); + } + if(declareAndLogParam("i_fsync_trigger", false)) { + monoCam->initialControl.setExternalTrigger(declareAndLogParam("i_num_frames_burst", 1), declareAndLogParam("i_num_frames_discard", 0)); + } + if(declareAndLogParam("i_set_isp3a_fps", false)) { + monoCam->setIsp3aFps(declareAndLogParam("i_isp3a_fps", 10)); + } + monoCam->setImageOrientation(utils::getValFromMap(declareAndLogParam("i_sensor_img_orientation", "NORMAL"), cameraImageOrientationMap)); } void SensorParamHandler::declareParams(std::shared_ptr colorCam, dai::CameraBoardSocket socket, dai_nodes::sensor_helpers::ImageSensor sensor, bool publish) { - rgbResolutionMap = {{"720", dai::ColorCameraProperties::SensorResolution::THE_720_P}, - {"1080", dai::ColorCameraProperties::SensorResolution::THE_1080_P}, - {"4K", dai::ColorCameraProperties::SensorResolution::THE_4_K}, - {"12MP", dai::ColorCameraProperties::SensorResolution::THE_12_MP}, - {"13MP", dai::ColorCameraProperties::SensorResolution::THE_13_MP}, - {"800", dai::ColorCameraProperties::SensorResolution::THE_800_P}, - {"1200", dai::ColorCameraProperties::SensorResolution::THE_1200_P}, + rgbResolutionMap = {{"720p", dai::ColorCameraProperties::SensorResolution::THE_720_P}, + {"1080p", dai::ColorCameraProperties::SensorResolution::THE_1080_P}, + {"4k", dai::ColorCameraProperties::SensorResolution::THE_4_K}, + {"12mp", dai::ColorCameraProperties::SensorResolution::THE_12_MP}, + {"13mp", dai::ColorCameraProperties::SensorResolution::THE_13_MP}, + {"800p", dai::ColorCameraProperties::SensorResolution::THE_800_P}, + {"1200p", dai::ColorCameraProperties::SensorResolution::THE_1200_P}, {"5MP", dai::ColorCameraProperties::SensorResolution::THE_5_MP}, {"4000x3000", dai::ColorCameraProperties::SensorResolution::THE_4000X3000}, {"5312X6000", dai::ColorCameraProperties::SensorResolution::THE_5312X6000}, - {"48_MP", dai::ColorCameraProperties::SensorResolution::THE_48_MP}, + {"48mp", dai::ColorCameraProperties::SensorResolution::THE_48_MP}, {"1440X1080", dai::ColorCameraProperties::SensorResolution::THE_1440X1080}}; - - getParam("i_max_q_size", 30); - getParam("i_publish_topic", publish); - getParam("i_enable_preview", false); - getParam("i_board_socket_id", static_cast(socket)); - + declareAndLogParam("i_publish_topic", publish); + declareAndLogParam("i_board_socket_id", static_cast(socket), true); + declareAndLogParam("i_output_isp", true); + declareAndLogParam("i_enable_preview", false); colorCam->setBoardSocket(socket); - colorCam->setFps(getParam("i_fps", 30.0)); - colorCam->setPreviewSize(getParam("i_preview_size", 300), getParam("i_preview_size", 300)); - auto resolution = utils::getValFromMap(getParam("i_resolution", "1080"), rgbResolutionMap); - int width, height; - colorCam->setResolution(resolution); - sensor.getSizeFromResolution(colorCam->getResolution(), width, height); + colorCam->setFps(declareAndLogParam("i_fps", 30.0)); + int preview_size = declareAndLogParam("i_preview_size", 300); + int preview_width = declareAndLogParam("i_preview_width", preview_size); + int preview_height = declareAndLogParam("i_preview_height", 
preview_size); + colorCam->setPreviewSize(preview_width, preview_height); + auto resString = declareAndLogParam("i_resolution", sensor.defaultResolution); + + // if resolution not in allowed resolutions, use default + if(std::find(sensor.allowedResolutions.begin(), sensor.allowedResolutions.end(), resString) == sensor.allowedResolutions.end()) { + ROS_WARN( + "Resolution %s not supported by sensor %s. Using default resolution %s", resString.c_str(), sensor.name.c_str(), sensor.defaultResolution.c_str()); + resString = sensor.defaultResolution; + } - colorCam->setInterleaved(getParam("i_interleaved", false)); - if(getParam("i_set_isp_scale", true)) { - int num = getParam("i_isp_num", 2); - int den = getParam("i_isp_den", 3); + colorCam->setResolution(utils::getValFromMap(resString, rgbResolutionMap)); + int width = colorCam->getResolutionWidth(); + int height = colorCam->getResolutionHeight(); + + colorCam->setInterleaved(declareAndLogParam("i_interleaved", false)); + if(declareAndLogParam("i_set_isp_scale", true)) { + int num = declareAndLogParam("i_isp_num", 2); + int den = declareAndLogParam("i_isp_den", 3); width = (width * num + den - 1) / den; height = (height * num + den - 1) / den; - if(width < getParam("i_preview_size") || height < getParam("i_preview_size")) { - throw std::runtime_error("ISP image size lower than preview size! Adjust preview size accordingly"); - } colorCam->setIspScale(num, den); if(width % 16 != 0 && height % 16 != 0) { std::stringstream err_stream; @@ -91,26 +135,31 @@ void SensorParamHandler::declareParams(std::shared_ptr c ROS_ERROR(err_stream.str().c_str()); } } - if(getParam("i_output_isp")) { - setParam("i_width", width); - setParam("i_height", height); - } - colorCam->setVideoSize(getParam("i_width", width), getParam("i_height", height)); - - colorCam->setPreviewKeepAspectRatio(getParam("i_keep_preview_aspect_ratio", true)); - size_t iso = getParam("r_iso", 800); - size_t exposure = getParam("r_exposure", 20000); - size_t whitebalance = getParam("r_whitebalance", 3300); - size_t focus = getParam("r_focus", 1); - if(getParam("r_set_man_focus", false)) { + colorCam->setVideoSize(declareAndLogParam("i_width", width), declareAndLogParam("i_height", height)); + colorCam->setPreviewKeepAspectRatio(declareAndLogParam("i_keep_preview_aspect_ratio", true)); + size_t iso = declareAndLogParam("r_iso", 800, getRangedIntDescriptor(100, 1600)); + size_t exposure = declareAndLogParam("r_exposure", 20000, getRangedIntDescriptor(1, 33000)); + size_t whitebalance = declareAndLogParam("r_whitebalance", 3300, getRangedIntDescriptor(1000, 12000)); + size_t focus = declareAndLogParam("r_focus", 1, getRangedIntDescriptor(0, 255)); + if(declareAndLogParam("r_set_man_focus", false)) { colorCam->initialControl.setManualFocus(focus); } - if(getParam("r_set_man_exposure", false)) { + if(declareAndLogParam("r_set_man_exposure", false)) { colorCam->initialControl.setManualExposure(exposure, iso); } - if(getParam("r_set_man_whitebalance", false)) { + if(declareAndLogParam("r_set_man_whitebalance", false)) { colorCam->initialControl.setManualWhiteBalance(whitebalance); } + if(declareAndLogParam("i_fsync_continuous", false)) { + colorCam->initialControl.setFrameSyncMode(utils::getValFromMap(declareAndLogParam("i_fsync_mode", "INPUT"), fSyncModeMap)); + } + if(declareAndLogParam("i_fsync_trigger", false)) { + colorCam->initialControl.setExternalTrigger(declareAndLogParam("i_num_frames_burst", 1), declareAndLogParam("i_num_frames_discard", 0)); + } + 
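
The ISP scaling above uses ceiling division when deriving the scaled width and height; a quick standalone check of that arithmetic:

    #include <cassert>

    int main() {
        int width = 1920, height = 1080, num = 2, den = 3;
        width = (width * num + den - 1) / den;    // (3840 + 2) / 3 = 1280
        height = (height * num + den - 1) / den;  // (2160 + 2) / 3 = 720
        assert(width == 1280 && height == 720);
        return 0;
    }
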
if(declareAndLogParam("i_set_isp3a_fps", false)) { + colorCam->setIsp3aFps(declareAndLogParam("i_isp3a_fps", 10)); + } + colorCam->setImageOrientation(utils::getValFromMap(declareAndLogParam("i_sensor_img_orientation", "NORMAL"), cameraImageOrientationMap)); } dai::CameraControl SensorParamHandler::setRuntimeParams(parametersConfig& config) { dai::CameraControl ctrl; diff --git a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp index 7fe748cc..98755eb0 100644 --- a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp @@ -1,5 +1,6 @@ #include "depthai_ros_driver/param_handlers/stereo_param_handler.hpp" +#include "depthai-shared/common/CameraFeatures.hpp" #include "depthai/depthai.hpp" #include "depthai/pipeline/nodes.hpp" #include "depthai_ros_driver/utils.hpp" @@ -33,76 +34,130 @@ StereoParamHandler::StereoParamHandler(ros::NodeHandle node, const std::string& }; } +void StereoParamHandler::updateSocketsFromParams(dai::CameraBoardSocket& left, dai::CameraBoardSocket& right) { + int newLeftS = getParam("i_left_socket_id", static_cast(left)); + int newRightS = getParam("i_right_socket_id", static_cast(right)); + if(newLeftS != static_cast(left) || newRightS != static_cast(right)) { + ROS_WARN("Left or right socket changed, updating stereo node"); + ROS_WARN("Old left socket: %d, new left socket: %d", static_cast(left), newLeftS); + ROS_WARN("Old right socket: %d, new right socket: %d", static_cast(right), newRightS); + } + left = static_cast(newLeftS); + right = static_cast(newRightS); +} + StereoParamHandler::~StereoParamHandler() = default; -void StereoParamHandler::declareParams(std::shared_ptr stereo, const std::string& rightName) { - getParam("i_max_q_size"); - stereo->setLeftRightCheck(getParam("i_lr_check")); +void StereoParamHandler::declareParams(std::shared_ptr stereo, const std::vector& camFeatures) { + declareAndLogParam("i_max_q_size", 30); + declareAndLogParam("i_low_bandwidth", false); + declareAndLogParam("i_low_bandwidth_quality", 50); + declareAndLogParam("i_output_disparity", false); + declareAndLogParam("i_get_base_device_timestamp", false); + declareAndLogParam("i_update_ros_base_time_on_ros_msg", false); + declareAndLogParam("i_publish_topic", true); + declareAndLogParam("i_add_exposure_offset", false); + declareAndLogParam("i_exposure_offset", 0); + declareAndLogParam("i_enable_lazy_publisher", true); + + declareAndLogParam("i_publish_synced_rect_pair", false); + declareAndLogParam("i_publish_left_rect", false); + declareAndLogParam("i_left_rect_low_bandwidth", false); + declareAndLogParam("i_left_rect_low_bandwidth_quality", 50); + declareAndLogParam("i_left_rect_add_exposure_offset", false); + declareAndLogParam("i_left_rect_exposure_offset", 0); + declareAndLogParam("i_left_rect_enable_feature_tracker", false); + + declareAndLogParam("i_publish_right_rect", false); + declareAndLogParam("i_right_rect_low_bandwidth", false); + declareAndLogParam("i_right_rect_low_bandwidth_quality", 50); + declareAndLogParam("i_right_rect_enable_feature_tracker", false); + declareAndLogParam("i_right_rect_add_exposure_offset", false); + declareAndLogParam("i_right_rect_exposure_offset", 0); + + stereo->setLeftRightCheck(declareAndLogParam("i_lr_check", true)); int width = 1280; int height = 720; - dai::CameraBoardSocket socket = dai::CameraBoardSocket::CAM_A; - if(getParam("i_align_depth")) { - width = getOtherNodeParam("rgb", 
"i_width", width); - height = getOtherNodeParam("rgb", "i_height", height); - } else { - width = getOtherNodeParam(rightName, "i_width", width); - height = getOtherNodeParam(rightName, "i_height", height); - socket = dai::CameraBoardSocket::CAM_C; + auto socket = static_cast(declareAndLogParam("i_board_socket_id", static_cast(dai::CameraBoardSocket::CAM_A))); + std::string socketName; + if(declareAndLogParam("i_align_depth", true)) { + socketName = utils::getSocketName(socket, camFeatures); + width = getOtherNodeParam(socketName, "i_width"); + height = getOtherNodeParam(socketName, "i_height"); + declareAndLogParam("i_socket_name", socketName); + stereo->setDepthAlign(socket); } - stereo->setDepthAlign(socket); - setParam("i_width", width); - setParam("i_height", height); - setParam("i_board_socket_id", static_cast(socket)); - if(getParam("i_set_input_size")) { - stereo->setInputResolution(getParam("i_input_width"), getParam("i_input_height")); + + if(declareAndLogParam("i_set_input_size", false)) { + stereo->setInputResolution(declareAndLogParam("i_input_width", 1280), declareAndLogParam("i_input_height", 720)); } + width = declareAndLogParam("i_width", width); + height = declareAndLogParam("i_height", height); stereo->setOutputSize(width, height); + stereo->setDefaultProfilePreset(depthPresetMap.at(declareAndLogParam("i_depth_preset", "HIGH_ACCURACY"))); + if(declareAndLogParam("i_enable_distortion_correction", false)) { + stereo->enableDistortionCorrection(true); + } - stereo->setDefaultProfilePreset(depthPresetMap.at(getParam("i_depth_preset"))); - stereo->enableDistortionCorrection(getParam("i_enable_distortion_correction")); - - stereo->initialConfig.setBilateralFilterSigma(getParam("i_bilateral_sigma")); - stereo->initialConfig.setLeftRightCheckThreshold(getParam("i_lrc_threshold")); - stereo->initialConfig.setMedianFilter(static_cast(getParam("i_depth_filter_size"))); - stereo->initialConfig.setConfidenceThreshold(getParam("i_stereo_conf_threshold")); - if(getParam("i_subpixel", false)) { + stereo->initialConfig.setBilateralFilterSigma(declareAndLogParam("i_bilateral_sigma", 0)); + stereo->initialConfig.setLeftRightCheckThreshold(declareAndLogParam("i_lrc_threshold", 10)); + stereo->initialConfig.setMedianFilter(static_cast(declareAndLogParam("i_depth_filter_size", 5))); + stereo->initialConfig.setConfidenceThreshold(declareAndLogParam("i_stereo_conf_threshold", 240)); + if(declareAndLogParam("i_subpixel", false)) { stereo->initialConfig.setSubpixel(true); - stereo->initialConfig.setSubpixelFractionalBits(getParam("i_subpixel_fractional_bits")); + stereo->initialConfig.setSubpixelFractionalBits(declareAndLogParam("i_subpixel_fractional_bits", 3)); + } + stereo->setRectifyEdgeFillColor(declareAndLogParam("i_rectify_edge_fill_color", 0)); + if(declareAndLogParam("i_enable_alpha_scaling", false)) { + stereo->setAlphaScaling(declareAndLogParam("i_alpha_scaling", 0.0)); } - stereo->setRectifyEdgeFillColor(getParam("i_rectify_edge_fill_color")); auto config = stereo->initialConfig.get(); - config.costMatching.disparityWidth = utils::getValFromMap(getParam("i_disparity_width", "DISPARITY_96"), disparityWidthMap); - stereo->setExtendedDisparity(getParam("i_extended_disp")); - config.costMatching.enableCompanding = getParam("i_enable_companding", false); - config.postProcessing.temporalFilter.enable = getParam("i_enable_temporal_filter"); + config.costMatching.disparityWidth = utils::getValFromMap(declareAndLogParam("i_disparity_width", "DISPARITY_96"), disparityWidthMap); + 
-    if(getParam<bool>("i_enable_brightness_filter")) {
-        config.postProcessing.brightnessFilter.minBrightness = getParam<int>("i_brightness_filter_min_brightness");
-        config.postProcessing.brightnessFilter.maxBrightness = getParam<int>("i_brightness_filter_max_brightness");
+    if(declareAndLogParam<bool>("i_enable_brightness_filter", false)) {
+        config.postProcessing.brightnessFilter.minBrightness = declareAndLogParam<int>("i_brightness_filter_min_brightness", 0);
+        config.postProcessing.brightnessFilter.maxBrightness = declareAndLogParam<int>("i_brightness_filter_max_brightness", 256);
     }
-    if(getParam<bool>("i_enable_decimation_filter")) {
+    if(declareAndLogParam<bool>("i_enable_decimation_filter", false)) {
         config.postProcessing.decimationFilter.decimationMode =
-            utils::getValFromMap(getParam<std::string>("i_decimation_filter_decimation_mode"), decimationModeMap);
-        config.postProcessing.decimationFilter.decimationFactor = getParam<int>("i_decimation_filter_decimation_factor");
+            utils::getValFromMap(declareAndLogParam<std::string>("i_decimation_filter_decimation_mode", "PIXEL_SKIPPING"), decimationModeMap);
+        config.postProcessing.decimationFilter.decimationFactor = declareAndLogParam<int>("i_decimation_filter_decimation_factor", 1);
+        int decimatedWidth = width / config.postProcessing.decimationFilter.decimationFactor;
+        int decimatedHeight = height / config.postProcessing.decimationFilter.decimationFactor;
+        ROS_INFO("Decimation filter enabled with decimation factor %d. Previous width and height: %d x %d, after decimation: %d x %d",
+                 config.postProcessing.decimationFilter.decimationFactor,
+                 width,
+                 height,
+                 decimatedWidth,
+                 decimatedHeight);
+        stereo->setOutputSize(decimatedWidth, decimatedHeight);
+        declareAndLogParam<int>("i_width", decimatedWidth, true);
+        declareAndLogParam<int>("i_height", decimatedHeight, true);
     }
     stereo->initialConfig.set(config);
 }
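The decimation block above also illustrates the config round-trip the driver uses for every post-processing filter: read the raw config with `initialConfig.get()`, mutate it, then write it back with `initialConfig.set()` before the pipeline starts. A sketch of that pattern in isolation (a decimation factor of 2 is assumed here; this is not driver code):

```cpp
// Sketch: read-modify-write of StereoDepth post-processing config.
#include <memory>
#include "depthai/depthai.hpp"

void enableDecimation(std::shared_ptr<dai::node::StereoDepth> stereo, int width, int height) {
    auto config = stereo->initialConfig.get();  // dai::RawStereoDepthConfig
    config.postProcessing.decimationFilter.decimationFactor = 2;  // halves each dimension
    config.postProcessing.decimationFilter.decimationMode =
        dai::RawStereoDepthConfig::PostProcessing::DecimationFilter::DecimationMode::PIXEL_SKIPPING;
    stereo->initialConfig.set(config);
    // e.g. 1280x720 -> 640x360; the driver then overrides the published
    // i_width/i_height so downstream consumers (point cloud etc.) match.
    stereo->setOutputSize(width / 2, height / 2);
}
```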
diff --git a/depthai_ros_driver/src/pipeline/base_types.cpp b/depthai_ros_driver/src/pipeline/base_types.cpp
index 1a3502bf..861be3fc 100644
--- a/depthai_ros_driver/src/pipeline/base_types.cpp
+++ b/depthai_ros_driver/src/pipeline/base_types.cpp
@@ -128,17 +128,14 @@ std::vector<std::unique_ptr<dai_nodes::BaseNode>> CamArray::createPipeline(ros::
                                                                            std::shared_ptr<dai::Pipeline> pipeline,
                                                                            const std::string& /*nnType*/) {
     std::vector<std::unique_ptr<dai_nodes::BaseNode>> daiNodes;
-    int i = 0;
-    int j = 0;
-    for(auto& sensor : device->getCameraSensorNames()) {
-        // append letter for greater sensor number
-        if(i % alphabet.size() == 0) {
-            j++;
+    for(auto& feature : device->getConnectedCameraFeatures()) {
+        if(feature.name == "color") {
+            auto daiNode = std::make_unique<dai_nodes::SensorWrapper>("rgb", node, pipeline, device, feature.socket);
+            daiNodes.push_back(std::move(daiNode));
+        } else {
+            auto daiNode = std::make_unique<dai_nodes::SensorWrapper>(feature.name, node, pipeline, device, feature.socket);
+            daiNodes.push_back(std::move(daiNode));
         }
-        std::string nodeName(j, alphabet[i % alphabet.size()]);
-        auto daiNode = std::make_unique<dai_nodes::SensorWrapper>(nodeName, node, pipeline, device, sensor.first);
-        daiNodes.push_back(std::move(daiNode));
-        i++;
     };
     return daiNodes;
 }
diff --git a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp
index 07ebec75..350b6c96 100644
--- a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp
+++ b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp
@@ -3,6 +3,7 @@
 #include "depthai/device/Device.hpp"
 #include "depthai/pipeline/Pipeline.hpp"
 #include "depthai_ros_driver/dai_nodes/sensors/imu.hpp"
+#include "depthai_ros_driver/dai_nodes/sys_logger.hpp"
 #include "depthai_ros_driver/pipeline/base_pipeline.hpp"
 #include "depthai_ros_driver/utils.hpp"
 #include "pluginlib/class_loader.hpp"
@@ -45,7 +46,8 @@ std::vector<std::unique_ptr<dai_nodes::BaseNode>> PipelineGenerator::createPipel
             daiNodes.push_back(std::move(imu));
         }
     }
-
+    auto sysLogger = std::make_unique<dai_nodes::SysLogger>("sys_logger", node, pipeline);
+    daiNodes.push_back(std::move(sysLogger));
     ROS_INFO("Finished setting up pipeline.");
     return daiNodes;
 }
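`CamArray` now keys its sensor nodes off `Device::getConnectedCameraFeatures()` instead of synthesizing letter-based names, so each node is named after the physical sensor (with "color" mapped to "rgb", matching `getSocketName` below). A standalone sketch of that enumeration (assumes a device is plugged in; output values depend on the hardware):

```cpp
// Sketch: enumerate camera features the way CamArray::createPipeline does.
// dai::CameraFeatures carries the socket and sensor name used by the driver.
#include <iostream>
#include "depthai/depthai.hpp"

int main() {
    dai::Device device;  // connects to the first available device
    for(const auto& feature : device.getConnectedCameraFeatures()) {
        std::cout << "socket " << static_cast<int>(feature.socket) << ": " << feature.name << "\n";
    }
    return 0;
}
```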
diff --git a/depthai_ros_driver/src/utils.cpp b/depthai_ros_driver/src/utils.cpp
index a82b02e9..6d50a2c5 100644
--- a/depthai_ros_driver/src/utils.cpp
+++ b/depthai_ros_driver/src/utils.cpp
@@ -1,4 +1,7 @@
 #include "depthai_ros_driver/utils.hpp"
+
+#include "depthai-shared/common/CameraBoardSocket.hpp"
+#include "depthai-shared/common/CameraFeatures.hpp"
 namespace depthai_ros_driver {
 namespace utils {
 std::string getUpperCaseStr(const std::string& string) {
@@ -6,5 +9,19 @@ std::string getUpperCaseStr(const std::string& string) {
     for(auto& c : upper) c = toupper(c);
     return upper;
 }
+std::string getSocketName(dai::CameraBoardSocket socket, std::vector<dai::CameraFeatures> camFeatures) {
+    std::string name;
+    for(auto& cam : camFeatures) {
+        if(cam.socket == socket) {
+            if(cam.name == "color" || cam.name == "center") {
+                name = "rgb";
+            } else {
+                name = cam.name;
+            }
+            return name;
+        }
+    }
+    throw std::runtime_error("Camera socket not found");
+}
 }  // namespace utils
 }  // namespace depthai_ros_driver
\ No newline at end of file
diff --git a/depthai_ros_msgs/CMakeLists.txt b/depthai_ros_msgs/CMakeLists.txt
index ed9fbc2f..893561c7 100644
--- a/depthai_ros_msgs/CMakeLists.txt
+++ b/depthai_ros_msgs/CMakeLists.txt
@@ -31,6 +31,8 @@ add_message_files (
   SpatialDetection.msg
   SpatialDetectionArray.msg
   ImuWithMagneticField.msg
+  TrackedFeature.msg
+  TrackedFeatures.msg
 )
 
 ## Generate services in the 'srv' folder
diff --git a/depthai_ros_msgs/msg/TrackedFeature.msg b/depthai_ros_msgs/msg/TrackedFeature.msg
new file mode 100644
index 00000000..f0aead6e
--- /dev/null
+++ b/depthai_ros_msgs/msg/TrackedFeature.msg
@@ -0,0 +1,10 @@
+std_msgs/Header header
+
+geometry_msgs/Point position
+uint32 id
+
+uint32 age
+
+float32 harris_score
+
+float32 tracking_error
diff --git a/depthai_ros_msgs/msg/TrackedFeatures.msg b/depthai_ros_msgs/msg/TrackedFeatures.msg
new file mode 100644
index 00000000..ab977295
--- /dev/null
+++ b/depthai_ros_msgs/msg/TrackedFeatures.msg
@@ -0,0 +1,2 @@
+std_msgs/Header header
+depthai_ros_msgs/TrackedFeature[] features
\ No newline at end of file
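The two new messages close the loop for the feature tracker: `TrackedFeatures` is just a header plus an array of `TrackedFeature`. A usage sketch for consuming them from a ROS1 node (the topic name here is assumed; the actual name depends on the driver configuration):

```cpp
// Sketch: subscribe to the new TrackedFeatures message in a noetic node.
#include <ros/ros.h>
#include <depthai_ros_msgs/TrackedFeatures.h>

void featuresCb(const depthai_ros_msgs::TrackedFeatures::ConstPtr& msg) {
    for(const auto& f : msg->features) {
        // position is a geometry_msgs/Point; x/y are pixel coordinates
        ROS_INFO("feature %u age %u at (%.1f, %.1f)", f.id, f.age, f.position.x, f.position.y);
    }
}

int main(int argc, char** argv) {
    ros::init(argc, argv, "tracked_features_listener");
    ros::NodeHandle nh;
    ros::Subscriber sub = nh.subscribe("feature_tracker/tracked_features", 10, featuresCb);
    ros::spin();
    return 0;
}
```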