diff --git a/depthai-ros/CHANGELOG.rst b/depthai-ros/CHANGELOG.rst index d7a61e1b..14279097 100644 --- a/depthai-ros/CHANGELOG.rst +++ b/depthai-ros/CHANGELOG.rst @@ -2,7 +2,17 @@ Changelog for package depthai-ros ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -2.7.2 (20230-5-08) +2.7.3 (2023-06-16) +------------------- +* Pipeline generation as a plugin +* Fixed bounding box generation issue +* Stereo rectified streams publishing +* Camera trigger mechanisms +* Brightness filter + + +2.7.2 (2023-5-08) +------------------- * IMU improvements 2.7.1 (2023-03-29) diff --git a/depthai-ros/CMakeLists.txt b/depthai-ros/CMakeLists.txt index 0fe2ed1a..a091d06b 100644 --- a/depthai-ros/CMakeLists.txt +++ b/depthai-ros/CMakeLists.txt @@ -1,6 +1,6 @@ cmake_minimum_required (VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS -project(depthai-ros VERSION 2.7.2 LANGUAGES CXX C) +project(depthai-ros VERSION 2.7.3 LANGUAGES CXX C) set(CMAKE_CXX_STANDARD 14) diff --git a/depthai-ros/package.xml b/depthai-ros/package.xml index 0fe54e22..b3dc50a0 100644 --- a/depthai-ros/package.xml +++ b/depthai-ros/package.xml @@ -1,7 +1,7 @@ depthai-ros - 2.7.2 + 2.7.3 The depthai-ros package diff --git a/depthai_bridge/CMakeLists.txt b/depthai_bridge/CMakeLists.txt index 9d5a11f3..de561eef 100644 --- a/depthai_bridge/CMakeLists.txt +++ b/depthai_bridge/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required (VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS set (CMAKE_POSITION_INDEPENDENT_CODE ON) -project(depthai_bridge VERSION 2.7.2 LANGUAGES CXX C) +project(depthai_bridge VERSION 2.7.3 LANGUAGES CXX C) set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) diff --git a/depthai_bridge/include/depthai_bridge/ImuConverter.hpp b/depthai_bridge/include/depthai_bridge/ImuConverter.hpp index 734a9779..84efadf7 100644 --- a/depthai_bridge/include/depthai_bridge/ImuConverter.hpp +++ b/depthai_bridge/include/depthai_bridge/ImuConverter.hpp @@ -30,7 +30,8 @@ class ImuConverter { double angular_velocity_cov = 
0.0, double rotation_cov = 0.0, double magnetic_field_cov = 0.0, - bool enable_rotation = false); + bool enable_rotation = false, + bool getBaseDeviceTimestamp = false); ~ImuConverter(); /** @@ -132,6 +133,7 @@ class ImuConverter { int64_t _totalNsChange{0}; // Whether to update the ROS base time on each message conversion bool _updateRosBaseTimeOnToRosMsg{false}; + bool _getBaseDeviceTimestamp; void fillImuMsg(dai::IMUReportAccelerometer report, ImuMsgs::Imu& msg); void fillImuMsg(dai::IMUReportGyroscope report, ImuMsgs::Imu& msg); @@ -144,7 +146,7 @@ class ImuConverter { void fillImuMsg(dai::IMUReportMagneticField report, depthai_ros_msgs::ImuWithMagneticField& msg); template - void CreateUnitMessage(I first, S second, T third, F fourth, M& msg, dai::Timestamp timestamp) { + void CreateUnitMessage(I first, S second, T third, F fourth, M& msg, std::chrono::_V2::steady_clock::time_point timestamp) { fillImuMsg(first, msg); fillImuMsg(second, msg); fillImuMsg(third, msg); @@ -152,7 +154,7 @@ class ImuConverter { msg.header.frame_id = _frameName; - msg.header.stamp = getFrameTime(_rosBaseTime, _steadyBaseTime, timestamp.get()); + msg.header.stamp = getFrameTime(_rosBaseTime, _steadyBaseTime, timestamp); } template @@ -182,7 +184,12 @@ class ImuConverter { const double alpha = diff.count() / dt; I interp = lerpImu(interp0, interp1, alpha); M msg; - CreateUnitMessage(interp, currSecond, currThird, currFourth, msg, currSecond.timestamp); + std::chrono::_V2::steady_clock::time_point tstamp; + if(_getBaseDeviceTimestamp) + tstamp = currSecond.getTimestampDevice(); + else + tstamp = currSecond.getTimestamp(); + CreateUnitMessage(interp, currSecond, currThird, currFourth, msg, tstamp); imuMsgs.push_back(msg); second.pop_front(); third.pop_front(); diff --git a/depthai_bridge/package.xml b/depthai_bridge/package.xml index 64ee9730..b78d2744 100644 --- a/depthai_bridge/package.xml +++ b/depthai_bridge/package.xml @@ -1,7 +1,7 @@ depthai_bridge - 2.7.2 + 2.7.3 The 
depthai_bridge package Sachin Guruswamy diff --git a/depthai_bridge/src/ImuConverter.cpp b/depthai_bridge/src/ImuConverter.cpp index ead7e035..ad340822 100644 --- a/depthai_bridge/src/ImuConverter.cpp +++ b/depthai_bridge/src/ImuConverter.cpp @@ -13,7 +13,8 @@ ImuConverter::ImuConverter(const std::string& frameName, double angular_velocity_cov, double rotation_cov, double magnetic_field_cov, - bool enable_rotation) + bool enable_rotation, + bool getBaseDeviceTimestamp) : _frameName(frameName), _syncMode(syncMode), _linear_accel_cov(linear_accel_cov), @@ -22,7 +23,8 @@ ImuConverter::ImuConverter(const std::string& frameName, _magnetic_field_cov(magnetic_field_cov), _enable_rotation(enable_rotation), _sequenceNum(0), - _steadyBaseTime(std::chrono::steady_clock::now()) { + _steadyBaseTime(std::chrono::steady_clock::now()), + _getBaseDeviceTimestamp(getBaseDeviceTimestamp) { _rosBaseTime = ::ros::Time::now(); } @@ -98,7 +100,13 @@ void ImuConverter::toRosMsg(std::shared_ptr inData, std::dequepackets[i].rotationVector; auto magn = inData->packets[i].magneticField; ImuMsgs::Imu msg; - CreateUnitMessage(accel, gyro, rot, magn, msg, accel.timestamp); + std::chrono::_V2::steady_clock::time_point tstamp; + if(_getBaseDeviceTimestamp) + tstamp = accel.getTimestampDevice(); + else + tstamp = accel.getTimestamp(); + + CreateUnitMessage(accel, gyro, rot, magn, msg, tstamp); outImuMsgs.push_back(msg); } } @@ -114,7 +122,13 @@ void ImuConverter::toRosDaiMsg(std::shared_ptr inData, std::deque< auto rot = inData->packets[i].rotationVector; auto magn = inData->packets[i].magneticField; depthai_ros_msgs::ImuWithMagneticField msg; - CreateUnitMessage(accel, gyro, rot, magn, msg, accel.timestamp); + std::chrono::_V2::steady_clock::time_point tstamp; + if(_getBaseDeviceTimestamp) + tstamp = accel.getTimestampDevice(); + else + tstamp = accel.getTimestamp(); + + CreateUnitMessage(accel, gyro, rot, magn, msg, tstamp); outImuMsgs.push_back(msg); } } diff --git 
a/depthai_descriptions/CMakeLists.txt b/depthai_descriptions/CMakeLists.txt index 4bb72d8a..518dfd66 100644 --- a/depthai_descriptions/CMakeLists.txt +++ b/depthai_descriptions/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.10.2) -project(depthai_descriptions VERSION 2.7.2 LANGUAGES CXX C) +project(depthai_descriptions VERSION 2.7.3 LANGUAGES CXX C) find_package(catkin REQUIRED diff --git a/depthai_descriptions/package.xml b/depthai_descriptions/package.xml index ddd99add..854c8372 100644 --- a/depthai_descriptions/package.xml +++ b/depthai_descriptions/package.xml @@ -1,7 +1,7 @@ depthai_descriptions - 2.7.2 + 2.7.3 The depthai_descriptions package Adam Serafin diff --git a/depthai_examples/CMakeLists.txt b/depthai_examples/CMakeLists.txt index 214fb9cf..52371849 100644 --- a/depthai_examples/CMakeLists.txt +++ b/depthai_examples/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required (VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS -project(depthai_examples VERSION 2.7.2 LANGUAGES CXX C) +project(depthai_examples VERSION 2.7.3 LANGUAGES CXX C) set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) diff --git a/depthai_examples/package.xml b/depthai_examples/package.xml index e41e0750..e18ff31b 100644 --- a/depthai_examples/package.xml +++ b/depthai_examples/package.xml @@ -1,7 +1,7 @@ depthai_examples - 2.7.2 + 2.7.3 The depthai_examples package diff --git a/depthai_filters/CMakeLists.txt b/depthai_filters/CMakeLists.txt index 3dc9e0ef..62de84f1 100644 --- a/depthai_filters/CMakeLists.txt +++ b/depthai_filters/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.10.2) -project(depthai_filters VERSION 2.7.2 LANGUAGES CXX C) +project(depthai_filters VERSION 2.7.3 LANGUAGES CXX C) set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) diff --git a/depthai_filters/package.xml b/depthai_filters/package.xml index ba789611..14ea7fa7 100644 --- a/depthai_filters/package.xml +++ b/depthai_filters/package.xml @@ -1,7 +1,7 @@ 
depthai_filters - 2.7.2 + 2.7.3 The depthai_filters package Adam Serafin diff --git a/depthai_ros_driver/CMakeLists.txt b/depthai_ros_driver/CMakeLists.txt index 87f40dab..75470100 100644 --- a/depthai_ros_driver/CMakeLists.txt +++ b/depthai_ros_driver/CMakeLists.txt @@ -67,7 +67,6 @@ include_directories( ${OpenCV_INCLUDE_DIRS} ) -include_directories(include) catkin_package( LIBRARIES @@ -134,8 +133,9 @@ target_link_libraries( add_library( ${PROJECT_NAME} SHARED + src/pipeline/pipeline_generator.cpp + src/pipeline/base_types.cpp src/camera.cpp - src/pipeline_generator.cpp ) @@ -177,6 +177,9 @@ install( DESTINATION lib/${PROJECT_NAME} ) +install(FILES plugins.xml + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}) + install( DIRECTORY include/ DESTINATION include diff --git a/depthai_ros_driver/cfg/parameters.cfg b/depthai_ros_driver/cfg/parameters.cfg index dd663981..84f55953 100755 --- a/depthai_ros_driver/cfg/parameters.cfg +++ b/depthai_ros_driver/cfg/parameters.cfg @@ -82,7 +82,7 @@ camera.add("rgb_i_board_socket_id", int_t, 0, "Sensor board socket id", 0) camera.add("rgb_i_calibration_file", str_t, 0, "Path to calibration file", "") camera.add("rgb_i_fps", double_t, 0, "FPS", 60.0) camera.add("rgb_i_height", int_t, 0, "Image height", 720) -camera.add("rgb_i_interleaved", bool_t, 0, "Is frame interleaved or planar", True) +camera.add("rgb_i_interleaved", bool_t, 0, "Is frame interleaved or planar", False) camera.add("rgb_r_keep_preview_aspect_ratio", bool_t, 0, "Keep preview aspect ratio", True) camera.add("rgb_i_low_bandwidth", bool_t, 0, "Use encoding for data", False) camera.add("rgb_i_low_bandwidth_quality", int_t, 0, "Quality when using low-bandwidth mode", 50, 1, 100) @@ -116,6 +116,9 @@ camera.add("stereo_i_decimation_filter_decimation_factor", int_t, 0, "Decimation camera.add("stereo_i_decimation_filter_decimation_mode", str_t, 0, "Decimation mode", "PIXEL_SKIPPING") camera.add("stereo_i_depth_filter_size", int_t, 0, "Depth filter size", 5) 
camera.add("stereo_i_depth_preset", str_t, 0, "Depth preset", "HIGH_ACCURACY") +camera.add("stereo_i_enable_brightness_filter", bool_t, 0, "Enable brightness filter", False) +camera.add("stereo_i_brightness_filter_min_brightness", int_t, 0, "Brightness filter min", 0) +camera.add("stereo_i_brightness_filter_max_brightness", int_t, 0, "Brightness filter max", 256) camera.add("stereo_i_enable_companding", bool_t, 0, "Enable companding", False) camera.add("stereo_i_enable_decimation_filter", bool_t, 0, "Enable decimation filter", False) camera.add("stereo_i_enable_distortion_correction", bool_t, 0, "Enable distortion correction", True) @@ -129,6 +132,13 @@ camera.add("stereo_i_low_bandwidth_quality", int_t, 0, "Quality when using low-b camera.add("stereo_i_lr_check", bool_t, 0, "Enable left-right check", True) camera.add("stereo_i_lrc_threshold", int_t, 0, "Left-right check threshold", 10) camera.add("stereo_i_max_q_size", int_t, 0, "Internal queue size", 30) +camera.add("stereo_i_publish_left_rect", bool_t, 0, "Publish rectified left", False) +camera.add("stereo_i_left_rect_low_bandwidth", bool_t, 0, "Low bandwidth for rectified left", False) +camera.add("stereo_i_left_rect_low_bandwidth_quality", int_t, 0, "Low bandwidth left quality", 50) +camera.add("stereo_i_publish_right_rect", bool_t, 0, "Publish rectified right", False) +camera.add("stereo_i_right_rect_low_bandwidth", bool_t, 0, "Low bandwidth for rectified right", False) +camera.add("stereo_i_right_rect_low_bandwidth_quality", int_t, 0, "Low bandwidth right quality", 50) +camera.add("stereo_i_publish_topic", bool_t, 0, "Publish stereo topic", True) camera.add("stereo_i_rectify_edge_fill_color", int_t, 0, "Rectify edge fill color", 0) camera.add("stereo_i_spatial_filter_alpha", double_t, 0, "Spatial filter alpha", 0.5) camera.add("stereo_i_spatial_filter_delta", int_t, 0, "Spatial filter delta", 20) @@ -159,6 +169,7 @@ camera.add("imu_i_acc_freq", int_t, 0, "Accelerometer frequency", 500) 
camera.add("imu_i_acc_cov", double_t, 0, "Accelerometer covariance", 0.0) camera.add("imu_i_batch_report_threshold", int_t, 0, "Batch report size", 5) camera.add("imu_i_enable_rotation", bool_t, 0, "Whether to enable rotation vector & magnetometer data (available when using IMU_WITH_MAGN/ IMU_WITH_MAGN_SPLIT message type)", False) +camera.add("imu_i_get_base_device_timestamp", bool_t, 0, "Use base device timestamp", False) camera.add("imu_i_gyro_cov", double_t, 0, "Gyroscope covariance", 0.0) camera.add("imu_i_gyro_freq", int_t, 0, "Gyroscope frequency", 400) camera.add("imu_i_mag_cov", double_t, 0, "Magnetometer covariance", 0.0) diff --git a/depthai_ros_driver/config/camera.yaml b/depthai_ros_driver/config/camera.yaml index e39d657a..566d4e64 100644 --- a/depthai_ros_driver/config/camera.yaml +++ b/depthai_ros_driver/config/camera.yaml @@ -20,7 +20,7 @@ left_i_preview_size: 300 left_i_publish_topic: false left_i_resolution: '720' - left_i_set_isp_scale: false + left_i_set_isp_scale: true left_i_width: 1280 left_r_exposure: 1000 left_r_focus: 1 @@ -67,7 +67,7 @@ right_i_preview_size: 300 right_i_publish_topic: false right_i_resolution: '720' - right_i_set_isp_scale: false + right_i_set_isp_scale: true right_i_width: 1280 right_r_exposure: 1000 right_r_focus: 1 diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp index bedfd40a..c4f7b873 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp @@ -44,11 +44,17 @@ class Detection : public BaseNode { void setupQueues(std::shared_ptr device) override { nnQ = device->getOutputQueue(nnQName, ph->getParam("i_max_q_size"), false); auto tfPrefix = getTFPrefix("rgb"); - detConverter = std::make_unique(tfPrefix + "_camera_optical_frame", - imageManip->initialConfig.getResizeConfig().width, - 
imageManip->initialConfig.getResizeConfig().height, - false, - ph->getParam("i_get_base_device_timestamp")); + int width; + int height; + if(ph->getParam("i_disable_resize")) { + width = ph->getOtherNodeParam("rgb", "i_preview_size"); + height = ph->getOtherNodeParam("rgb", "i_preview_size"); + } else { + width = imageManip->initialConfig.getResizeConfig().width; + height = imageManip->initialConfig.getResizeConfig().height; + } + detConverter = std::make_unique( + tfPrefix + "_camera_optical_frame", width, height, false, ph->getParam("i_get_base_device_timestamp")); detPub = getROSNode().template advertise(getName() + "/detections", 10); nnQ->addCallback(std::bind(&Detection::detectionCB, this, std::placeholders::_1, std::placeholders::_2)); @@ -56,8 +62,7 @@ class Detection : public BaseNode { ptQ = device->getOutputQueue(ptQName, ph->getParam("i_max_q_size"), false); imageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); infoManager = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + getName()); - infoManager->setCameraInfo(sensor_helpers::getCalibInfo( - *imageConverter, device, dai::CameraBoardSocket::RGB, imageManip->initialConfig.getResizeWidth(), imageManip->initialConfig.getResizeWidth())); + infoManager->setCameraInfo(sensor_helpers::getCalibInfo(*imageConverter, device, dai::CameraBoardSocket::RGB, width, height)); ptPub = it.advertiseCamera(getName() + "/passthrough/image_raw", 1); ptQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *imageConverter, ptPub, infoManager)); diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp index f692c3ec..ae32431b 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/spatial_detection.hpp @@ -47,11 +47,17 @@ class 
SpatialDetection : public BaseNode { void setupQueues(std::shared_ptr device) override { nnQ = device->getOutputQueue(nnQName, ph->getParam("i_max_q_size"), false); auto tfPrefix = getTFPrefix("rgb"); - detConverter = std::make_unique(tfPrefix + "_camera_optical_frame", - imageManip->initialConfig.getResizeConfig().width, - imageManip->initialConfig.getResizeConfig().height, - false, - ph->getParam("i_get_base_device_timestamp")); + int width; + int height; + if(ph->getParam("i_disable_resize")) { + width = ph->getOtherNodeParam("rgb", "i_preview_size"); + height = ph->getOtherNodeParam("rgb", "i_preview_size"); + } else { + width = imageManip->initialConfig.getResizeConfig().width; + height = imageManip->initialConfig.getResizeConfig().height; + } + detConverter = std::make_unique( + tfPrefix + "_camera_optical_frame", width, height, false, ph->getParam("i_get_base_device_timestamp")); nnQ->addCallback(std::bind(&SpatialDetection::spatialCB, this, std::placeholders::_1, std::placeholders::_2)); detPub = getROSNode().template advertise(getName() + "/spatial_detections", 10); @@ -59,11 +65,7 @@ class SpatialDetection : public BaseNode { ptQ = device->getOutputQueue(ptQName, ph->getParam("i_max_q_size"), false); ptImageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); ptInfoMan = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + getName()); - ptInfoMan->setCameraInfo(sensor_helpers::getCalibInfo(*ptImageConverter, - device, - dai::CameraBoardSocket::RGB, - imageManip->initialConfig.getResizeWidth(), - imageManip->initialConfig.getResizeWidth())); + ptInfoMan->setCameraInfo(sensor_helpers::getCalibInfo(*ptImageConverter, device, dai::CameraBoardSocket::RGB, width, height)); ptPub = it.advertiseCamera(getName() + "/passthrough/image_raw", 1); ptQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *ptImageConverter, ptPub, ptInfoMan)); @@ -80,9 +82,8 @@ class SpatialDetection : public 
BaseNode { ptDepthQ = device->getOutputQueue(ptDepthQName, ph->getParam("i_max_q_size"), false); ptDepthImageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); ptDepthInfoMan = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + getName()); - int width, height; - getROSNode().getParam("stereo_i_width", width); - getROSNode().getParam("stereo_i_height", height); + int width = ph->getOtherNodeParam("stereo", "i_width"); + int height = ph->getOtherNodeParam("stereo", "i_height"); ptDepthInfoMan->setCameraInfo(sensor_helpers::getCalibInfo(*ptDepthImageConverter, device, socket, width, height)); ptDepthPub = it.advertiseCamera(getName() + "/passthrough_depth/image_raw", 1); diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp index 3fa943f9..8a15c426 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp @@ -7,6 +7,7 @@ #include "depthai-shared/common/CameraBoardSocket.hpp" #include "depthai/pipeline/Node.hpp" #include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "ros/subscriber.h" #include "sensor_msgs/Image.h" @@ -47,6 +48,7 @@ class SensorWrapper : public BaseNode { void setNames() override; void setXinXout(std::shared_ptr pipeline) override; void closeQueues() override; + sensor_helpers::ImageSensor getSensorData(); private: void subCB(const sensor_msgs::Image::ConstPtr& img); @@ -59,6 +61,7 @@ class SensorWrapper : public BaseNode { std::string inQName; int socketID; bool ready; + sensor_helpers::ImageSensor sensorData; }; } // namespace dai_nodes diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp index 
99af32aa..13d6febe 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/stereo.hpp @@ -5,6 +5,7 @@ #include #include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp" #include "depthai_ros_driver/parametersConfig.h" #include "image_transport/camera_publisher.h" #include "image_transport/image_transport.h" @@ -41,9 +42,20 @@ namespace dai_nodes { namespace link_types { enum class StereoLinkType { left, right }; } + +struct StereoSensorInfo { + std::string name; + dai::CameraBoardSocket socket; +}; + class Stereo : public BaseNode { public: - explicit Stereo(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline, std::shared_ptr device); + explicit Stereo(const std::string& daiNodeName, + ros::NodeHandle node, + std::shared_ptr pipeline, + std::shared_ptr device, + StereoSensorInfo leftInfo = StereoSensorInfo{"left", dai::CameraBoardSocket::LEFT}, + StereoSensorInfo rightInfo = StereoSensorInfo{"right", dai::CameraBoardSocket::RIGHT}); ~Stereo(); void updateParams(parametersConfig& config) override; void setupQueues(std::shared_ptr device) override; @@ -54,19 +66,22 @@ class Stereo : public BaseNode { void closeQueues() override; private: - void stereoQCB(const std::string& name, const std::shared_ptr& data); - std::unique_ptr imageConverter; - std::shared_ptr infoManager; + void setupStereoQueue(std::shared_ptr device); + void setupLeftRectQueue(std::shared_ptr device); + void setupRightRectQueue(std::shared_ptr device); image_transport::ImageTransport it; - image_transport::CameraPublisher stereoPub; + std::unique_ptr stereoConv, leftRectConv, rightRectConv; + image_transport::CameraPublisher stereoPub, leftRectPub, rightRectPub; + std::shared_ptr stereoIM, leftRectIM, rightRectIM; std::shared_ptr stereoCamNode; - std::shared_ptr videoEnc; - std::unique_ptr left; - std::unique_ptr right; + 
std::shared_ptr stereoEnc, leftRectEnc, rightRectEnc; + std::unique_ptr left; + std::unique_ptr right; std::unique_ptr ph; - std::shared_ptr stereoQ; - std::shared_ptr xoutStereo; - std::string stereoQName; + std::shared_ptr stereoQ, leftRectQ, rightRectQ; + std::shared_ptr xoutStereo, xoutLeftRect, xoutRightRect; + std::string stereoQName, leftRectQName, rightRectQName; + StereoSensorInfo leftSensInfo, rightSensInfo; }; } // namespace dai_nodes diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp index ec24ffd9..19066ab8 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp @@ -20,7 +20,7 @@ class StereoParamHandler : public BaseParamHandler { public: explicit StereoParamHandler(ros::NodeHandle node, const std::string& name); ~StereoParamHandler(); - void declareParams(std::shared_ptr stereo); + void declareParams(std::shared_ptr stereo, const std::string& rightName); dai::CameraControl setRuntimeParams(parametersConfig& config) override; private: diff --git a/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_pipeline.hpp b/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_pipeline.hpp new file mode 100644 index 00000000..a17e44f4 --- /dev/null +++ b/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_pipeline.hpp @@ -0,0 +1,62 @@ +#pragma once + +#include +#include +#include +#include + +#include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include "depthai_ros_driver/dai_nodes/nn/nn_helpers.hpp" +#include "depthai_ros_driver/dai_nodes/nn/nn_wrapper.hpp" +#include "depthai_ros_driver/dai_nodes/nn/spatial_nn_wrapper.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" +#include "depthai_ros_driver/dai_nodes/stereo.hpp" + +namespace dai { +class 
Pipeline; +class Device; +} // namespace dai + +namespace rclcpp { +class Node; +} + +namespace depthai_ros_driver { +namespace pipeline_gen { +enum class NNType { None, RGB, Spatial }; +class BasePipeline { + public: + ~BasePipeline() = default; + std::unique_ptr createNN(ros::NodeHandle node, std::shared_ptr pipeline, dai_nodes::BaseNode& daiNode) { + auto nn = std::make_unique("nn", node, pipeline); + daiNode.link(nn->getInput(), static_cast(dai_nodes::link_types::RGBLinkType::preview)); + return nn; + } + std::unique_ptr createSpatialNN(ros::NodeHandle node, + std::shared_ptr pipeline, + dai_nodes::BaseNode& daiNode, + dai_nodes::BaseNode& daiStereoNode) { + auto nn = std::make_unique("nn", node, pipeline); + daiNode.link(nn->getInput(static_cast(dai_nodes::nn_helpers::link_types::SpatialNNLinkType::input)), + static_cast(dai_nodes::link_types::RGBLinkType::preview)); + daiStereoNode.link(nn->getInput(static_cast(dai_nodes::nn_helpers::link_types::SpatialNNLinkType::inputDepth))); + return nn; + } + + virtual std::vector> createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) = 0; + + protected: + BasePipeline(){}; + const std::string alphabet = "abcdefghijklmnopqrstuvwxyz"; + std::unordered_map nnTypeMap = { + {"", NNType::None}, + {"NONE", NNType::None}, + {"RGB", NNType::RGB}, + {"SPATIAL", NNType::Spatial}, + }; +}; +} // namespace pipeline_gen +} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_types.hpp b/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_types.hpp new file mode 100644 index 00000000..dd1aa805 --- /dev/null +++ b/depthai_ros_driver/include/depthai_ros_driver/pipeline/base_types.hpp @@ -0,0 +1,65 @@ +#pragma once + +#include +#include +#include +#include + +#include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include "depthai_ros_driver/pipeline/base_pipeline.hpp" + +namespace dai 
{ +class Pipeline; +class Device; +} // namespace dai + +namespace ros { +class NodeHandle; +} + +namespace depthai_ros_driver { +namespace pipeline_gen { +class RGB : public BasePipeline { + public: + std::vector> createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) override; +}; +class RGBD : public BasePipeline { + public: + std::vector> createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) override; +}; +class RGBStereo : public BasePipeline { + public: + std::vector> createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) override; +}; +class Stereo : public BasePipeline { + public: + std::vector> createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) override; +}; +class Depth : public BasePipeline { + public: + std::vector> createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) override; +}; +class CamArray : public BasePipeline { + public: + std::vector> createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) override; +}; +} // namespace pipeline_gen +} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/pipeline_generator.hpp b/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp similarity index 76% rename from depthai_ros_driver/include/depthai_ros_driver/pipeline_generator.hpp rename to depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp index 07decf58..b475b84e 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/pipeline_generator.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp @@ -19,7 +19,6 @@ class 
NodeHandle; namespace depthai_ros_driver { namespace pipeline_gen { enum class PipelineType { RGB, RGBD, RGBStereo, Depth, Stereo, CamArray }; -enum class NNType { None, RGB, Spatial }; class PipelineGenerator { public: @@ -39,18 +38,19 @@ class PipelineGenerator { bool enableImu); private: + protected: + std::unordered_map pluginTypeMap{{"RGB", "depthai_ros_driver::pipeline_gen::RGB"}, + {"RGBD", "depthai_ros_driver::pipeline_gen::RGBD"}, + {"RGBSTEREO", "depthai_ros_driver::pipeline_gen::RGBStereo"}, + {"STEREO", "depthai_ros_driver::pipeline_gen::Stereo"}, + {"DEPTH", "depthai_ros_driver::pipeline_gen::Depth"}, + {"CAMARRAY", "depthai_ros_driver::pipeline_gen::CamArray"}}; std::unordered_map pipelineTypeMap{{"RGB", PipelineType::RGB}, {"RGBD", PipelineType::RGBD}, {"RGBSTEREO", PipelineType::RGBStereo}, {"STEREO", PipelineType::Stereo}, {"DEPTH", PipelineType::Depth}, {"CAMARRAY", PipelineType::CamArray}}; - std::unordered_map nnTypeMap = { - {"", NNType::None}, - {"NONE", NNType::None}, - {"RGB", NNType::RGB}, - {"SPATIAL", NNType::Spatial}, - }; const std::string alphabet = "abcdefghijklmnopqrstuvwxyz"; }; } // namespace pipeline_gen diff --git a/depthai_ros_driver/package.xml b/depthai_ros_driver/package.xml index e6b62ea5..c7374d09 100644 --- a/depthai_ros_driver/package.xml +++ b/depthai_ros_driver/package.xml @@ -2,7 +2,7 @@ depthai_ros_driver - 2.7.2 + 2.7.3 Depthai ROS Monolithic node. Adam Serafin Sachin Guruswamy @@ -32,9 +32,11 @@ depthai_ros_msgs depthai_examples nodelet + pluginlib + catkin diff --git a/depthai_ros_driver/plugins.xml b/depthai_ros_driver/plugins.xml new file mode 100644 index 00000000..c5886876 --- /dev/null +++ b/depthai_ros_driver/plugins.xml @@ -0,0 +1,20 @@ + + + RGB Pipeline. + + + RGBD Pipeline. + + + RGBStereo Pipeline. + + + Stereo Pipeline. + + + Depth Pipeline. + + + CamArray Pipeline. 
+ + \ No newline at end of file diff --git a/depthai_ros_driver/src/camera.cpp b/depthai_ros_driver/src/camera.cpp index c7d611f7..e6f5140d 100644 --- a/depthai_ros_driver/src/camera.cpp +++ b/depthai_ros_driver/src/camera.cpp @@ -5,7 +5,7 @@ #include "depthai/device/Device.hpp" #include "depthai/pipeline/Pipeline.hpp" -#include "depthai_ros_driver/pipeline_generator.hpp" +#include "depthai_ros_driver/pipeline/pipeline_generator.hpp" #include "dynamic_reconfigure/server.h" #include "nodelet/nodelet.h" #include "pluginlib/class_list_macros.h" diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp index a1bfa2e9..98e29d36 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp @@ -50,6 +50,7 @@ SensorWrapper::SensorWrapper(const std::string& daiNodeName, throw std::runtime_error("Sensor not supported!"); } ROS_DEBUG("Node %s has sensor %s", daiNodeName.c_str(), sensorName.c_str()); + sensorData = *sensorIt; if((*sensorIt).color) { sensorNode = std::make_unique(daiNodeName, node, pipeline, socket, (*sensorIt), publish); } else { @@ -59,7 +60,9 @@ SensorWrapper::SensorWrapper(const std::string& daiNodeName, ROS_DEBUG("Base node %s created", daiNodeName.c_str()); } SensorWrapper::~SensorWrapper() = default; - +sensor_helpers::ImageSensor SensorWrapper::getSensorData() { + return sensorData; +} void SensorWrapper::subCB(const sensor_msgs::Image::ConstPtr& img) { dai::ImgFrame data; if(ready) { diff --git a/depthai_ros_driver/src/dai_nodes/stereo.cpp b/depthai_ros_driver/src/dai_nodes/stereo.cpp index 25d3060b..534d924c 100644 --- a/depthai_ros_driver/src/dai_nodes/stereo.cpp +++ b/depthai_ros_driver/src/dai_nodes/stereo.cpp @@ -18,16 +18,21 @@ namespace depthai_ros_driver { namespace dai_nodes { -Stereo::Stereo(const std::string& daiNodeName, ros::NodeHandle node, std::shared_ptr pipeline, std::shared_ptr 
device) - : BaseNode(daiNodeName, node, pipeline), it(node) { +Stereo::Stereo(const std::string& daiNodeName, + ros::NodeHandle node, + std::shared_ptr pipeline, + std::shared_ptr device, + StereoSensorInfo leftInfo, + StereoSensorInfo rightInfo) + : BaseNode(daiNodeName, node, pipeline), it(node), leftSensInfo(leftInfo), rightSensInfo(rightInfo) { ROS_DEBUG("Creating node %s", daiNodeName.c_str()); setNames(); stereoCamNode = pipeline->create(); - left = std::make_unique("left", node, pipeline, device, dai::CameraBoardSocket::LEFT, false); - right = std::make_unique("right", node, pipeline, device, dai::CameraBoardSocket::RIGHT, false); + left = std::make_unique(leftInfo.name, node, pipeline, device, leftInfo.socket, false); + right = std::make_unique(rightInfo.name, node, pipeline, device, rightInfo.socket, false); ph = std::make_unique(node, daiNodeName); - ph->declareParams(stereoCamNode); + ph->declareParams(stereoCamNode, rightInfo.name); setXinXout(pipeline); left->link(stereoCamNode->left); right->link(stereoCamNode->right); @@ -36,70 +41,165 @@ Stereo::Stereo(const std::string& daiNodeName, ros::NodeHandle node, std::shared Stereo::~Stereo() = default; void Stereo::setNames() { stereoQName = getName() + "_stereo"; + leftRectQName = getName() + "_left_rect"; + rightRectQName = getName() + "_right_rect"; } void Stereo::setXinXout(std::shared_ptr pipeline) { - xoutStereo = pipeline->create(); - xoutStereo->setStreamName(stereoQName); - if(ph->getParam("i_low_bandwidth")) { - videoEnc = sensor_helpers::createEncoder(pipeline, ph->getParam("i_low_bandwidth_quality")); - stereoCamNode->disparity.link(videoEnc->input); - videoEnc->bitstream.link(xoutStereo->input); + if(ph->getParam("i_publish_topic")) { + xoutStereo = pipeline->create(); + xoutStereo->setStreamName(stereoQName); + if(ph->getParam("i_low_bandwidth")) { + stereoEnc = sensor_helpers::createEncoder(pipeline, ph->getParam("i_low_bandwidth_quality")); + stereoCamNode->disparity.link(stereoEnc->input); 
+ stereoEnc->bitstream.link(xoutStereo->input); + } else { + if(ph->getParam("i_output_disparity")) { + stereoCamNode->disparity.link(xoutStereo->input); + } else { + stereoCamNode->depth.link(xoutStereo->input); + } + } + } + if(ph->getParam("i_publish_left_rect")) { + xoutLeftRect = pipeline->create(); + xoutLeftRect->setStreamName(leftRectQName); + if(ph->getParam("i_left_rect_low_bandwidth")) { + leftRectEnc = sensor_helpers::createEncoder(pipeline, ph->getParam("i_left_rect_low_bandwidth_quality")); + stereoCamNode->rectifiedLeft.link(leftRectEnc->input); + leftRectEnc->bitstream.link(xoutLeftRect->input); + } else { + stereoCamNode->rectifiedLeft.link(xoutLeftRect->input); + } + } + + if(ph->getParam("i_publish_right_rect")) { + xoutRightRect = pipeline->create(); + xoutRightRect->setStreamName(rightRectQName); + if(ph->getParam("i_right_rect_low_bandwidth")) { + rightRectEnc = sensor_helpers::createEncoder(pipeline, ph->getParam("i_right_rect_low_bandwidth_quality")); + stereoCamNode->rectifiedRight.link(rightRectEnc->input); + rightRectEnc->bitstream.link(xoutRightRect->input); + } else { + stereoCamNode->rectifiedRight.link(xoutRightRect->input); + } + } +} + +void Stereo::setupLeftRectQueue(std::shared_ptr device) { + leftRectQ = device->getOutputQueue(leftRectQName, ph->getOtherNodeParam(leftSensInfo.name, "i_max_q_size"), false); + auto tfPrefix = getTFPrefix(leftSensInfo.name); + leftRectConv = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); + leftRectIM = std::make_shared(ros::NodeHandle(getROSNode(), leftSensInfo.name), "/" + leftSensInfo.name + "/rect"); + auto info = sensor_helpers::getCalibInfo(*leftRectConv, + device, + leftSensInfo.socket, + ph->getOtherNodeParam(leftSensInfo.name, "i_width"), + ph->getOtherNodeParam(leftSensInfo.name, "i_height")); + leftRectIM->setCameraInfo(info); + leftRectPub = it.advertiseCamera(leftSensInfo.name + "/image_rect", 1); + dai::RawImgFrame::Type encType 
= dai::RawImgFrame::Type::GRAY8; + if(left->getSensorData().color) { + encType = dai::RawImgFrame::Type::BGR888i; + } + if(ph->getParam("i_left_rect_low_bandwidth")) { + leftRectQ->addCallback( + std::bind(sensor_helpers::compressedImgCB, std::placeholders::_1, std::placeholders::_2, *leftRectConv, leftRectPub, leftRectIM, encType)); } else { - stereoCamNode->depth.link(xoutStereo->input); + leftRectQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *leftRectConv, leftRectPub, leftRectIM)); } } -void Stereo::setupQueues(std::shared_ptr device) { - left->setupQueues(device); - right->setupQueues(device); +void Stereo::setupRightRectQueue(std::shared_ptr device) { + rightRectQ = device->getOutputQueue(rightRectQName, ph->getOtherNodeParam(rightSensInfo.name, "i_max_q_size"), false); + auto tfPrefix = getTFPrefix(rightSensInfo.name); + rightRectConv = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); + rightRectIM = + std::make_shared(ros::NodeHandle(getROSNode(), rightSensInfo.name), "/" + rightSensInfo.name + "/rect"); + auto info = sensor_helpers::getCalibInfo(*rightRectConv, + device, + rightSensInfo.socket, + ph->getOtherNodeParam(rightSensInfo.name, "i_width"), + ph->getOtherNodeParam(rightSensInfo.name, "i_height")); + rightRectIM->setCameraInfo(info); + rightRectPub = it.advertiseCamera(rightSensInfo.name + "/image_rect", 1); + dai::RawImgFrame::Type encType = dai::RawImgFrame::Type::GRAY8; + if(right->getSensorData().color) { + encType = dai::RawImgFrame::Type::BGR888i; + } + if(ph->getParam("i_right_rect_low_bandwidth")) { + rightRectQ->addCallback( + std::bind(sensor_helpers::compressedImgCB, std::placeholders::_1, std::placeholders::_2, *rightRectConv, rightRectPub, rightRectIM, encType)); + } else { + rightRectQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *rightRectConv, rightRectPub, rightRectIM)); + } +} + 
+void Stereo::setupStereoQueue(std::shared_ptr device) { stereoQ = device->getOutputQueue(stereoQName, ph->getParam("i_max_q_size"), false); std::string tfPrefix; if(ph->getParam("i_align_depth")) { tfPrefix = getTFPrefix("rgb"); } else { - tfPrefix = getTFPrefix("right"); + tfPrefix = getTFPrefix(rightSensInfo.name); } - imageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); + stereoConv = std::make_unique(tfPrefix + "_camera_optical_frame", false); stereoPub = it.advertiseCamera(getName() + "/image_raw", 1); - infoManager = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + getName()); - auto info = sensor_helpers::getCalibInfo(*imageConverter, + stereoIM = std::make_shared(ros::NodeHandle(getROSNode(), getName()), "/" + getName()); + auto info = sensor_helpers::getCalibInfo(*stereoConv, device, static_cast(ph->getParam("i_board_socket_id")), ph->getParam("i_width"), ph->getParam("i_height")); auto calibHandler = device->readCalibration(); info.P[3] = calibHandler.getBaselineDistance() * 10.0; // baseline in mm - infoManager->setCameraInfo(info); + stereoIM->setCameraInfo(info); if(ph->getParam("i_low_bandwidth")) { if(ph->getParam("i_output_disparity")) { stereoQ->addCallback(std::bind(sensor_helpers::compressedImgCB, std::placeholders::_1, std::placeholders::_2, - *imageConverter, + *stereoConv, stereoPub, - infoManager, + stereoIM, dai::RawImgFrame::Type::GRAY8)); } else { // converting disp->depth - stereoQ->addCallback(std::bind(sensor_helpers::compressedImgCB, - std::placeholders::_1, - std::placeholders::_2, - *imageConverter, - stereoPub, - infoManager, - dai::RawImgFrame::Type::RAW8)); + stereoQ->addCallback(std::bind( + sensor_helpers::compressedImgCB, std::placeholders::_1, std::placeholders::_2, *stereoConv, stereoPub, stereoIM, dai::RawImgFrame::Type::RAW8)); } } else { - stereoQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *imageConverter, stereoPub, infoManager)); + 
stereoQ->addCallback(std::bind(sensor_helpers::imgCB, std::placeholders::_1, std::placeholders::_2, *stereoConv, stereoPub, stereoIM)); + } +} + +void Stereo::setupQueues(std::shared_ptr device) { + left->setupQueues(device); + right->setupQueues(device); + if(ph->getParam("i_publish_topic")) { + setupStereoQueue(device); + } + if(ph->getParam("i_publish_left_rect")) { + setupLeftRectQueue(device); + } + if(ph->getParam("i_publish_right_rect")) { + setupRightRectQueue(device); } } void Stereo::closeQueues() { left->closeQueues(); right->closeQueues(); - stereoQ->close(); + if(ph->getParam("i_publish_topic")) { + stereoQ->close(); + } + if(ph->getParam("i_publish_left_rect")) { + leftRectQ->close(); + } + if(ph->getParam("i_publish_right_rect")) { + rightRectQ->close(); + } } void Stereo::link(dai::Node::Input in, int /*linkType*/) { diff --git a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp index 2941ef9c..0f96b1ea 100644 --- a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp @@ -34,7 +34,7 @@ StereoParamHandler::StereoParamHandler(ros::NodeHandle node, const std::string& } StereoParamHandler::~StereoParamHandler() = default; -void StereoParamHandler::declareParams(std::shared_ptr stereo) { +void StereoParamHandler::declareParams(std::shared_ptr stereo, const std::string& rightName) { getParam("i_max_q_size"); stereo->setLeftRightCheck(getParam("i_lr_check")); int width = 1280; @@ -44,11 +44,13 @@ void StereoParamHandler::declareParams(std::shared_ptr s width = getOtherNodeParam("rgb", "i_width", width); height = getOtherNodeParam("rgb", "i_height", height); } else { - width = getParam("i_width"); - height = getParam("i_height"); + width = getOtherNodeParam(rightName, "i_width", width); + height = getOtherNodeParam(rightName, "i_height", height); socket = dai::CameraBoardSocket::RIGHT; } 
stereo->setDepthAlign(socket); + setParam("i_width", width); + setParam("i_height", height); setParam("i_board_socket_id", static_cast(socket)); if(getParam("i_set_input_size")) { stereo->setInputResolution(getParam("i_input_width"), getParam("i_input_height")); @@ -94,6 +96,10 @@ void StereoParamHandler::declareParams(std::shared_ptr s config.postProcessing.thresholdFilter.minRange = getParam("i_threshold_filter_min_range"); config.postProcessing.thresholdFilter.maxRange = getParam("i_threshold_filter_max_range"); } + if(getParam("i_enable_brightness_filter")) { + config.postProcessing.brightnessFilter.minBrightness = getParam("i_brightness_filter_min_brightness"); + config.postProcessing.brightnessFilter.maxBrightness = getParam("i_brightness_filter_max_brightness"); + } if(getParam("i_enable_decimation_filter")) { config.postProcessing.decimationFilter.decimationMode = utils::getValFromMap(getParam("i_decimation_filter_decimation_mode"), decimationModeMap); diff --git a/depthai_ros_driver/src/pipeline/base_types.cpp b/depthai_ros_driver/src/pipeline/base_types.cpp new file mode 100644 index 00000000..6ce63c97 --- /dev/null +++ b/depthai_ros_driver/src/pipeline/base_types.cpp @@ -0,0 +1,155 @@ +#include "depthai_ros_driver/pipeline/base_types.hpp" + +#include "depthai/device/Device.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai_ros_driver/dai_nodes/base_node.hpp" +#include "depthai_ros_driver/dai_nodes/nn/nn_helpers.hpp" +#include "depthai_ros_driver/dai_nodes/nn/nn_wrapper.hpp" +#include "depthai_ros_driver/dai_nodes/nn/spatial_nn_wrapper.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/imu.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp" +#include "depthai_ros_driver/dai_nodes/stereo.hpp" +#include "depthai_ros_driver/pipeline/base_pipeline.hpp" +#include "depthai_ros_driver/utils.hpp" +#include "ros/node_handle.h" + +namespace depthai_ros_driver { 
+namespace pipeline_gen { + +std::vector> RGB::createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) { + std::string nTypeUpCase = utils::getUpperCaseStr(nnType); + auto nType = utils::getValFromMap(nTypeUpCase, nnTypeMap); + + std::vector> daiNodes; + auto rgb = std::make_unique("rgb", node, pipeline, device, dai::CameraBoardSocket::RGB); + switch(nType) { + case NNType::None: + break; + case NNType::RGB: { + auto nn = createNN(node, pipeline, *rgb); + daiNodes.push_back(std::move(nn)); + break; + } + case NNType::Spatial: { + ROS_WARN("Spatial NN selected, but configuration is RGB."); + } + default: + break; + } + daiNodes.push_back(std::move(rgb)); + return daiNodes; +} +std::vector> RGBD::createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) { + std::string nTypeUpCase = utils::getUpperCaseStr(nnType); + auto nType = utils::getValFromMap(nTypeUpCase, nnTypeMap); + + std::vector> daiNodes; + auto rgb = std::make_unique("rgb", node, pipeline, device, dai::CameraBoardSocket::RGB); + auto stereo = std::make_unique("stereo", node, pipeline, device); + switch(nType) { + case NNType::None: + break; + case NNType::RGB: { + auto nn = createNN(node, pipeline, *rgb); + daiNodes.push_back(std::move(nn)); + break; + } + case NNType::Spatial: { + auto nn = createSpatialNN(node, pipeline, *rgb, *stereo); + daiNodes.push_back(std::move(nn)); + break; + } + default: + break; + } + daiNodes.push_back(std::move(rgb)); + daiNodes.push_back(std::move(stereo)); + return daiNodes; +} +std::vector> RGBStereo::createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& nnType) { + std::string nTypeUpCase = utils::getUpperCaseStr(nnType); + auto nType = utils::getValFromMap(nTypeUpCase, nnTypeMap); + + std::vector> daiNodes; + auto rgb = std::make_unique("rgb", node, pipeline, device, 
dai::CameraBoardSocket::RGB); + auto left = std::make_unique("left", node, pipeline, device, dai::CameraBoardSocket::LEFT); + auto right = std::make_unique("right", node, pipeline, device, dai::CameraBoardSocket::RIGHT); + switch(nType) { + case NNType::None: + break; + case NNType::RGB: { + auto nn = createNN(node, pipeline, *rgb); + daiNodes.push_back(std::move(nn)); + break; + } + case NNType::Spatial: { + ROS_WARN("Spatial NN selected, but configuration is RGBStereo."); + } + default: + break; + } + daiNodes.push_back(std::move(rgb)); + daiNodes.push_back(std::move(left)); + daiNodes.push_back(std::move(right)); + return daiNodes; +} +std::vector> Stereo::createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& /*nnType*/) { + std::vector> daiNodes; + auto left = std::make_unique("left", node, pipeline, device, dai::CameraBoardSocket::LEFT); + auto right = std::make_unique("right", node, pipeline, device, dai::CameraBoardSocket::RIGHT); + daiNodes.push_back(std::move(left)); + daiNodes.push_back(std::move(right)); + return daiNodes; +} +std::vector> Depth::createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& /*nnType*/) { + std::vector> daiNodes; + auto stereo = std::make_unique("stereo", node, pipeline, device); + daiNodes.push_back(std::move(stereo)); + return daiNodes; +} +std::vector> CamArray::createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& /*nnType*/) { + std::vector> daiNodes; + int i = 0; + int j = 0; + for(auto& sensor : device->getCameraSensorNames()) { + // append letter for greater sensor number + if(i % alphabet.size() == 0) { + j++; + } + std::string nodeName(j, alphabet[i % alphabet.size()]); + auto daiNode = std::make_unique(nodeName, node, pipeline, device, sensor.first); + daiNodes.push_back(std::move(daiNode)); + i++; + }; + return daiNodes; +} +} // namespace 
pipeline_gen +} // namespace depthai_ros_driver + +#include + +PLUGINLIB_EXPORT_CLASS(depthai_ros_driver::pipeline_gen::RGB, depthai_ros_driver::pipeline_gen::BasePipeline) +PLUGINLIB_EXPORT_CLASS(depthai_ros_driver::pipeline_gen::RGBD, depthai_ros_driver::pipeline_gen::BasePipeline) +PLUGINLIB_EXPORT_CLASS(depthai_ros_driver::pipeline_gen::RGBStereo, depthai_ros_driver::pipeline_gen::BasePipeline) +PLUGINLIB_EXPORT_CLASS(depthai_ros_driver::pipeline_gen::Stereo, depthai_ros_driver::pipeline_gen::BasePipeline) +PLUGINLIB_EXPORT_CLASS(depthai_ros_driver::pipeline_gen::Depth, depthai_ros_driver::pipeline_gen::BasePipeline) +PLUGINLIB_EXPORT_CLASS(depthai_ros_driver::pipeline_gen::CamArray, depthai_ros_driver::pipeline_gen::BasePipeline) \ No newline at end of file diff --git a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp new file mode 100644 index 00000000..07ebec75 --- /dev/null +++ b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp @@ -0,0 +1,72 @@ +#include "depthai_ros_driver/pipeline/pipeline_generator.hpp" + +#include "depthai/device/Device.hpp" +#include "depthai/pipeline/Pipeline.hpp" +#include "depthai_ros_driver/dai_nodes/sensors/imu.hpp" +#include "depthai_ros_driver/pipeline/base_pipeline.hpp" +#include "depthai_ros_driver/utils.hpp" +#include "pluginlib/class_loader.hpp" +#include "ros/node_handle.h" + +namespace depthai_ros_driver { +namespace pipeline_gen { +std::vector> PipelineGenerator::createPipeline(ros::NodeHandle node, + std::shared_ptr device, + std::shared_ptr pipeline, + const std::string& pipelineType, + const std::string& nnType, + bool enableImu) { + ROS_INFO("Pipeline type: %s", pipelineType.c_str()); + std::string pluginType = pipelineType; + std::vector> daiNodes; + // Check if one of the default types. 
+ try { + std::string pTypeUpCase = utils::getUpperCaseStr(pipelineType); + auto pType = utils::getValFromMap(pTypeUpCase, pipelineTypeMap); + pType = validatePipeline(pType, device->getCameraSensorNames().size()); + pluginType = utils::getValFromMap(pTypeUpCase, pluginTypeMap); + } catch(std::out_of_range& e) { + ROS_DEBUG("Pipeline type [%s] not found in base types, trying to load as a plugin.", pipelineType.c_str()); + } + pluginlib::ClassLoader pipelineLoader("depthai_ros_driver", "depthai_ros_driver::pipeline_gen::BasePipeline"); + + try { + boost::shared_ptr pipelinePlugin = pipelineLoader.createInstance(pluginType); + daiNodes = pipelinePlugin->createPipeline(node, device, pipeline, nnType); + } catch(pluginlib::PluginlibException& ex) { + ROS_ERROR("The plugin failed to load for some reason. Error: %s\n", ex.what()); + } + + if(enableImu) { + if(device->getConnectedIMU() == "NONE") { + ROS_WARN("IMU enabled but not available!"); + } else { + auto imu = std::make_unique("imu", node, pipeline, device); + daiNodes.push_back(std::move(imu)); + } + } + + ROS_INFO("Finished setting up pipeline."); + return daiNodes; +} + +PipelineType PipelineGenerator::validatePipeline(PipelineType type, int sensorNum) { + if(sensorNum == 1) { + if(type != PipelineType::RGB) { + ROS_ERROR("Wrong pipeline chosen for camera as it has only one sensor. Switching to RGB."); + return PipelineType::RGB; + } + } else if(sensorNum == 2) { + if(type != PipelineType::Stereo && type != PipelineType::Depth) { + ROS_ERROR("Wrong pipeline chosen for camera as it has only stereo pair. Switching to Stereo."); + return PipelineType::Stereo; + } + } else if(sensorNum > 3 && type != PipelineType::CamArray) { + ROS_ERROR("For cameras with more than three sensors you can only use CamArray. 
Switching to CamArray."); + return PipelineType::CamArray; + } + return type; +} + +} // namespace pipeline_gen +} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/src/pipeline_generator.cpp b/depthai_ros_driver/src/pipeline_generator.cpp deleted file mode 100644 index a65e4c18..00000000 --- a/depthai_ros_driver/src/pipeline_generator.cpp +++ /dev/null @@ -1,170 +0,0 @@ -#include "depthai_ros_driver/pipeline_generator.hpp" - -#include "depthai/device/Device.hpp" -#include "depthai/pipeline/Pipeline.hpp" -#include "depthai_ros_driver/dai_nodes/nn/nn_helpers.hpp" -#include "depthai_ros_driver/dai_nodes/nn/nn_wrapper.hpp" -#include "depthai_ros_driver/dai_nodes/nn/spatial_nn_wrapper.hpp" -#include "depthai_ros_driver/dai_nodes/sensors/imu.hpp" -#include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" -#include "depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp" -#include "depthai_ros_driver/dai_nodes/stereo.hpp" -#include "depthai_ros_driver/utils.hpp" -#include "ros/node_handle.h" - -namespace depthai_ros_driver { -namespace pipeline_gen { -std::vector> PipelineGenerator::createPipeline(ros::NodeHandle node, - std::shared_ptr device, - std::shared_ptr pipeline, - const std::string& pipelineType, - const std::string& nnType, - bool enableImu) { - ROS_INFO("Pipeline type: %s", pipelineType.c_str()); - std::string pTypeUpCase = utils::getUpperCaseStr(pipelineType); - std::string nTypeUpCase = utils::getUpperCaseStr(nnType); - auto pType = utils::getValFromMap(pTypeUpCase, pipelineTypeMap); - pType = validatePipeline(pType, device->getCameraSensorNames().size()); - auto nType = utils::getValFromMap(nTypeUpCase, nnTypeMap); - std::vector> daiNodes; - switch(pType) { - case PipelineType::RGB: { - auto rgb = std::make_unique("rgb", node, pipeline, device, dai::CameraBoardSocket::RGB); - switch(nType) { - case NNType::None: - break; - case NNType::RGB: { - auto nn = createNN(node, pipeline, *rgb); - 
daiNodes.push_back(std::move(nn)); - break; - } - case NNType::Spatial: { - ROS_WARN("Spatial NN selected, but configuration is RGB. NN not created."); - } - default: - break; - } - daiNodes.push_back(std::move(rgb)); - break; - } - - case PipelineType::RGBD: { - auto rgb = std::make_unique("rgb", node, pipeline, device, dai::CameraBoardSocket::RGB); - auto stereo = std::make_unique("stereo", node, pipeline, device); - switch(nType) { - case NNType::None: - break; - case NNType::RGB: { - auto nn = createNN(node, pipeline, *rgb); - daiNodes.push_back(std::move(nn)); - break; - } - case NNType::Spatial: { - auto nn = createSpatialNN(node, pipeline, *rgb, *stereo); - daiNodes.push_back(std::move(nn)); - break; - } - default: - break; - } - daiNodes.push_back(std::move(rgb)); - daiNodes.push_back(std::move(stereo)); - break; - } - case PipelineType::RGBStereo: { - auto rgb = std::make_unique("rgb", node, pipeline, device, dai::CameraBoardSocket::RGB); - auto left = std::make_unique("left", node, pipeline, device, dai::CameraBoardSocket::LEFT); - auto right = std::make_unique("right", node, pipeline, device, dai::CameraBoardSocket::RIGHT); - switch(nType) { - case NNType::None: - break; - case NNType::RGB: { - auto nn = createNN(node, pipeline, *rgb); - daiNodes.push_back(std::move(nn)); - break; - } - case NNType::Spatial: { - ROS_WARN("Spatial NN selected, but configuration is RGBStereo. 
NN not created."); - } - default: - break; - } - daiNodes.push_back(std::move(rgb)); - daiNodes.push_back(std::move(left)); - daiNodes.push_back(std::move(right)); - break; - } - case PipelineType::Stereo: { - auto left = std::make_unique("left", node, pipeline, device, dai::CameraBoardSocket::LEFT); - auto right = std::make_unique("right", node, pipeline, device, dai::CameraBoardSocket::RIGHT); - daiNodes.push_back(std::move(left)); - daiNodes.push_back(std::move(right)); - break; - } - case PipelineType::Depth: { - auto stereo = std::make_unique("stereo", node, pipeline, device); - daiNodes.push_back(std::move(stereo)); - break; - } - case PipelineType::CamArray: { - int i = 0; - int j = 0; - for(auto& sensor : device->getCameraSensorNames()) { - // append letter for greater sensor number - if(i % alphabet.size() == 0) { - j++; - } - std::string nodeName(j, alphabet[i % alphabet.size()]); - auto daiNode = std::make_unique(nodeName, node, pipeline, device, sensor.first); - daiNodes.push_back(std::move(daiNode)); - i++; - }; - break; - } - default: { - std::string configuration = pipelineType; - throw std::runtime_error("UNKNOWN PIPELINE TYPE SPECIFIED/CAMERA DOESN'T SUPPORT GIVEN PIPELINE. 
Configuration: " + configuration); - } - } - if(enableImu) { - auto imu = std::make_unique("imu", node, pipeline, device); - daiNodes.push_back(std::move(imu)); - } - ROS_INFO("Finished setting up pipeline."); - return daiNodes; -} -std::unique_ptr PipelineGenerator::createNN(ros::NodeHandle node, std::shared_ptr pipeline, dai_nodes::BaseNode& daiNode) { - auto nn = std::make_unique("nn", node, pipeline); - daiNode.link(nn->getInput(), static_cast(dai_nodes::link_types::RGBLinkType::preview)); - return nn; -} -std::unique_ptr PipelineGenerator::createSpatialNN(ros::NodeHandle node, - std::shared_ptr pipeline, - dai_nodes::BaseNode& daiNode, - dai_nodes::BaseNode& daiStereoNode) { - auto nn = std::make_unique("nn", node, pipeline); - daiNode.link(nn->getInput(static_cast(dai_nodes::nn_helpers::link_types::SpatialNNLinkType::input)), - static_cast(dai_nodes::link_types::RGBLinkType::preview)); - daiStereoNode.link(nn->getInput(static_cast(dai_nodes::nn_helpers::link_types::SpatialNNLinkType::inputDepth))); - return nn; -} -PipelineType PipelineGenerator::validatePipeline(PipelineType type, int sensorNum) { - if(sensorNum == 1) { - if(type != PipelineType::RGB) { - ROS_ERROR("Wrong pipeline chosen for camera as it has only one sensor. Switching to RGB."); - return PipelineType::RGB; - } - } else if(sensorNum == 2) { - if(type != PipelineType::Stereo || type != PipelineType::Depth) { - ROS_ERROR("Wrong pipeline chosen for camera as it has only stereo pair. Switching to Stereo."); - return PipelineType::Stereo; - } - } else if(sensorNum > 3 && type != PipelineType::CamArray) { - ROS_ERROR("For cameras with more than three sensors you can only use CamArray. 
Switching to CamArray."); - return PipelineType::CamArray; - } - return type; -} - -} // namespace pipeline_gen -} // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_msgs/CMakeLists.txt b/depthai_ros_msgs/CMakeLists.txt index d0338bdf..bcd37c14 100644 --- a/depthai_ros_msgs/CMakeLists.txt +++ b/depthai_ros_msgs/CMakeLists.txt @@ -1,6 +1,6 @@ cmake_minimum_required (VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS -project (depthai_ros_msgs VERSION 2.7.2) +project (depthai_ros_msgs VERSION 2.7.3) if(POLICY CMP0057) cmake_policy(SET CMP0057 NEW) diff --git a/depthai_ros_msgs/package.xml b/depthai_ros_msgs/package.xml index f8f0dc43..1b1681f2 100644 --- a/depthai_ros_msgs/package.xml +++ b/depthai_ros_msgs/package.xml @@ -1,7 +1,7 @@ depthai_ros_msgs - 2.7.2 + 2.7.3 Package to keep interface independent of the driver Sachin Guruswamy