From c212f8305d609d255546f797783ef4c722cb1d19 Mon Sep 17 00:00:00 2001 From: Amal Nanavati Date: Fri, 3 Nov 2023 17:53:31 -0700 Subject: [PATCH] Implement Octomap and Misc Bug Fixes (#125) * Fixed watchdog F/T, added camera info to repub, added behavior logs * Added mask to republisher * added mask * Add manually-defined shadow to mask * Added temporal and spatial postprocessors * Added thresholding * Added extra safeguards when stopping Servo * Add persistent attached food sphere to forkTip * Updated save_image script * Removed food collision object as Octomap doesn't properly filter it out See here for more: https://github.com/ros-planning/moveit_ros/issues/315 * Added a separate depth octomap topic that removes the forktip and food * Addressed PR comments --- .pylintrc | 2 + .../acquisition/compute_action_constraints.py | 1 + ada_feeding/ada_feeding/idioms/__init__.py | 1 + .../ada_feeding/idioms/wait_for_secs.py | 38 +++ .../ada_feeding/trees/acquire_food_tree.py | 14 +- .../ada_feeding/trees/start_servo_tree.py | 1 + .../ada_feeding/trees/stop_servo_tree.py | 62 ++++- ada_feeding/config/ada_planning_scene.yaml | 15 +- ada_feeding/scripts/ada_planning_scene.py | 88 ++++-- ada_feeding/scripts/save_image.py | 50 ++-- .../depth_post_processors.py | 241 ++++++++++++++++ .../ada_feeding_perception/republisher.py | 262 ++++++++++++++++-- .../config/republisher.yaml | 43 ++- .../model/fork_handle_mask.png | Bin 0 -> 8256 bytes .../model/fork_handle_mask_with_shadow.png | Bin 0 -> 7919 bytes ada_feeding_perception/setup.py | 5 + 16 files changed, 751 insertions(+), 72 deletions(-) create mode 100644 ada_feeding/ada_feeding/idioms/wait_for_secs.py create mode 100644 ada_feeding_perception/ada_feeding_perception/depth_post_processors.py create mode 100644 ada_feeding_perception/model/fork_handle_mask.png create mode 100644 ada_feeding_perception/model/fork_handle_mask_with_shadow.png diff --git a/.pylintrc b/.pylintrc index 48b40557..5c67333f 100644 --- a/.pylintrc +++ b/.pylintrc @@ -26,6 +26,7 @@ clear-cache-post-run=no # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. extension-pkg-allow-list=cv2, + cv, tf2_py # A comma-separated list of package or module names from where C extensions may @@ -33,6 +34,7 @@ extension-pkg-allow-list=cv2, # run arbitrary code. (This is an alternative name to extension-pkg-allow-list # for backward compatibility.) 
extension-pkg-whitelist=cv2, + cv, tf2_py # Return non-zero exit code if any of these messages/categories are detected, diff --git a/ada_feeding/ada_feeding/behaviors/acquisition/compute_action_constraints.py b/ada_feeding/ada_feeding/behaviors/acquisition/compute_action_constraints.py index 9069e8c0..de478da1 100644 --- a/ada_feeding/ada_feeding/behaviors/acquisition/compute_action_constraints.py +++ b/ada_feeding/ada_feeding/behaviors/acquisition/compute_action_constraints.py @@ -122,6 +122,7 @@ def setup(self, **kwargs): @override def update(self) -> py_trees.common.Status: # Docstring copied from @override + self.logger.info(f"{self.name} [{self.__class__.__name__}::update()]") # Input Validation if not self.blackboard_exists("action_select_response"): diff --git a/ada_feeding/ada_feeding/idioms/__init__.py b/ada_feeding/ada_feeding/idioms/__init__.py index 587b6457..aa42df79 100644 --- a/ada_feeding/ada_feeding/idioms/__init__.py +++ b/ada_feeding/ada_feeding/idioms/__init__.py @@ -6,3 +6,4 @@ from .pre_moveto_config import pre_moveto_config from .retry_call_ros_service import retry_call_ros_service from .scoped_behavior import scoped_behavior +from .wait_for_secs import wait_for_secs diff --git a/ada_feeding/ada_feeding/idioms/wait_for_secs.py b/ada_feeding/ada_feeding/idioms/wait_for_secs.py new file mode 100644 index 00000000..45a426fc --- /dev/null +++ b/ada_feeding/ada_feeding/idioms/wait_for_secs.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +This module defines the wait_for_secs idiom, which returns a behavior that +waits for a specified number of seconds. +""" + +# Standard imports + +# Third-party imports +import py_trees + + +def wait_for_secs( + name: str, + secs: float, +) -> py_trees.behaviour.Behaviour: + """ + Creates a behavior that returns RUNNING for a specified number of secs + and then returns SUCCESS. + + Parameters + ---------- + name: The name of the behavior. + secs: The number of seconds to wait. + """ + + # pylint: disable=abstract-class-instantiated + # Creating a Running behavior is not instantiating an abstract class. + + return py_trees.decorators.FailureIsSuccess( + name=name, + child=py_trees.decorators.Timeout( + name=name + "Timeout", + duration=secs, + child=py_trees.behaviours.Running(), + ), + ) diff --git a/ada_feeding/ada_feeding/trees/acquire_food_tree.py b/ada_feeding/ada_feeding/trees/acquire_food_tree.py index 4ce47f73..d17ce652 100644 --- a/ada_feeding/ada_feeding/trees/acquire_food_tree.py +++ b/ada_feeding/ada_feeding/trees/acquire_food_tree.py @@ -97,6 +97,8 @@ def create_tree( ) ### Define Tree Logic + # NOTE: If octomap clearing ends up being an issue, we should + # consider adding a call to the /clear_octomap service to this tree. 
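If clearing ever does become an issue, a call along these lines could be added. This is a minimal standalone sketch, not part of this patch: it assumes MoveIt's default `/clear_octomap` service of type `std_srvs/srv/Empty` (the exact name depends on how move_group is launched), and the `OctomapClearer` node name is made up for illustration.

#!/usr/bin/env python3
# Hypothetical sketch of the /clear_octomap call contemplated in the NOTE above.
import rclpy
from rclpy.node import Node
from std_srvs.srv import Empty


class OctomapClearer(Node):
    """Small helper node that clears MoveIt's octomap on demand."""

    def __init__(self) -> None:
        super().__init__("octomap_clearer")
        # Service name assumes MoveIt's default; adjust to the move_group namespace.
        self.client = self.create_client(Empty, "/clear_octomap")

    def clear(self) -> None:
        # Wait briefly for move_group to expose the service, then fire and forget.
        if self.client.wait_for_service(timeout_sec=1.0):
            self.client.call_async(Empty.Request())
        else:
            self.get_logger().warn("/clear_octomap is not available")


if __name__ == "__main__":
    rclpy.init()
    node = OctomapClearer()
    node.clear()
    rclpy.shutdown()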
# Root Sequence root_seq = py_trees.composites.Sequence( name=name, @@ -160,7 +162,7 @@ def create_tree( ), ), # Re-Tare FT Sensor and default to 4N threshold - pre_moveto_config(name="PreAquireFTTare"), + pre_moveto_config(name="PreAcquireFTTare"), ### Move Above Food MoveIt2PoseConstraint( name="MoveAbovePose", @@ -222,10 +224,10 @@ def create_tree( ], ), ToggleCollisionObject( - name="AllowTable", + name="AllowTableAndOctomap", ns=name, inputs={ - "collision_object_ids": ["table"], + "collision_object_ids": ["table", ""], "allow": True, }, ), @@ -237,10 +239,10 @@ def create_tree( children=[ pre_moveto_config(name="PostAcquireFTSet", re_tare=False), ToggleCollisionObject( - name="DisallowTable", + name="DisallowTableAndOctomap", ns=name, inputs={ - "collision_object_ids": ["table"], + "collision_object_ids": ["table", ""], "allow": False, }, ), @@ -362,7 +364,7 @@ def create_tree( ), # Auto Zero-Twist on terminate() ### Extraction ComputeActionTwist( - name="ComputeGrasp", + name="ComputeExtract", ns=name, inputs={ "action": BlackboardKey("action"), diff --git a/ada_feeding/ada_feeding/trees/start_servo_tree.py b/ada_feeding/ada_feeding/trees/start_servo_tree.py index 87fa6533..8ef0a54d 100644 --- a/ada_feeding/ada_feeding/trees/start_servo_tree.py +++ b/ada_feeding/ada_feeding/trees/start_servo_tree.py @@ -60,6 +60,7 @@ def create_tree( activate_controllers=[self.servo_controller_name], deactivate_controllers=[self.move_group_controller_name], activate_asap=True, + strictness=SwitchController.Request.BEST_EFFORT, ) switch_controllers_key_response = Blackboard.separator.join( [name, "switch_controllers", "response"] diff --git a/ada_feeding/ada_feeding/trees/stop_servo_tree.py b/ada_feeding/ada_feeding/trees/stop_servo_tree.py index 3de0f707..1895e629 100644 --- a/ada_feeding/ada_feeding/trees/stop_servo_tree.py +++ b/ada_feeding/ada_feeding/trees/stop_servo_tree.py @@ -8,6 +8,7 @@ import operator # Third-party imports +from controller_manager_msgs.srv import SwitchController from geometry_msgs.msg import Twist, TwistStamped from overrides import override import py_trees @@ -19,7 +20,9 @@ from std_srvs.srv import Trigger # Local imports -from ada_feeding.idioms import retry_call_ros_service +from ada_feeding.behaviors import UpdateTimestamp +from ada_feeding.helpers import BlackboardKey +from ada_feeding.idioms import retry_call_ros_service, wait_for_secs from ada_feeding.trees import TriggerTree @@ -27,13 +30,18 @@ class StopServoTree(TriggerTree): """ This behavior tree does the following: 1. Publishes one 0-velocity twist message to `~/servo_twist_cmds`. - 2. Calls the `~/stop_servo` service to stop MoveIt Servo + 2. Waits for `delay` seconds, to allow servo to publish stop commands + to the controller. + 3. Calls the `~/stop_servo` service to stop MoveIt Servo. + 4. Calls the `~/switch_controller` service to turn off the servo controller. """ def __init__( self, node: Node, base_frame_id: str = "j2n6s200_link_base", + servo_controller_name: str = "jaco_arm_servo_controller", + delay: float = 0.5, ) -> None: """ Initializes the behavior tree.
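For intuition on the new step 2: the `wait_for_secs` idiom added earlier in this patch composes FailureIsSuccess(Timeout(Running)), so it reports RUNNING until the duration elapses and SUCCESS afterwards. A minimal sketch of ticking it standalone, outside any tree:

# Sketch: tick the wait_for_secs idiom on its own to watch the delay behavior.
import time

import py_trees

from ada_feeding.idioms import wait_for_secs

behavior = wait_for_secs("Delay", 0.5)
behavior.setup_with_descendants()
while behavior.status != py_trees.common.Status.SUCCESS:
    behavior.tick_once()  # RUNNING until ~0.5s have elapsed, then SUCCESS
    time.sleep(0.05)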
@@ -47,6 +55,8 @@ def __init__( # Initialize the TriggerTree class super().__init__(node=node) self.base_frame_id = base_frame_id + self.servo_controller_name = servo_controller_name + self.delay = delay @override def create_tree( @@ -75,6 +85,17 @@ def create_tree( ), ) + # Update the timestamp of the twist message so it's not stale + update_timestamp = UpdateTimestamp( + name=name + "Update Timestamp", + inputs={ + "stamped_msg": BlackboardKey(twist_key), + }, + outputs={ + "stamped_msg": BlackboardKey(twist_key), + }, + ) + # Create the behavior to publish the twist message twist_pub = py_trees_ros.publishers.FromBlackboard( name=name + "Publish Twist", @@ -84,6 +105,9 @@ def create_tree( blackboard_variable=twist_key, ) + # Add a delay to allow servo to publish stop commands + delay_behavior = wait_for_secs(name + "Delay", self.delay) + # Create the behavior to stop servo stop_servo_key_response = Blackboard.separator.join( [name, "stop_servo", "response"] @@ -104,12 +128,44 @@ def create_tree( ], ) + # Create the behavior to turn off the controllers + stop_controller_req = SwitchController.Request( + activate_controllers=[], + deactivate_controllers=[self.servo_controller_name], + activate_asap=True, + strictness=SwitchController.Request.BEST_EFFORT, + ) + stop_controllers_key_response = Blackboard.separator.join( + [name, "switch_controllers", "response"] ) + stop_controllers = retry_call_ros_service( + name=name + "Deactivate Servo Controller", + service_type=SwitchController, + service_name="~/switch_controller", + key_request=None, + request=stop_controller_req, + key_response=stop_controllers_key_response, + response_checks=[ + py_trees.common.ComparisonExpression( + variable=stop_controllers_key_response + ".ok", + value=True, + operator=operator.eq, + ) + ], + ) + # Put them together in a sequence with memory # pylint: disable=duplicate-code return py_trees.trees.BehaviourTree( root=py_trees.composites.Sequence( name=name, memory=True, - children=[twist_pub, stop_servo], + children=[ + update_timestamp, + twist_pub, + delay_behavior, + stop_servo, + stop_controllers, + ], ) ) diff --git a/ada_feeding/config/ada_planning_scene.yaml b/ada_feeding/config/ada_planning_scene.yaml index ab9fb33d..763494eb 100644 --- a/ada_feeding/config/ada_planning_scene.yaml +++ b/ada_feeding/config/ada_planning_scene.yaml @@ -9,12 +9,17 @@ ada_planning_scene: - workspace_wall_front - workspace_wall_left - workspace_wall_top + # - food # For each object, specify: # - Shape: EITHER `filepath` (for a mesh) OR `primitive_type` and # `primitive_dims` (for a primitive -- see shape_msgs/SolidPrimitive.msg) # - Pose: `position` and `quat_xyzw` (see geometry_msgs/Pose.msg) # - Frame ID: `frame_id` (the frame_id of the object that the pose is # relative to) + # - [Optional] To attach the collision object, specify `attached` as True. + # In that case, `frame_id` must be a link on the robot to attach the object + # to, and `touch_links` must be a list of additional links that should be + # ignored for collision checking. wheelchair: # the wheelchair mesh filename: wheelchair.stl position: [0.02, -0.02, -0.05] @@ -55,4 +60,12 @@ ada_planning_scene: primitive_dims: [1.59, 1.5, 0.01] # Box has 3 dims: [x, y, z] position: [-0.05, 0.17, 1.05] quat_xyzw: [0.0, 0.0, 0.0, 1.0] - frame_id: root # the frame_id that the pose is relative to \ No newline at end of file + frame_id: root # the frame_id that the pose is relative to + food: + primitive_type: 2 # Sphere=2.
See shape_msgs/SolidPrimitive.msg + primitive_dims: [0.05] # Sphere has 1 dim: [radius] + position: [0.0, 0.0, 0.0] + quat_xyzw: [0.0, 0.0, 0.0, 1.0] + attached: True + frame_id: forkTip # the frame_id that the pose is relative to + touch_links: [forkTine] # the links, in addition to frame_id, that should be ignored for collision checking diff --git a/ada_feeding/scripts/ada_planning_scene.py b/ada_feeding/scripts/ada_planning_scene.py index 37f8c7dd..33077b1f 100755 --- a/ada_feeding/scripts/ada_planning_scene.py +++ b/ada_feeding/scripts/ada_planning_scene.py @@ -22,8 +22,8 @@ from rclpy.executors import MultiThreadedExecutor from rclpy.node import Node -CollisionMeshParams = namedtuple( - "CollisionMeshParams", +CollisionObjectParams = namedtuple( + "CollisionObjectParams", [ "filepath", "primitive_type", @@ -31,6 +31,8 @@ "position", "quat_xyzw", "frame_id", + "attached", + "touch_links", ], ) @@ -189,6 +191,32 @@ def load_parameters(self) -> None: read_only=True, ), ) + attached = self.declare_parameter( + f"{object_id}.attached", + False, + descriptor=ParameterDescriptor( + name="attached", + type=ParameterType.PARAMETER_BOOL, + description=( + "Whether the object is attached to the robot. " + "If True, frame_id must be a robot link." + ), + read_only=True, + ), + ) + touch_links = self.declare_parameter( + f"{object_id}.touch_links", + None, + descriptor=ParameterDescriptor( + name="touch_links", + type=ParameterType.PARAMETER_STRING_ARRAY, + description=( + "The links that the object is allowed to touch. " + "Only applies if `attached` is True." + ), + read_only=True, + ), + ) # Add the object to the list of objects filepath = ( @@ -196,13 +224,16 @@ def load_parameters(self) -> None: if filename.value is None else path.join(assets_dir.value, filename.value) ) - self.objects[object_id] = CollisionMeshParams( + touch_links = [] if touch_links.value is None else touch_links.value + self.objects[object_id] = CollisionObjectParams( filepath=filepath, primitive_type=primitive_type.value, primitive_dims=primitive_dims.value, position=position.value, quat_xyzw=quat_xyzw.value, frame_id=frame_id.value, + attached=attached.value, + touch_links=touch_links, ) def wait_for_moveit(self) -> None: @@ -225,22 +256,43 @@ def initialize_planning_scene(self) -> None: # Add each object to the planning scene for object_id, params in self.objects.items(): if params.primitive_type is None: - self.moveit2.add_collision_mesh( - id=object_id, - filepath=params.filepath, - position=params.position, - quat_xyzw=params.quat_xyzw, - frame_id=params.frame_id, - ) + if params.attached: + self.moveit2.add_attached_collision_mesh( + id=object_id, + filepath=params.filepath, + position=params.position, + quat_xyzw=params.quat_xyzw, + link_name=params.frame_id, + touch_links=params.touch_links, + ) + else: + self.moveit2.add_collision_mesh( + id=object_id, + filepath=params.filepath, + position=params.position, + quat_xyzw=params.quat_xyzw, + frame_id=params.frame_id, + ) else: - self.moveit2.add_collision_primitive( - id=object_id, - prim_type=params.primitive_type, - dims=params.primitive_dims, - position=params.position, - quat_xyzw=params.quat_xyzw, - frame_id=params.frame_id, - ) + if params.attached: + self.moveit2.add_attached_collision_primitive( + id=object_id, + prim_type=params.primitive_type, + dims=params.primitive_dims, + position=params.position, + quat_xyzw=params.quat_xyzw, + link_name=params.frame_id, + touch_links=params.touch_links, + ) + else: + self.moveit2.add_collision_primitive( + 
id=object_id, + prim_type=params.primitive_type, + dims=params.primitive_dims, + position=params.position, + quat_xyzw=params.quat_xyzw, + frame_id=params.frame_id, + ) time.sleep(0.2) diff --git a/ada_feeding/scripts/save_image.py b/ada_feeding/scripts/save_image.py index 359498a3..dca9ca59 100755 --- a/ada_feeding/scripts/save_image.py +++ b/ada_feeding/scripts/save_image.py @@ -6,6 +6,7 @@ # Standard imports import threading +from typing import Union # Third-party imports import cv2 @@ -60,8 +61,8 @@ def __init__(self) -> None: self.latest_color_image = None self.latest_color_image_lock = threading.Lock() self.create_subscription( - CompressedImage, - "/camera/color/image_raw/compressed", + Image, + "/camera/color/image_raw", self.color_image_callback, 1, ) @@ -69,7 +70,7 @@ def __init__(self) -> None: self.latest_depth_image_lock = threading.Lock() self.create_subscription( Image, - "/camera/aligned_depth_to_color/image_raw", + "/local/camera/aligned_depth_to_color/image_raw", self.depth_image_callback, 1, ) @@ -77,7 +78,7 @@ def __init__(self) -> None: # Log that the node is ready self.get_logger().info("SaveImage node ready") - def color_image_callback(self, msg: CompressedImage) -> None: + def color_image_callback(self, msg: Union[CompressedImage, Image]) -> None: """ Callback function for the RealSense's color image topic (compressed or raw). """ @@ -97,40 +98,45 @@ def save_image_callback( """ Callback function for the save_image ROS service. """ + response.message = "" # Check if the filepath parameter is set if self.filepath.value is None: response.success = False - response.message = "The filepath parameter is not set" + response.message += "The filepath parameter is not set" return response # Check if the latest color image is available with self.latest_color_image_lock: if self.latest_color_image is None: response.success = False - response.message = "The latest color image is not available" - return response + response.message += ", The latest color image is not available" + else: + # Save the latest color image + if isinstance(self.latest_color_image, CompressedImage): + color_image = self.bridge.compressed_imgmsg_to_cv2( + self.latest_color_image + ) + else: + color_image = self.bridge.imgmsg_to_cv2(self.latest_color_image) + color_image = cv2.cvtColor(color_image, cv2.COLOR_RGB2BGR) + color_image_filepath = self.filepath.value + "_rgb.jpg" + self.get_logger().info(f"Saving color image to {color_image_filepath}") + cv2.imwrite(color_image_filepath, color_image) # Check if the latest depth image is available with self.latest_depth_image_lock: if self.latest_depth_image is None: response.success = False - response.message = "The latest depth image is not available" - return response - - # Save the latest color image - color_image = self.bridge.compressed_imgmsg_to_cv2(self.latest_color_image) - color_image_filepath = self.filepath.value + "_rgb.jpg" - self.get_logger().info(f"Saving color image to {color_image_filepath}") - cv2.imwrite(color_image_filepath, color_image) - - # Save the latest depth image - depth_image = self.bridge.imgmsg_to_cv2(self.latest_depth_image) - depth_image_filepath = self.filepath.value + "_depth.png" - self.get_logger().info(f"Saving depth image to {depth_image_filepath}") - cv2.imwrite(depth_image_filepath, depth_image) + response.message += ", The latest depth image is not available" + else: + # Save the latest depth image + depth_image = self.bridge.imgmsg_to_cv2(self.latest_depth_image) + depth_image_filepath = self.filepath.value + "_depth.png" + self.get_logger().info(f"Saving depth image to {depth_image_filepath}") + cv2.imwrite(depth_image_filepath, depth_image) # Return a success response - response.success = True - response.message = "Successfully saved the latest color image and depth image" + response.success = len(response.message) == 0 + if response.success: + response.message = "Successfully saved the latest color image and depth image" + else: + # Surface the accumulated errors instead of overwriting them + response.message = response.message.lstrip(", ") return response
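The save_image service itself is declared outside this hunk, so its name and exact type are not shown here. Assuming it is exposed as a `std_srvs/srv/Trigger` service named `/save_image` (both are assumptions for illustration), a client sketch could look like:

# Hypothetical client; the "/save_image" name and Trigger type are assumptions.
import rclpy
from rclpy.node import Node
from std_srvs.srv import Trigger

rclpy.init()
node = Node("save_image_client")
client = node.create_client(Trigger, "/save_image")
if client.wait_for_service(timeout_sec=5.0):
    future = client.call_async(Trigger.Request())
    rclpy.spin_until_future_complete(node, future)
    result = future.result()
    # success is False if either image was unavailable; message explains why
    print(result.success, result.message)
rclpy.shutdown()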
diff --git a/ada_feeding_perception/ada_feeding_perception/depth_post_processors.py b/ada_feeding_perception/ada_feeding_perception/depth_post_processors.py new file mode 100644 index 00000000..82ee9f1c --- /dev/null +++ b/ada_feeding_perception/ada_feeding_perception/depth_post_processors.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python3 +""" +This module contains a series of post-processors for depth images. +""" + +# Standard imports +from typing import Callable + +# Third-party imports +import cv2 as cv +from cv_bridge import CvBridge +import numpy as np +import numpy.typing as npt +from sensor_msgs.msg import Image + + +def create_mask_post_processor( + mask_img: npt.NDArray[np.uint8], bridge: CvBridge +) -> Callable[[Image], Image]: + """ + Creates the mask post-processor function, which applies a fixed mask to the image. + + Parameters + ---------- + mask_img : npt.NDArray[np.uint8] + The mask to apply. + bridge : CvBridge + The CvBridge object to use. + + Returns + ------- + Callable[[Image], Image] + The post-processor function. + """ + + def mask_post_processor(msg: Image) -> Image: + """ + Applies a fixed mask to the image. Scales the mask to the image. + + Parameters + ---------- + msg : Image + The image to mask. + + Returns + ------- + Image + The masked image. All other attributes of the message remain the same. + """ + # Read the ROS msg as a CV image + img = bridge.imgmsg_to_cv2(msg) + + # Scale the mask to be the size of the img + mask = cv.resize(mask_img, img.shape[:2][::-1]) + + # Apply the mask to the img + masked_img = cv.bitwise_and(img, img, mask=mask) + + # Get the new img message + masked_msg = bridge.cv2_to_imgmsg(masked_img) + masked_msg.header = msg.header + + return masked_msg + + return mask_post_processor + + +def create_temporal_post_processor( + temporal_window_size: int, bridge: CvBridge +) -> Callable[[Image], Image]: + """ + Creates the temporal post-processor function, with a dedicated window. + This post-processor only keeps pixels that are consistently non-zero across + the temporal window. + + Parameters + ---------- + temporal_window_size : int + The size of the temporal window (num frames). + bridge : CvBridge + The CvBridge object to use. + + Returns + ------- + Callable[[Image], Image] + The post-processor function. + """ + + temporal_window = [] + + def temporal_post_processor(msg: Image) -> Image: + """ + The temporal post-processor stores the last `temporal_window_size` images. + It returns the most recent image, but only the pixels in that image that + are non-zero across all images in the window. + + Parameters + ---------- + msg : Image + The image to process. + + Returns + ------- + Image + The processed image. All other attributes of the message remain the same.
+ """ + # Read the ROS msg as a CV image + img = bridge.imgmsg_to_cv2(msg) + + # Add it to the window + temporal_window.append(img) + + # If the window is full, remove the oldest image + if len(temporal_window) > temporal_window_size: + temporal_window.pop(0) + + # Get the mask + mask = (img > 0).astype(np.uint8) + for i in range(0, len(temporal_window) - 1): + mask = np.bitwise_and(mask, (temporal_window[i] > 0).astype(np.uint8)) + + # Mask the latest image + masked_img = cv.bitwise_and(img, img, mask=mask) + + # Get the new img message + masked_msg = bridge.cv2_to_imgmsg(masked_img) + masked_msg.header = msg.header + + return masked_msg + + return temporal_post_processor + + +def create_spatial_post_processor( + spatial_num_pixels: int, bridge: CvBridge +) -> Callable[[Image], Image]: + """ + Creates the spatial post-processor function, which applies the `opening` morpholical + transformation to the image. + + Parameters + ---------- + spatial_num_pixels : int + The size of the square opening kernel. + bridge : CvBridge + The CvBridge object to use. + + Returns + ------- + Callable[[Image], Image] + The post-processor function. + """ + + def spatial_post_processor(msg: Image) -> Image: + """ + Applies the `opening` morpholical transformation to the image. + + Parameters + ---------- + msg : Image + The image to process. + + Returns + ------- + Image + The processed image. All other attributes of the message remain the same. + """ + # Read the ROS msg as a CV image + img = bridge.imgmsg_to_cv2(msg) + + # Apply the opening morphological transformation + mask = (img > 0).astype(np.uint8) + kernel = np.ones((spatial_num_pixels, spatial_num_pixels), np.uint8) + opened_mask = cv.morphologyEx(mask, cv.MORPH_OPEN, kernel) + masked_img = cv.bitwise_and(img, img, mask=opened_mask) + + # Get the new img message + masked_msg = bridge.cv2_to_imgmsg(masked_img) + masked_msg.header = msg.header + + return masked_msg + + return spatial_post_processor + + +def create_threshold_post_processor( + threshold_min: int, threshold_max: int, bridge: CvBridge +) -> Callable[[Image], Image]: + """ + Creates the threshold post-processor function, which only keeps pixels within a + min and max value. + + Parameters + ---------- + threshold_min : int + The minimum threshold. + threshold_max : int + The maximum threshold. + bridge : CvBridge + The CvBridge object to use. + + Returns + ------- + Callable[[Image], Image] + The post-processor function. + """ + + def threshold_post_processor(msg: Image) -> Image: + """ + Applies a threshold to the image. + + Parameters + ---------- + msg : Image + The image to process. + threshold_min : int + The minimum threshold. + threshold_max : int + The maximum threshold. + bridge : CvBridge + The CvBridge object to use. + + Returns + ------- + Image + The processed image. All other attributes of the message remain the same. 
+ """ + # Read the ROS msg as a CV image + img = bridge.imgmsg_to_cv2(msg) + + # Apply the threshold + mask = cv.inRange(img, threshold_min, threshold_max) + masked_img = cv.bitwise_and(img, img, mask=mask) + + # Get the new img message + masked_msg = bridge.cv2_to_imgmsg(masked_img) + masked_msg.header = msg.header + + return masked_msg + + return threshold_post_processor diff --git a/ada_feeding_perception/ada_feeding_perception/republisher.py b/ada_feeding_perception/ada_feeding_perception/republisher.py index 45932ed1..095426d3 100644 --- a/ada_feeding_perception/ada_feeding_perception/republisher.py +++ b/ada_feeding_perception/ada_feeding_perception/republisher.py @@ -10,9 +10,13 @@ """ # Standard imports +import os from typing import Any, Callable, List, Tuple # Third-party imports +from ament_index_python.packages import get_package_share_directory +import cv2 as cv +from cv_bridge import CvBridge from rcl_interfaces.msg import ParameterDescriptor, ParameterType import rclpy from rclpy.callback_groups import MutuallyExclusiveCallbackGroup @@ -21,6 +25,12 @@ # Local imports from ada_feeding.helpers import import_from_string +from .depth_post_processors import ( + create_mask_post_processor, + create_spatial_post_processor, + create_temporal_post_processor, + create_threshold_post_processor, +) class Republisher(Node): @@ -33,6 +43,8 @@ def __init__(self) -> None: """ Initialize the node. """ + # pylint: disable=too-many-locals, too-many-branches, too-many-nested-blocks + # Necessary because we are handling several post-processors and topics. super().__init__("republisher") self._default_callback_group = rclpy.callback_groups.ReentrantCallbackGroup() @@ -41,32 +53,105 @@ def __init__(self) -> None: ( self.from_topics, topic_type_strs, - republished_namespace, + to_topics, + post_processors_strs, + mask_relative_path, + temporal_window_size, + spatial_num_pixels, + threshold_min, + threshold_max, ) = self.load_parameters() + # Configure the post-processors. + identity_post_processor_str = "none" + mask_post_processor_str = "mask" + temporal_post_processor_str = "temporal" + spatial_post_processor_str = "spatial" + threshold_post_processor_str = "threshold" + # If at least one post-processor is "mask", load the mask + bridge = CvBridge() + mask_img = None + for i, post_processors_str in enumerate(post_processors_strs): + for j, post_processor_str in enumerate(post_processors_str): + if post_processor_str == mask_post_processor_str and mask_img is None: + if mask_relative_path is None: + self.get_logger().warn( + "Must specify `mask_relative_path` to use post-processor " + f"`{mask_post_processor_str}`. Replacing with post-processor " + f"{identity_post_processor_str}." 
+ ) + post_processors_strs[i][j] = identity_post_processor_str + continue + + # Get the mask path + mask_path = os.path.join( + get_package_share_directory("ada_feeding_perception"), + mask_relative_path, + ) + + # Load the image as a binary mask + mask_img = cv.imread(mask_path, cv.IMREAD_GRAYSCALE) + + # Configure the post-processing functions + self.create_post_processors = { + identity_post_processor_str: lambda: lambda msg: msg, + mask_post_processor_str: lambda: create_mask_post_processor( + mask_img, bridge + ), + temporal_post_processor_str: lambda: create_temporal_post_processor( + temporal_window_size, bridge + ), + spatial_post_processor_str: lambda: create_spatial_post_processor( + spatial_num_pixels, bridge + ), + threshold_post_processor_str: lambda: create_threshold_post_processor( + threshold_min, threshold_max, bridge + ), + } + # Import the topic types self.topic_types = [] for topic_type_str in topic_type_strs: self.topic_types.append(import_from_string(topic_type_str)) # For each topic, create a callback, publisher, and subscriber - num_topics = min(len(self.from_topics), len(self.topic_types)) + num_topics = min(len(self.from_topics), len(self.topic_types), len(to_topics)) self.callbacks = [] self.pubs = [] self.subs = [] for i in range(num_topics): + # Get the post-processors + post_processor_fns = [] + if i < len(post_processors_strs): + for post_processor_str in post_processors_strs[i]: + if len(post_processor_str) == 0: + # An empty entry means "no post-processing" + continue + if post_processor_str in self.create_post_processors: + post_processor_fn = self.create_post_processors[ + post_processor_str + ]() + # Check the type of the post-processor + if "msg" in post_processor_fn.__annotations__: + if ( + post_processor_fn.__annotations__["msg"] == Any + or post_processor_fn.__annotations__["msg"] + == self.topic_types[i] + ): + post_processor_fns.append(post_processor_fn) + else: + self.get_logger().warn( + f"Type mismatch for post-processor {post_processor_str} for " + f"topic at index {i}. Will not post-process." + ) + else: + self.get_logger().warn( + f"Unknown post-processor {post_processor_str} for topic at " + f"index {i}. Skipping it." + ) + # Create the callback - callback = self.create_callback(i) + callback = self.create_callback(i, post_processor_fns) self.callbacks.append(callback) # Create the publisher - to_topic = "/".join( - [ - "", - republished_namespace.lstrip("/"), - self.from_topics[i].lstrip("/"), - ] - ) + to_topic = to_topics[i] publisher = self.create_publisher( msg_type=self.topic_types[i], topic=to_topic, @@ -84,7 +169,11 @@ def __init__(self) -> None: ) self.subs.append(subscriber) - def load_parameters(self) -> Tuple[List[str], List[str], List[str]]: + def load_parameters( + self, + ) -> Tuple[ + List[str], List[str], List[str], List[List[str]], str, int, int, int, int + ]: """ Load the parameters for the republisher. @@ -94,8 +183,21 @@ def load_parameters(self) -> Tuple[List[str], List[str], List[str]]: The topics to subscribe to. topic_types : List[str] The types of the topics to subscribe to in format, e.g., `std_msgs.msg.String`. - republished_namespace : str - The namespace to republish topics under. + to_topics : List[str] + The topics to republish to. + post_processors : List[List[str]] + The post-processing functions to apply to the messages before republishing. + mask_relative_path : str + The path of the binary mask to be used with the post-processor, relative to + the ada_feeding_perception share directory. + temporal_window_size : int + The size of the window (num frames) to use for the temporal post-processor.
+ spatial_num_pixels : int + The number of pixels to use for the spatial post-processor. + threshold_min : int + The minimum value to use for the threshold post-processor. + threshold_max : int + The maximum value to use for the threshold post-processor. """ # Read the from topics from_topics = self.declare_parameter( @@ -123,13 +225,111 @@ def load_parameters(self) -> Tuple[List[str], List[str], List[str]]: ) # Read the to topics - republished_namespace = self.declare_parameter( - "republished_namespace", - "/local", + to_topics = self.declare_parameter( + "to_topics", + descriptor=ParameterDescriptor( + name="to_topics", + type=ParameterType.PARAMETER_STRING_ARRAY, + description="List of the topics to republish to.", + read_only=True, + ), + ) + + # Read the post-processing functions + post_processors = self.declare_parameter( + "post_processors", + descriptor=ParameterDescriptor( + name="post_processors", + type=ParameterType.PARAMETER_STRING_ARRAY, + description=( + "List of the post-processing functions to apply to the messages " + "before republishing, as a comma-separated list." + ), + read_only=True, + ), + ) + # Split the post-processors + post_processors_retval = post_processors.value + if post_processors_retval is None: + post_processors_retval = [] + else: + post_processors_retval = [ + [ + single_post_processor.strip() + for single_post_processor in post_processor.split(",") + ] + for post_processor in post_processors_retval + ] + + # Get the mask's relative path to use with the "mask" post-processor + mask_relative_path = self.declare_parameter( + "mask_relative_path", + None, descriptor=ParameterDescriptor( - name="republished_namespace", + name="mask_relative_path", type=ParameterType.PARAMETER_STRING, - description="The namespace to republish topics under.", + description=( + "The path of the binary mask to be used with the post-processor, " + "relative to the ada_feeding_perception share directory." + ), + read_only=True, + ), + ) + + # Get the window size to use with the temporal post-processor + temporal_window_size = self.declare_parameter( + "temporal_window_size", + 5, + descriptor=ParameterDescriptor( + name="temporal_window_size", + type=ParameterType.PARAMETER_INTEGER, + description=( + "The size of the window (num frames) to use for the temporal post-processor. " + "Default: 5" + ), + read_only=True, + ), + ) + + # Get the number of pixels to use with the spatial post-processor + spatial_num_pixels = self.declare_parameter( + "spatial_num_pixels", + 10, + descriptor=ParameterDescriptor( + name="spatial_num_pixels", + type=ParameterType.PARAMETER_INTEGER, + description=( + "The number of pixels to use for the spatial post-processor. " + "Default: 10" + ), + read_only=True, + ), + ) + + # The min/max threshold values for the threshold post-processor + threshold_min = self.declare_parameter( + "threshold_min", + 0, + descriptor=ParameterDescriptor( + name="threshold_min", + type=ParameterType.PARAMETER_INTEGER, + description=( + "The minimum value to use for the threshold post-processor. " + "Default: 0" + ), + read_only=True, + ), + ) + threshold_max = self.declare_parameter( + "threshold_max", + 20000, + descriptor=ParameterDescriptor( + name="threshold_max", + type=ParameterType.PARAMETER_INTEGER, + description=( + "The maximum value to use for the threshold post-processor. 
" + "Default: 20000" + ), read_only=True, ), ) @@ -141,10 +341,25 @@ def load_parameters(self) -> Tuple[List[str], List[str], List[str]]: topic_types_retval = topic_types.value if topic_types_retval is None: topic_types_retval = [] + to_topics_retval = to_topics.value + if to_topics_retval is None: + to_topics_retval = [] + + return ( + from_topics_retval, + topic_types_retval, + to_topics.value, + post_processors_retval, + mask_relative_path.value, + temporal_window_size.value, + spatial_num_pixels.value, + threshold_min.value, + threshold_max.value, + ) - return from_topics_retval, topic_types_retval, republished_namespace.value - - def create_callback(self, i: int) -> Callable: + def create_callback( + self, i: int, post_processors: List[Callable[[Any], Any]] + ) -> Callable: """ Create the callback for the subscriber. @@ -152,6 +367,9 @@ def create_callback(self, i: int) -> Callable: ---------- i : int The index of the callback. + post_processor : List[Callable[[Any], Any]] + The post-processing functions to apply to the message before republishing. + Each must take in a message and return a message of the same type. Returns ------- @@ -171,6 +389,8 @@ def callback(msg: Any): # self.get_logger().info( # f"Received message on topic {i} {self.from_topics[i]}" # ) + for post_processor in post_processors: + msg = post_processor(msg) self.pubs[i].publish(msg) return callback diff --git a/ada_feeding_perception/config/republisher.yaml b/ada_feeding_perception/config/republisher.yaml index c2e6a924..3da35c67 100644 --- a/ada_feeding_perception/config/republisher.yaml +++ b/ada_feeding_perception/config/republisher.yaml @@ -6,9 +6,50 @@ republisher: - /camera/color/image_raw/compressed - /camera/color/camera_info - /camera/aligned_depth_to_color/image_raw + - /camera/aligned_depth_to_color/camera_info + - /local/camera/aligned_depth_to_color/image_raw # The types of topics to republish from topic_types: - sensor_msgs.msg.CompressedImage - sensor_msgs.msg.CameraInfo - - sensor_msgs.msg.Image \ No newline at end of file + - sensor_msgs.msg.Image + - sensor_msgs.msg.CameraInfo + - sensor_msgs.msg.Image + + # The name of the topics to republish to + to_topics: + - /local/camera/color/image_raw/compressed + - /local/camera/color/camera_info + - /local/camera/aligned_depth_to_color/image_raw + - /local/camera/aligned_depth_to_color/camera_info + - /local/camera/aligned_depth_to_color/depth_octomap + + # The names of a post-processing functions to apply to the message before + # republishing it. Current options are: + # - none (Any): the identity + # - temporal (Image): Stores the last `temporal_window_size` images and publishes + # the last image, masking out pixels that are zero in *any* of the images + # in the window. + # - spatial (Image): Applies the opening morphological operation to the mask of + # (non-)zero pixels in the image, with a square kernel of size `spatial_num_pixels`. + # - mask (Image): Applies the mask stored in `mask_relative_path` to the image. + # - threshold (Image): Masks out pixels whose values are outside of the specified + # range. This is intended to be used only with single-channel (e.g., depth) images. + # Any of these post-processing functions can be combined in a comma-separated list. + post_processors: + - "" + - "" + - "" + - "" + - threshold,mask,temporal,spatial # Apply filters to the depth image for the Octomap + # The binary mask used for "mask" post-processing. This mask will get scaled, + # possibly disproportionately, to the same of the image. 
+ # The binary mask used for "mask" post-processing. This mask will get scaled, + # possibly disproportionately, to the size of the image. + mask_relative_path: model/fork_handle_mask.png + # The size of the temporal window for the "temporal" post-processor. + temporal_window_size: 5 + # The size of the square kernel for the "spatial" post-processor. + spatial_num_pixels: 10 + # The min/max pixel value to be included in the threshold. For depth images, the units are mm. + threshold_min: 350 + threshold_max: 2500 diff --git a/ada_feeding_perception/model/fork_handle_mask.png b/ada_feeding_perception/model/fork_handle_mask.png new file mode 100644 index 0000000000000000000000000000000000000000..4728560597ff7406f2751a7d0240e5338cde45fc GIT binary patch literal 8256 [base85-encoded binary image data omitted; the patch is truncated here]