From 9da73ebeb581e294dfe047d4a0160929544e7b68 Mon Sep 17 00:00:00 2001
From: TieJean
Date: Sun, 24 Dec 2023 16:01:18 -0600
Subject: [PATCH] debugging

---
 CMakeLists.txt                                |   4 +
 src/evaluation/tum/debug.py                   | 140 ++++++++++++++++++
 src/evaluation/tum/interpolate_tum_gt.py      |  80 ++++++++++
 .../videos/get_visualized_gt_trajectory.cpp   |  92 ++++++++++++
 src/evaluation/videos/map.yaml                |   6 +
 src/evaluation/videos/postprocess_markers.py  | 108 ++++++++++++++
 src/evaluation/videos/publish_map.py          |  35 +++++
 7 files changed, 465 insertions(+)
 create mode 100644 src/evaluation/tum/debug.py
 create mode 100644 src/evaluation/tum/interpolate_tum_gt.py
 create mode 100644 src/evaluation/videos/get_visualized_gt_trajectory.cpp
 create mode 100644 src/evaluation/videos/map.yaml
 create mode 100644 src/evaluation/videos/postprocess_markers.py
 create mode 100644 src/evaluation/videos/publish_map.py

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 33b931ff..9dee794e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -179,6 +179,10 @@ target_link_libraries(convert_object_detections_from_yaml ut_vslam ${LIBS} yaml-
 ROSBUILD_ADD_EXECUTABLE(format_tum_gt src/evaluation/tum/format_tum_gt.cpp)
 target_link_libraries(format_tum_gt ut_vslam ${LIBS})
 
+ROSBUILD_ADD_EXECUTABLE(get_visualized_gt_trajectory src/evaluation/videos/get_visualized_gt_trajectory.cpp)
+target_link_libraries(get_visualized_gt_trajectory ut_vslam ${LIBS})
+
+
 IF (GENERATE_UNITTESTS)
     MESSAGE("Adding test executable")
     SET(UT_VSLAM_UNITTEST_NAME "ut_vslam_tests"

diff --git a/src/evaluation/tum/debug.py b/src/evaluation/tum/debug.py
new file mode 100644
index 00000000..0ab612b0
--- /dev/null
+++ b/src/evaluation/tum/debug.py
@@ -0,0 +1,140 @@
+import json
+from scipy.spatial.transform import Rotation as R
+import numpy as np
+from collections import OrderedDict
+
+kTimeFilepath = "/home/tiejean/Documents/mnt/oslam/orb_post_process/sparsified_ut_vslam_in/tum_fr2_desk_obj_fast/freiburg2_desk/timestamps/timestamps_only.txt"
+
+kObViSLAMJsonFilepath = "/home/tiejean/Documents/mnt/oslam/ut_vslam_results/tum_fr2_desk/tum_fr2_desk_obj_fast/0_freiburg2_desk/ut_vslam_out/robot_pose_results.json"
+kObViSLAMCsvFilepath = "/home/tiejean/Documents/mnt/oslam/ut_vslam_results/tum_fr2_desk/tum_fr2_desk_obj_fast/0_freiburg2_desk/postprocessing/trajectory.csv"
+
+kGTCsvFilepath = "/home/tiejean/Documents/mnt/oslam/ut_vslam_results/tum_fr2_desk/tum_fr2_desk_obj_fast/0_freiburg2_desk/postprocessing/interpolated_lego_loam_poses.csv"
+
+def load_timestamps(filepath:str) -> list:
+    fp = open(filepath, "r")
+    if fp.closed:
+        raise FileNotFoundError("Failed to open file " + filepath)
+    timestamps = []
+    for line in fp.readlines()[1:]:
+        tokens = [token.strip() for token in line.split(",")]
+        # tstamp = float(tokens[0]) + float(tokens[1]) * 1e-9
+        tstamp = (int(tokens[0]), int(tokens[1]))
+        timestamps.append(tstamp)
+    fp.close()
+    return timestamps
+
+def json2csv(filepath_time:str, filepath_json:str, filepath_csv: str):
+    timestamps = load_timestamps(filepath_time)
+
+    framed_poses = []
+    fp_json = open(filepath_json, "r")
+    if fp_json.closed:
+        raise FileNotFoundError("Failed to open file " + filepath_json)
+    data = json.load(fp_json)
+    for framed_pose in data["robot_poses"]["robot_pose_results_map"]:
+        fid = int(framed_pose["frame_id"])
+        transl = np.array(framed_pose["pose"]["transl"]["Data"])
+        rotvec = framed_pose["pose"]["rot"]["angle"] * np.array(framed_pose["pose"]["rot"]["axis"]["Data"])
+        quat = R.from_rotvec(rotvec).as_quat()
+        framed_poses.append((fid, transl, quat))
+    framed_poses = sorted(framed_poses, key=lambda x: x[0])
+
+    fp_csv = open(filepath_csv, "w")
+    if fp_csv.closed:
+        raise FileNotFoundError("Failed to open file " + filepath_csv)
+    fp_csv.write("seconds, nanoseconds, lost, transl_x, transl_y, transl_z, quat_x, quat_y, quat_z, quat_w\n")
+    for fid, transl, quat in framed_poses:
+        fp_csv.write(str(timestamps[fid][0]) + ", " + str(timestamps[fid][1]))
+        fp_csv.write(", 0")  # hardcoding lost to be 0
+        fp_csv.write(", " + str(transl[0]) + ", " + str(transl[1]) + ", " + str(transl[2]) \
+            + ", " + str(quat[0]) + ", " + str(quat[1]) + ", " + str(quat[2]) + ", " + str(quat[3]) )
+        fp_csv.write("\n")
+    fp_json.close(); fp_csv.close()
+
+def load_obvi_slam(filepath:str):
+    fp = open(filepath, "r")
+    if fp.closed:
+        raise FileNotFoundError("Failed to open file " + filepath)
+    timestamps = []
+    poses = []
+    for line in fp.readlines()[1:]:
+        tokens = [token.strip() for token in line.split(",")]
+        sec = int(tokens[0])
+        nsec = int(tokens[1])
+        timestamps.append((sec, nsec))
+        pose = [float(token) for token in tokens[3:]]
+        poses.append(pose)
+    fp.close()
+    return timestamps, poses
+
+def load_lego_loam(filepath:str):
+    fp = open(filepath, "r")
+    if fp.closed:
+        raise FileNotFoundError("Failed to open file " + filepath)
+    timestamps = []
+    poses = []
+    for line in fp.readlines()[1:]:
+        tokens = [token.strip() for token in line.split(",")]
+        sec = int(tokens[0])
+        nsec = int(tokens[1])
+        timestamps.append((sec, nsec))
+        pose = [float(token) for token in tokens[2:]]
+        poses.append(pose)
+    fp.close()
+    return timestamps, poses
+
+def pose2transform(pose):
+    transform = np.eye(4)
+    transform[:3,:3] = R.from_quat(pose[3:]).as_matrix()
+    transform[:3,3] = pose[:3]
+    return transform
+
+def transform2pose(transform):
+    transl = transform[:3,3]
+    quat = R.from_matrix(transform[:3,:3]).as_quat()
+    pose = []
+    for x in transl:
+        pose.append(x)
+    for x in quat:
+        pose.append(x)
+    return pose
+
+def applyRoffset(Rot, pose):
+    # Rt = np.eye(4)
+    # Rt[:3,:3] = Rot
+    # transform = pose2transform(pose)
+    # transform = transform @ Rt
+    transl = np.array(pose[:3])
+    rot = R.from_quat(np.array(pose[3:]))
+    transl = Rot @ transl
+    # quat = R.from_matrix(Rot @ quat.as_matrix()).as_quat()
+    return np.concatenate([transl, rot.as_quat()]).tolist()
+
+
+if __name__ == "__main__":
+    Roffset = np.array([[ 0.,  0.,  1.],
+                        [-1.,  0.,  0.],
+                        [ 0., -1.,  0.]])
+    # json2csv(kTimeFilepath, kObViSLAMJsonFilepath, kObViSLAMCsvFilepath)
+    timestamps_lego, poses_lego = load_lego_loam(kGTCsvFilepath)
+    fp_lego = open(kGTCsvFilepath, "w")
+    if fp_lego.closed:
+        raise FileNotFoundError("Failed to open file " + kGTCsvFilepath)
+    fp_lego.write("seconds, nanoseconds, transl_x, transl_y, transl_z, quat_x, quat_y, quat_z, quat_w\n")
+    for timestamp, pose in zip(timestamps_lego, poses_lego):
+        new_pose = applyRoffset(Roffset, pose)
+        transl, quat = new_pose[:3], new_pose[3:]
+        fp_lego.write(str(timestamp[0]) + ", " + str(timestamp[1]))
+        fp_lego.write(", " + str(transl[0]) + ", " + str(transl[1]) + ", " + str(transl[2]) \
+            + ", " + str(quat[0]) + ", " + str(quat[1]) + ", " + str(quat[2]) + ", " + str(quat[3]) )
+        fp_lego.write("\n")
+    fp_lego.close()
+    # poses_lego = load_lego_loam(kGTCsvFilepath)
+    # for pose_obvi, pose_lego in zip(poses_obvi, poses_lego):
+    # pose_obvi = poses_obvi[50]
+    # pose_lego = poses_lego[50]
+    # print("pose_lego: ", pose_lego)
+    # pose_lego = applyRoffset(Roffset, pose_lego)
+    # print("pose_lego: ", pose_lego)
+    # print("pose_obvi: ", pose_obvi)
+    # exit()
\ No newline at end of file
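
Note on the Roffset matrix used in debug.py above: it looks like the usual camera-optical-to-robot-body axis remap (z_cam -> x, -x_cam -> y, -y_cam -> z), although the script never names the two frames, so that reading is an assumption. A minimal standalone sketch of that interpretation:

    import numpy as np

    # Assumed interpretation (not stated in the patch): Roffset remaps a
    # camera optical frame (x right, y down, z forward) into a ROS-style
    # body frame (x forward, y left, z up).
    Roffset = np.array([[ 0.,  0.,  1.],
                        [-1.,  0.,  0.],
                        [ 0., -1.,  0.]])

    # Proper rotation: orthogonal with determinant +1.
    assert np.allclose(Roffset @ Roffset.T, np.eye(3))
    assert np.isclose(np.linalg.det(Roffset), 1.0)

    # One meter along the camera's optical axis (+z) maps to one meter
    # forward (+x) in the body frame, as applyRoffset does for translations.
    print(Roffset @ np.array([0., 0., 1.]))  # -> [1. 0. 0.]
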
diff --git a/src/evaluation/tum/interpolate_tum_gt.py b/src/evaluation/tum/interpolate_tum_gt.py
new file mode 100644
index 00000000..33c587cd
--- /dev/null
+++ b/src/evaluation/tum/interpolate_tum_gt.py
@@ -0,0 +1,80 @@
+import argparse
+import numpy as np
+from collections import OrderedDict
+
+def parse_args():
+    parser = argparse.ArgumentParser(description="Associate TUM ground-truth poses with ObVi-SLAM frame timestamps")
+    parser.add_argument("--inpath", required=True, type=str, help="input TUM-format ground-truth trajectory file")
+    parser.add_argument("--tpath", required=True, type=str, help="frame timestamps file (seconds, nanoseconds per line)")
+    parser.add_argument("--outpath", required=True, type=str, help="output associated ground-truth pose .csv")
+    args = parser.parse_args()
+    return args
+
+def load_stamped_poses(filepath:str) -> OrderedDict:
+    fp = open(filepath, "r")
+    if fp.closed:
+        raise FileNotFoundError("Failed to open file " + filepath)
+    stamped_poses = OrderedDict()
+    for line in fp.readlines():
+        tokens = [token.strip() for token in line.split()]
+        tstamp = float(tokens[0])
+        stamped_poses[tstamp] = tokens[1:]
+    fp.close()
+    return stamped_poses
+
+def load_timestamps(filepath:str) -> list:
+    fp = open(filepath, "r")
+    if fp.closed:
+        raise FileNotFoundError("Failed to open file " + filepath)
+    timestamps = []
+    for line in fp.readlines()[1:]:
+        tokens = [token.strip() for token in line.split(",")]
+        # tstamp = float(tokens[0]) + float(tokens[1]) * 1e-9
+        tstamp = (int(tokens[0]), int(tokens[1]))
+        timestamps.append(tstamp)
+    fp.close()
+    return timestamps
+
+def associate(stamped_poses:OrderedDict, timestamps:list) -> list:
+    all_timestamps = np.array(list(stamped_poses.keys()), dtype=float)
+    all_poses = np.array(list(stamped_poses.values()))
+    associated_stamped_poses = []; tdiffs = []
+
+    for timestamp in timestamps:
+        timestamp_d = timestamp[0] + timestamp[1]*1e-9
+        idx = np.argmin(np.abs(all_timestamps - timestamp_d))
+        stamp = all_timestamps[idx]
+        pose = all_poses[idx]
+        tdiff = np.abs(stamp-timestamp_d); tdiffs.append(tdiff)
+        associated_stamped_poses.append((timestamp, pose))
+    return associated_stamped_poses, tdiffs
+
+def save_stamped_poses(filepath:str, associated_stamped_poses:list):
+    kDelim = ", "
+
+    fp = open(filepath, "w")
+    if fp.closed:
+        raise FileNotFoundError("Failed to open file " + filepath)
+    fp.write("seconds, nanoseconds, transl_x, transl_y, transl_z, quat_x, quat_y, quat_z, quat_w\n")
+    for stamped_pose in associated_stamped_poses:
+        stamp = stamped_pose[0]
+        pose = stamped_pose[1]
+        fp.write(str(stamp[0]) + kDelim
+                 + str(stamp[1]) + kDelim
+                 + str(pose[0]) + kDelim
+                 + str(pose[1]) + kDelim
+                 + str(pose[2]) + kDelim
+                 + str(pose[4]) + kDelim
+                 + str(pose[5]) + kDelim
+                 + str(pose[6]) + kDelim
+                 + str(pose[3]))
+        fp.write("\n")
+    fp.close()
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    stamped_poses = load_stamped_poses(args.inpath)
+    timestamps = load_timestamps(args.tpath)
+    associated_stamped_poses, tdiffs = associate(stamped_poses, timestamps)
+    save_stamped_poses(args.outpath, associated_stamped_poses)
\ No newline at end of file
diff --git a/src/evaluation/videos/get_visualized_gt_trajectory.cpp b/src/evaluation/videos/get_visualized_gt_trajectory.cpp
new file mode 100644
index 00000000..a6b5cfa8
--- /dev/null
+++ b/src/evaluation/videos/get_visualized_gt_trajectory.cpp
@@ -0,0 +1,92 @@
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+using namespace pose;
+using namespace vslam_types_refactor;
+namespace vtr = vslam_types_refactor;
+
+DEFINE_string(ground_truth_trajectory_file,
+              "",
+              "File containing ground truth for the trajectory");
+DEFINE_string(ground_truth_extrinsics_file,
+              "",
+              "File containing the "
+              "extrinsics that relate the ground truth trajectory frame to the"
+              " frame that is estimated here.");
+DEFINE_string(nodes_by_timestamp_file,
+              "",
+              "File containing the timestamp-node mapping");
+DEFINE_string(output_file,
+              "",
+              "File to which to output the interpolated poses "
+              "for all timestamps (i.e. also contains smoothed odom)");
+
+
+int main(int argc, char **argv) {
+  google::InitGoogleLogging(argv[0]);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+  FLAGS_logtostderr = true;
+
+  std::vector nodes_by_timestamps_vec;
+  file_io::readNodeIdsAndTimestampsFromFile(FLAGS_nodes_by_timestamp_file,
+                                            nodes_by_timestamps_vec);
+
+  std::optional>> gt_trajectory =
+      vtr::getGtTrajectory(nodes_by_timestamps_vec.size()-1,
+                           FLAGS_ground_truth_trajectory_file,
+                           FLAGS_ground_truth_extrinsics_file,
+                           FLAGS_nodes_by_timestamp_file);
+
+  std::vector>> stamped_poses;
+  for (int i = 0; i < nodes_by_timestamps_vec.size(); ++i) {
+    stamped_poses.emplace_back(std::make_pair(nodes_by_timestamps_vec.at(i).seconds_,
+                                              nodes_by_timestamps_vec.at(i).nano_seconds_),
+                               gt_trajectory.value().at(i));
+
+
+  }
+
+  LOG(INFO) << "nodes_by_timestamps_vec size: " << nodes_by_timestamps_vec.size();
+  LOG(INFO) << "gt_trajectory size: " << gt_trajectory.value().size();
+  LOG(INFO) << "stamped_poses size: " << stamped_poses.size();
+
+  if (!FLAGS_output_file.empty()) {
+    LOG(INFO) << "Writing to " + FLAGS_output_file;
+    file_io::writePose3dsWithTimestampToFile(
+        FLAGS_output_file, stamped_poses);
+  }
+}
\ No newline at end of file
diff --git a/src/evaluation/videos/map.yaml b/src/evaluation/videos/map.yaml
new file mode 100644
index 00000000..56405ae3
--- /dev/null
+++ b/src/evaluation/videos/map.yaml
@@ -0,0 +1,6 @@
+image: /home/tiejean/Desktop/EER0.png
+resolution: .055
+origin: [-21.5, -21.5, 0.0]
+occupied_thresh: 0.65
+free_thresh: 0
+negate: 0
\ No newline at end of file
diff --git a/src/evaluation/videos/postprocess_markers.py b/src/evaluation/videos/postprocess_markers.py
new file mode 100644
index 00000000..27ca7beb
--- /dev/null
+++ b/src/evaluation/videos/postprocess_markers.py
@@ -0,0 +1,108 @@
+import rosbag
+import numpy as np
+import copy
+from geometry_msgs.msg import Point
+
+kBagname = "0__2023_05_11_18_35_54"
+kPrefix = "/evaluation_2023_07_v1_update_rev_lowerererish_conv_thresholds_manual_feat_adj_tighterer_vo_v4_" + kBagname + "/"
+
+kBagpathIn = "/home/tiejean/Documents/mnt/oslam/visualization/0__2023_05_11_18_35_54.bag"
+kBagpathOut = "/home/tiejean/Documents/mnt/oslam/visualization/0__2023_05_11_18_35_54_postprocessed2.bag"
+
+kLeGOLOAMFilepath = "/home/tiejean/Documents/mnt/oslam/ut_vslam_results/postprocessing/interpolated_lego_loam_poses_bag_0_viz.cvs"
+
+def load_lego_loam(filepath: str):
+    fp = open(filepath, "r")
+    if fp.closed:
+        raise FileNotFoundError("Failed to open file " + filepath)
+    # stamped_poses = {}
+    poses = []
+    for line in fp.readlines()[1:]:
+        tokens = [token.strip() for token in line.split(",")]
+        sec, nsec = int(tokens[0]), int(tokens[1])
+        pose = [float(token) for token in tokens[2:]]
+        poses.append(pose)
+        # stamped_poses[(sec, nsec)] = pose
+    fp.close()
+    # return stamped_poses
+    return poses
+
+
+if __name__ == "__main__":
+    interesting_topics = ["/est_pose", "/est_ellipsoids", "/est_/latest/cam_1_bb/image_raw", "/tf"]
+    interesting_topics = [kPrefix + topic for topic in interesting_topics]
+
+    # stamped_ref_poses = load_lego_loam(kLeGOLOAMFilepath)
+    ref_poses = load_lego_loam(kLeGOLOAMFilepath)
+    bagIn = rosbag.Bag(kBagpathIn, 'r')
+    bagOut = rosbag.Bag(kBagpathOut, 'w')
+
+    all_gt_dtimes = []
+    all_gt_msgs = []
+    for topic, msg, t in bagIn.read_messages(topics=(kPrefix + "/gt_pose")):
+        if msg.type != 4:  # 4 == visualization_msgs/Marker LINE_STRIP
+            continue
+        sec, nsec = msg.header.stamp.secs, msg.header.stamp.nsecs
+        all_gt_dtimes.append(sec + nsec * 1e-9)
+        all_gt_msgs.append(msg)
+
+    for topic, msg, t in bagIn.read_messages(topics=[kPrefix + "/gt_pose", kPrefix + "/est_pose"]):
+        if topic == (kPrefix + "/est_pose"):
+            if msg.type != 4:
+                continue
+            sec, nsec = msg.header.stamp.secs, msg.header.stamp.nsecs
+            dtime = sec + nsec * 1e-9
+
+            msgEst = copy.deepcopy(msg)
+            tdiffs_gt = np.array(all_gt_dtimes)-dtime; idx_gt = np.argmin(np.abs(tdiffs_gt))
+            msgGt = copy.deepcopy(all_gt_msgs[idx_gt])
+
+            nEstPoses, nGtPoses = len(msgEst.points), len(msgGt.points)
+            if (nGtPoses >= nEstPoses):
+                # cap the number of poses in msgGt at nEstPoses
+                msgGt.points = msgGt.points[:nEstPoses]
+            else:
+                for pose in ref_poses[nGtPoses:nEstPoses]:
+                    # msgGt.points.append(Point(pose[0]+0.46618, pose[1], pose[2]))
+                    msgGt.points.append(Point(pose[0], pose[1], pose[2]))
+
+            bagOut.write(kPrefix + "/gt_pose", msgGt, t)
+            bagOut.write(kPrefix + "/est_pose", msgEst, t)
+
+
+    # for topic, msg, t in bagIn.read_messages():
+    #     if topic == (kPrefix + "/gt_pose"):
+    #         continue
+    #     if topic == (kPrefix + "/est_pose"):
+    #         import pdb; pdb.set_trace()
+    #         msgEst = msg
+    #         msgEst.color.r = 0
+    #         msgEst.color.g = 1
+    #         msgEst.color.b = 1
+    #         msgEst.type = 4
+    #         msgEst.scale.x *= 4
+    #         msgEst.scale.y *= 8
+    #         msgEst.scale.z *= 4
+    #         bagOut.write(topic, msgEst, t)
+
+    #         sec, nsec = msgEst.header.stamp.secs, msgEst.header.stamp.nsecs
+    #         dtime = sec + nsec * 1e-9
+    #         tdiffs = np.array(all_gt_dtimes)-dtime
+    #         idx = np.argmin(np.abs(tdiffs))  # Reference GT message: all_gt_msgs[idx]
+
+    #         msgGt = copy.deepcopy(all_gt_msgs[idx])
+    #         msgGt.color.r = 0
+    #         msgGt.color.g = 1
+    #         msgGt.color.b = 1
+    #         msgGt.type = 4
+    #         msgGt.scale.x *= 8
+    #         msgGt.scale.y *= 16
+    #         msgGt.scale.z *= 8
+    #         msgGt.color.r = 0
+    #         msgGt.color.g = .5
+    #         msgGt.color.b = 1
+    #         bagOut.write(kPrefix + "/gt_pose", msgGt, t)
+
+    #     else:
+    #         bagOut.write(topic, msg, t)
+    bagIn.close(); bagOut.close()
diff --git a/src/evaluation/videos/publish_map.py b/src/evaluation/videos/publish_map.py
new file mode 100644
index 00000000..ec399f50
--- /dev/null
+++ b/src/evaluation/videos/publish_map.py
@@ -0,0 +1,35 @@
+import rospy
+import cv2
+from nav_msgs.msg import OccupancyGrid
+import numpy as np
+
+kImage0 = "/home/tiejean/Desktop/EER0.png"
+
+if __name__ == "__main__":
+    pub = rospy.Publisher('/satelliteView', OccupancyGrid, queue_size=10)
+    rospy.init_node('map', anonymous=True)
+    rate = rospy.Rate(10)
+
+    msg = OccupancyGrid()
+    img = cv2.imread(kImage0, cv2.IMREAD_GRAYSCALE)
+
+    msg.header.stamp = rospy.get_rostime()
+    msg.header.frame_id = "map"
+    msg.info.map_load_time = rospy.get_rostime()
+    msg.info.resolution = .055  # meters per pixel (matches map.yaml)
+    msg.info.width = img.shape[1]
+    msg.info.height = img.shape[0]
+    msg.info.origin.position.x = -21.5
+    msg.info.origin.position.y = -21
+    data = img
+    data = cv2.rotate(data, cv2.ROTATE_180)
+    data = cv2.flip(data, 1)
+    data = ((1-data/255)*100).astype(np.uint8)
+    data = data.flatten()
+    msg.data = (data).tolist()
+
+    # import pdb; pdb.set_trace()
+
+    while not rospy.is_shutdown():
+        pub.publish(msg)
+        rate.sleep()
\ No newline at end of file
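
Note on the grayscale-to-occupancy conversion in publish_map.py above: (1 - p/255) * 100 maps white pixels (255) to 0 (free) and black pixels (0) to 100 (occupied), which is the range an OccupancyGrid expects. A small standalone check, using illustrative pixel values rather than data from EER0.png:

    import numpy as np

    # White -> free (0), black -> occupied (100), mid-gray -> ~50.
    pixels = np.array([0, 128, 255], dtype=np.uint8)
    occupancy = ((1 - pixels / 255) * 100).astype(np.uint8)
    print(occupancy)  # -> [100  49   0]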