debugging
TieJean committed Dec 24, 2023
1 parent 3672d30 commit 9da73eb
Showing 7 changed files with 465 additions and 0 deletions.
4 changes: 4 additions & 0 deletions CMakeLists.txt
@@ -179,6 +179,10 @@ target_link_libraries(convert_object_detections_from_yaml ut_vslam ${LIBS} yaml-
ROSBUILD_ADD_EXECUTABLE(format_tum_gt src/evaluation/tum/format_tum_gt.cpp)
target_link_libraries(format_tum_gt ut_vslam ${LIBS})

ROSBUILD_ADD_EXECUTABLE(get_visualized_gt_trajectory src/evaluation/videos/get_visualized_gt_trajectory.cpp)
target_link_libraries(get_visualized_gt_trajectory ut_vslam ${LIBS})


IF (GENERATE_UNITTESTS)
MESSAGE("Adding test executable")
SET(UT_VSLAM_UNITTEST_NAME "ut_vslam_tests"
140 changes: 140 additions & 0 deletions src/evaluation/tum/debug.py
@@ -0,0 +1,140 @@
import json
from scipy.spatial.transform import Rotation as R
import numpy as np
from collections import OrderedDict

kTimeFilepath = "/home/tiejean/Documents/mnt/oslam/orb_post_process/sparsified_ut_vslam_in/tum_fr2_desk_obj_fast/freiburg2_desk/timestamps/timestamps_only.txt"

kObViSLAMJsonFilepath = "/home/tiejean/Documents/mnt/oslam/ut_vslam_results/tum_fr2_desk/tum_fr2_desk_obj_fast/0_freiburg2_desk/ut_vslam_out/robot_pose_results.json"
kObViSLAMCsvFilepath = "/home/tiejean/Documents/mnt/oslam/ut_vslam_results/tum_fr2_desk/tum_fr2_desk_obj_fast/0_freiburg2_desk/postprocessing/trajectory.csv"

kGTCsvFilepath = "/home/tiejean/Documents/mnt/oslam/ut_vslam_results/tum_fr2_desk/tum_fr2_desk_obj_fast/0_freiburg2_desk/postprocessing/interpolated_lego_loam_poses.csv"

def load_timestamps(filepath: str) -> list:
    fp = open(filepath, "r")
    if fp.closed:
        raise FileNotFoundError("Failed to open file " + filepath)
    timestamps = []
    for line in fp.readlines()[1:]:
        tokens = [token.strip() for token in line.split(",")]
        # tstamp = float(tokens[0]) + float(tokens[1]) * 1e-9
        tstamp = (int(tokens[0]), int(tokens[1]))
        timestamps.append(tstamp)
    fp.close()
    return timestamps

def json2cvs(filepath_time: str, filepath_json: str, filepath_csv: str):
    timestamps = load_timestamps(filepath_time)

    framed_poses = []
    fp_json = open(filepath_json, "r")
    if fp_json.closed:
        raise FileNotFoundError("Failed to open file " + filepath_json)
    data = json.load(fp_json)
    for framed_pose in data["robot_poses"]["robot_pose_results_map"]:
        fid = int(framed_pose["frame_id"])
        transl = np.array(framed_pose["pose"]["transl"]["Data"])
        rotvec = framed_pose["pose"]["rot"]["angle"] * np.array(framed_pose["pose"]["rot"]["axis"]["Data"])
        quat = R.from_rotvec(rotvec).as_quat()
        framed_poses.append((fid, transl, quat))
    framed_poses = sorted(framed_poses, key=lambda x: x[0])

    fp_csv = open(filepath_csv, "w")
    if fp_csv.closed:
        raise FileNotFoundError("Failed to open file " + filepath_csv)
    fp_csv.write("seconds, nanoseconds, lost, transl_x, transl_y, transl_z, quat_x, quat_y, quat_z, quat_w\n")
    for fid, transl, quat in framed_poses:
        fp_csv.write(str(timestamps[fid][0]) + ", " + str(timestamps[fid][1]))
        fp_csv.write(", 0")  # hardcoding lost to be 0
        fp_csv.write(", " + str(transl[0]) + ", " + str(transl[1]) + ", " + str(transl[2])
                     + ", " + str(quat[0]) + ", " + str(quat[1]) + ", " + str(quat[2]) + ", " + str(quat[3]))
        fp_csv.write("\n")
    fp_json.close()
    fp_csv.close()
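
# For reference, a self-contained sketch of the axis-angle -> quaternion step used in
# json2cvs above (an illustrative helper, not called anywhere), assuming SciPy's
# scalar-last (x, y, z, w) quaternion convention:
def _rotvec_to_quat_example():
    angle = np.pi / 2.0               # a 90-degree rotation ...
    axis = np.array([0.0, 0.0, 1.0])  # ... about the z axis
    quat = R.from_rotvec(angle * axis).as_quat()
    print(quat)                       # approximately [0, 0, 0.7071, 0.7071]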

def load_obvi_slam(filepath: str):
    fp = open(filepath, "r")
    if fp.closed:
        raise FileNotFoundError("Failed to open file " + filepath)
    timestamps = []
    poses = []
    for line in fp.readlines()[1:]:
        tokens = [token.strip() for token in line.split(",")]
        sec = int(tokens[0])
        nsec = int(tokens[1])
        timestamps.append((sec, nsec))
        pose = [float(token) for token in tokens[3:]]
        poses.append(pose)
    fp.close()
    return timestamps, poses

def load_lego_loam(filepath: str):
    fp = open(filepath, "r")
    if fp.closed:
        raise FileNotFoundError("Failed to open file " + filepath)
    timestamps = []
    poses = []
    for line in fp.readlines()[1:]:
        tokens = [token.strip() for token in line.split(",")]
        sec = int(tokens[0])
        nsec = int(tokens[1])
        timestamps.append((sec, nsec))
        pose = [float(token) for token in tokens[2:]]
        poses.append(pose)
    fp.close()
    return timestamps, poses

def pose2transform(pose):
    transform = np.eye(4)
    transform[:3, :3] = R.from_quat(pose[3:]).as_matrix()
    transform[:3, 3] = pose[:3]
    return transform

def transform2pose(transform):
    transl = transform[:3, 3]
    quat = R.from_matrix(transform[:3, :3]).as_quat()
    pose = []
    for x in transl:
        pose.append(x)
    for x in quat:
        pose.append(x)
    return pose

def applyRoffset(Rot, pose):
    # Rt = np.eye(4)
    # Rt[:3,:3] = Rot
    # transform = pose2transform(pose)
    # transform = transform @ Rt
    # NOTE: only the translation is rotated here; the orientation is passed through unchanged.
    transl = np.array(pose[:3])
    rot = R.from_quat(np.array(pose[3:]))
    transl = Rot @ transl
    # quat = R.from_matrix(Rot @ quat.as_matrix()).as_quat()
    return np.concatenate([transl, rot.as_quat()]).tolist()


if __name__ == "__main__":
    Roffset = np.array([[ 0.,  0.,  1.],
                        [-1.,  0.,  0.],
                        [ 0., -1.,  0.]])
    # json2cvs(kTimeFilepath, kObViSLAMJsonFilepath, kObViSLAMCsvFilepath)
    timestamps_lego, poses_lego = load_lego_loam(kGTCsvFilepath)
    # NOTE: this overwrites the ground-truth CSV in place with the rotated poses.
    fp_lego = open(kGTCsvFilepath, "w")
    if fp_lego.closed:
        raise FileNotFoundError("Failed to open file " + kGTCsvFilepath)
    fp_lego.write("seconds, nanoseconds, transl_x, transl_y, transl_z, quat_x, quat_y, quat_z, quat_w\n")
    for timestamp, pose in zip(timestamps_lego, poses_lego):
        new_pose = applyRoffset(Roffset, pose)
        transl, quat = new_pose[:3], new_pose[3:]
        fp_lego.write(str(timestamp[0]) + ", " + str(timestamp[1]))
        fp_lego.write(", " + str(transl[0]) + ", " + str(transl[1]) + ", " + str(transl[2])
                      + ", " + str(quat[0]) + ", " + str(quat[1]) + ", " + str(quat[2]) + ", " + str(quat[3]))
        fp_lego.write("\n")
    fp_lego.close()
    # poses_lego = load_lego_loam(kGTCsvFilepath)
    # for pose_obvi, pose_lego in zip(poses_obvi, poses_lego):
    #     pose_obvi = poses_obvi[50]
    #     pose_lego = poses_lego[50]
    #     print("pose_lego: ", pose_lego)
    #     pose_lego = applyRoffset(Roffset, pose_lego)
    #     print("pose_lego: ", pose_lego)
    #     print("pose_obvi: ", pose_obvi)
    #     exit()
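
For what it's worth, the hard-coded Roffset in debug.py looks like a change of axis convention: it appears to re-express camera-style coordinates (x right, y down, z forward) in a robot/LiDAR-style frame (x forward, y left, z up). That reading is an assumption, but it is easy to sanity-check numerically with a standalone snippet:

import numpy as np

Roffset = np.array([[ 0.,  0.,  1.],
                    [-1.,  0.,  0.],
                    [ 0., -1.,  0.]])
print(Roffset @ np.array([0., 0., 1.]))  # -> [ 1.  0.  0.]: camera "forward" (+z) becomes robot +x
print(Roffset @ np.array([1., 0., 0.]))  # -> [ 0. -1.  0.]: camera "right" (+x) becomes robot -y, i.e. to the right if +y points left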
80 changes: 80 additions & 0 deletions src/evaluation/tum/interpolate_tum_gt.py
@@ -0,0 +1,80 @@
import argparse
import numpy as np
from collections import OrderedDict

def parse_args():
    parser = argparse.ArgumentParser(description="Associate TUM ground truth poses with ObVi-SLAM frame timestamps")
    parser.add_argument("--inpath", required=True, type=str, help="input stamped ground truth pose file (TUM format)")
    parser.add_argument("--tpath", required=True, type=str, help="timestamps file with one (seconds, nanoseconds) pair per line")
    parser.add_argument("--outpath", required=True, type=str, help="output associated ground truth file .csv")
    args = parser.parse_args()
    return args

def load_stamped_poses(filepath: str) -> OrderedDict:
    fp = open(filepath, "r")
    if fp.closed:
        raise FileNotFoundError("Failed to open file " + filepath)
    stamped_poses = OrderedDict()
    for line in fp.readlines():
        tokens = [token.strip() for token in line.split()]
        # Skip blank lines and '#' headers (TUM ground truth files typically start with comment lines).
        if not tokens or tokens[0].startswith("#"):
            continue
        tstamp = float(tokens[0])
        stamped_poses[tstamp] = tokens[1:]
    fp.close()
    return stamped_poses

def load_timestamps(filepath: str) -> list:
    fp = open(filepath, "r")
    if fp.closed:
        raise FileNotFoundError("Failed to open file " + filepath)
    timestamps = []
    for line in fp.readlines()[1:]:
        tokens = [token.strip() for token in line.split(",")]
        # tstamp = float(tokens[0]) + float(tokens[1]) * 1e-9
        tstamp = (int(tokens[0]), int(tokens[1]))
        timestamps.append(tstamp)
    fp.close()
    return timestamps

def associate(stamped_poses: OrderedDict, timestamps: list) -> list:
    all_timestamps = np.array(list(stamped_poses.keys()), dtype=float)
    all_poses = np.array(list(stamped_poses.values()))
    associated_stamped_poses = []
    tdiffs = []

    for timestamp in timestamps:
        timestamp_d = timestamp[0] + timestamp[1] * 1e-9
        idx = np.argmin(np.abs(all_timestamps - timestamp_d))
        stamp = all_timestamps[idx]
        pose = all_poses[idx]
        tdiff = np.abs(stamp - timestamp_d)
        tdiffs.append(tdiff)
        associated_stamped_poses.append((timestamp, pose))
    return associated_stamped_poses, tdiffs

def save_stamped_poses(filepath: str, associated_stamped_poses: list):
    kDelim = ", "

    fp = open(filepath, "w")
    if fp.closed:
        raise FileNotFoundError("Failed to open file " + filepath)
    fp.write("seconds, nanoseconds, transl_x, transl_y, transl_z, quat_x, quat_y, quat_z, quat_w\n")
    for stamped_pose in associated_stamped_poses:
        stamp = stamped_pose[0]
        pose = stamped_pose[1]
        # pose[4], pose[5], pose[6], pose[3] are written under the quat_x, quat_y, quat_z, quat_w
        # columns, i.e. the source quaternion is assumed to be stored scalar-first (w, x, y, z).
        fp.write(str(stamp[0]) + kDelim +
                 str(stamp[1]) + kDelim +
                 str(pose[0]) + kDelim +
                 str(pose[1]) + kDelim +
                 str(pose[2]) + kDelim +
                 str(pose[4]) + kDelim +
                 str(pose[5]) + kDelim +
                 str(pose[6]) + kDelim +
                 str(pose[3]))
        fp.write("\n")
    fp.close()


if __name__ == "__main__":
    args = parse_args()
    stamped_poses = load_stamped_poses(args.inpath)
    timestamps = load_timestamps(args.tpath)
    associated_stamped_poses, tdiffs = associate(stamped_poses, timestamps)
    save_stamped_poses(args.outpath, associated_stamped_poses)
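
The associate step in this script is a nearest-neighbor lookup in time: each (seconds, nanoseconds) query is flattened to a float and matched to the ground-truth stamp with the smallest absolute time difference. A toy illustration with made-up values (the stamps below are hypothetical):

import numpy as np

gt_stamps = np.array([100.00, 100.05, 100.10, 100.15])  # hypothetical ground-truth stamps (seconds)
query = (100, 120000000)                                 # (seconds, nanoseconds), i.e. 100.12 s
query_d = query[0] + query[1] * 1e-9
idx = np.argmin(np.abs(gt_stamps - query_d))
print(idx, gt_stamps[idx])                               # -> 2 100.1 (time difference ~0.02 s)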
92 changes: 92 additions & 0 deletions src/evaluation/videos/get_visualized_gt_trajectory.cpp
@@ -0,0 +1,92 @@
#include <analysis/cumulative_timer_constants.h>
#include <analysis/cumulative_timer_factory.h>
#include <base_lib/basic_utils.h>
#include <base_lib/pose_utils.h>
#include <debugging/ground_truth_utils.h>
#include <file_io/bounding_box_by_node_id_io.h>
#include <file_io/bounding_box_by_timestamp_io.h>
#include <file_io/camera_extrinsics_with_id_io.h>
#include <file_io/camera_info_io_utils.h>
#include <file_io/camera_intrinsics_with_id_io.h>
#include <file_io/cv_file_storage/config_file_storage_io.h>
#include <file_io/cv_file_storage/long_term_object_map_file_storage_io.h>
#include <file_io/cv_file_storage/object_and_reprojection_feature_pose_graph_file_storage_io.h>
#include <file_io/cv_file_storage/output_problem_data_file_storage_io.h>
#include <file_io/cv_file_storage/vslam_basic_types_file_storage_io.h>
#include <file_io/node_id_and_timestamp_io.h>
#include <file_io/pose_3d_with_timestamp_io.h>
#include <file_io/pose_io_utils.h>
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <refactoring/bounding_box_frontend/bounding_box_retriever.h>
#include <refactoring/bounding_box_frontend/feature_based_bounding_box_front_end.h>
#include <refactoring/configuration/full_ov_slam_config.h>
#include <refactoring/image_processing/image_processing_utils.h>
#include <refactoring/long_term_map/long_term_map_factor_creator.h>
#include <refactoring/offline/offline_problem_data.h>
#include <refactoring/offline/offline_problem_runner.h>
#include <refactoring/output_problem_data.h>
#include <refactoring/output_problem_data_extraction.h>
#include <refactoring/visual_feature_frontend/visual_feature_front_end.h>
#include <refactoring/visual_feature_processing/orb_output_low_level_feature_reader.h>
#include <refactoring/visualization/ros_visualization.h>
#include <refactoring/visualization/save_to_file_visualizer.h>
#include <ros/ros.h>
#include <run_optimization_utils/optimization_runner.h>
#include <sensor_msgs/Image.h>

using namespace pose;
using namespace vslam_types_refactor;
namespace vtr = vslam_types_refactor;

DEFINE_string(ground_truth_trajectory_file,
              "",
              "File containing ground truth for the trajectory");
DEFINE_string(ground_truth_extrinsics_file,
              "",
              "File containing the extrinsics that relate the ground truth "
              "trajectory frame to the frame that is estimated here.");
DEFINE_string(nodes_by_timestamp_file,
              "",
              "File containing the timestamp-node mapping");
DEFINE_string(output_file,
              "",
              "File to which to output the ground truth poses associated "
              "with each node timestamp");


int main(int argc, char **argv) {
  google::InitGoogleLogging(argv[0]);
  google::ParseCommandLineFlags(&argc, &argv, true);
  FLAGS_logtostderr = true;

  std::vector<file_io::NodeIdAndTimestamp> nodes_by_timestamps_vec;
  file_io::readNodeIdsAndTimestampsFromFile(FLAGS_nodes_by_timestamp_file,
                                            nodes_by_timestamps_vec);

  std::optional<std::vector<vtr::Pose3D<double>>> gt_trajectory =
      vtr::getGtTrajectory(nodes_by_timestamps_vec.size() - 1,
                           FLAGS_ground_truth_trajectory_file,
                           FLAGS_ground_truth_extrinsics_file,
                           FLAGS_nodes_by_timestamp_file);
  if (!gt_trajectory.has_value()) {
    LOG(ERROR) << "Failed to load ground truth trajectory";
    return 1;
  }

  // Pair each (seconds, nanoseconds) node timestamp with the corresponding
  // ground truth pose.
  std::vector<std::pair<Timestamp, Pose3D<double>>> stamped_poses;
  for (size_t i = 0; i < nodes_by_timestamps_vec.size(); ++i) {
    stamped_poses.emplace_back(
        std::make_pair(nodes_by_timestamps_vec.at(i).seconds_,
                       nodes_by_timestamps_vec.at(i).nano_seconds_),
        gt_trajectory.value().at(i));
  }

  LOG(INFO) << "nodes_by_timestamps_vec size: " << nodes_by_timestamps_vec.size();
  LOG(INFO) << "gt_trajectory size: " << gt_trajectory.value().size();
  LOG(INFO) << "stamped_poses size: " << stamped_poses.size();

  if (!FLAGS_output_file.empty()) {
    LOG(INFO) << "Writing to " + FLAGS_output_file;
    file_io::writePose3dsWithTimestampToFile(FLAGS_output_file, stamped_poses);
  }
  return 0;
}
6 changes: 6 additions & 0 deletions src/evaluation/videos/map.yaml
@@ -0,0 +1,6 @@
image: /home/tiejean/Desktop/EER0.png
resolution: .055
origin: [-21.5, -21.5, 0.0]
occupied_thresh: 0.65
free_thresh: 0
negate: 0