Had to use a String for the error
Otherwise, there are lifetime issues because 'a must outlive 'static for some reason, and that can't happen since we read the names from the loaded file.
ChristopherRabotin committed Sep 14, 2023
1 parent cd2e768 commit cda532d
Showing 15 changed files with 203 additions and 231 deletions.
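
To make the commit message concrete: the sketch below is an editor's illustration, not code from this commit; `BorrowedError`, `OwnedError`, and `lookup` are hypothetical stand-ins. An error that borrows a name out of the loaded file carries the file's lifetime `'a`, and `std::error::Error::source` (which snafu relies on for `source` fields) returns `&(dyn Error + 'static)`, which forces `'a: 'static`; that can never hold for data read from a file at runtime. Owning the name as a `String` removes the lifetime entirely.

```rust
// Hypothetical stand-ins for illustration only.
#[derive(Debug)]
pub enum BorrowedError<'a> {
    NameNotFound { name: &'a str }, // borrowed from the file's bytes
}

#[derive(Debug)]
pub enum OwnedError {
    NameNotFound { name: String }, // owned: copied out of the file
}

fn lookup(path: &str) -> Result<(), OwnedError> {
    let file_bytes = std::fs::read(path).unwrap_or_default();
    let name = std::str::from_utf8(&file_bytes[..4.min(file_bytes.len())]).unwrap_or("????");
    // Returning `BorrowedError { name }` would not compile: the error would
    // borrow `file_bytes`, which is dropped at the end of this function.
    // The owned variant is `'static` and passes through any error API.
    Err(OwnedError::NameNotFound { name: name.to_string() })
}

fn main() {
    if let Err(e) = lookup("/no/such/file") {
        println!("{e:?}");
    }
}
```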
2 changes: 1 addition & 1 deletion src/almanac/spk.rs
@@ -86,7 +86,7 @@ impl<'a: 'b, 'b> Almanac<'a> {
&self,
id: i32,
epoch: Epoch,
) -> Result<(&SPKSummaryRecord, usize, usize), DAFError<'a>> {
) -> Result<(&SPKSummaryRecord, usize, usize), DAFError> {
// TODO: Consider a return type here
for (spk_no, maybe_spk) in self
.spk_data
221 changes: 94 additions & 127 deletions src/bin/anise/main.rs
@@ -1,9 +1,11 @@
extern crate pretty_env_logger;
use std::env::{set_var, var};

use snafu::prelude::*;

use anise::cli::args::{Actions, Args};
use anise::cli::inspect::{BpcRow, SpkRow};
use anise::cli::CliErrors;
use anise::cli::{CliErrors, SomeDAFSnafu, SomeFileRecordSnafu};
use anise::file2heap;
use anise::naif::daf::{FileRecord, NAIFRecord, NAIFSummaryRecord};
use anise::naif::kpl::parser::convert_tpc;
@@ -13,7 +15,7 @@ use anise::structure::metadata::Metadata;
use anise::structure::planetocentric::PlanetaryData;
use anise::structure::spacecraft::SpacecraftData;
use clap::Parser;
use log::{error, info};
use log::info;
use tabled::{settings::Style, Table};
use zerocopy::FromBytes;

@@ -44,14 +46,15 @@ fn main() -> Result<(), CliErrors> {
DataSetType::NotApplicable => unreachable!("no such ANISE data yet"),
DataSetType::SpacecraftData => {
// Decode as spacecraft data
let dataset =
DataSet::<SpacecraftData, 64>::try_from_bytes(&bytes)?;
let dataset = DataSet::<SpacecraftData, 64>::try_from_bytes(&bytes)
.map_err(|err| CliErrors::AniseError { err })?;
println!("{dataset}");
Ok(())
}
DataSetType::PlanetaryData => {
// Decode as planetary data
let dataset = DataSet::<PlanetaryData, 64>::try_from_bytes(&bytes)?;
let dataset = DataSet::<PlanetaryData, 64>::try_from_bytes(&bytes)
.map_err(|err| CliErrors::AniseError { err })?;
println!("{dataset}");
Ok(())
}
@@ -62,47 +65,28 @@ fn main() -> Result<(), CliErrors> {
FileRecord::read_from(&bytes[..FileRecord::SIZE]).unwrap();
match file_record
.identification()
.map_err(CliErrors::AniseError)?
.with_context(|_| SomeFileRecordSnafu)?
{
"PCK" => {
info!("Loading {path_str:?} as DAF/PCK");
match BPC::check_then_parse(bytes, crc32_checksum) {
Ok(_) => {
info!("[OK] Checksum matches");
Ok(())
}
Err(AniseError::IntegrityError(e)) => {
error!("CRC32 checksums differ for {path_str:?}: {e:?}");
Err(CliErrors::AniseError(AniseError::IntegrityError(e)))
}
Err(e) => {
error!("Some other error happened when loading {path_str:?}: {e:?}");
Err(CliErrors::AniseError(e))
}
}
BPC::check_then_parse(bytes, crc32_checksum)
.with_context(|_| SomeDAFSnafu)?;
info!("[OK] Checksum matches");
Ok(())
}
"SPK" => {
info!("Loading {path_str:?} as DAF/SPK");
match SPK::check_then_parse(bytes, crc32_checksum) {
Ok(_) => {
info!("[OK] Checksum matches");
Ok(())
}
Err(AniseError::IntegrityError(e)) => {
error!("CRC32 checksums differ for {path_str:?}: {e:?}");
Err(CliErrors::AniseError(AniseError::IntegrityError(e)))
}
Err(e) => {
error!("Some other error happened when loading {path_str:?}: {e:?}");
Err(CliErrors::AniseError(e))
}
}
SPK::check_then_parse(bytes, crc32_checksum)
.with_context(|_| SomeDAFSnafu)?;
info!("[OK] Checksum matches");
Ok(())
}
_ => unreachable!(),
}
}
}
Err(e) => Err(e.into()),
// TODO: Re-enable this
Err(e) => panic!("{e}"),
}
}
Actions::Inspect { file } => {
@@ -114,119 +98,102 @@ fn main() -> Result<(), CliErrors> {

match file_record
.identification()
.map_err(CliErrors::AniseError)?
.with_context(|_| SomeFileRecordSnafu)?
{
"PCK" => {
info!("Loading {path_str:?} as DAF/PCK");
match BPC::parse(bytes) {
Ok(pck) => {
info!("CRC32 checksum: 0x{:X}", pck.crc32());
if let Some(comments) = pck.comments()? {
println!("== COMMENTS ==\n{}== END ==", comments);
} else {
println!("(File has no comments)");
}
// Build the rows of the table
let mut rows = Vec::new();

for (sno, summary) in
pck.data_summaries().unwrap().iter().enumerate()
{
let name = pck
.name_record
.nth_name(sno, pck.file_record.summary_size());
if summary.is_empty() {
continue;
}
rows.push(BpcRow {
name,
start_epoch: format!("{:E}", summary.start_epoch()),
end_epoch: format!("{:E}", summary.end_epoch()),
duration: summary.end_epoch() - summary.start_epoch(),
interpolation_kind: format!("{}", summary.data_type_i),
frame: format!("{}", summary.frame_id),
inertial_frame: format!(
"{}",
summary.inertial_frame_id
),
});
}

let mut tbl = Table::new(rows);
tbl.with(Style::modern());
println!("{tbl}");

Ok(())
}
Err(e) => {
error!("Some other error happened when loading {path_str:?}: {e:?}");
Err(CliErrors::AniseError(e))
let pck = BPC::parse(bytes).with_context(|_| SomeDAFSnafu)?;
info!("CRC32 checksum: 0x{:X}", pck.crc32());
if let Some(comments) = pck.comments().with_context(|_| SomeDAFSnafu)? {
println!("== COMMENTS ==\n{}== END ==", comments);
} else {
println!("(File has no comments)");
}
// Build the rows of the table
let mut rows = Vec::new();

for (sno, summary) in pck.data_summaries().unwrap().iter().enumerate() {
let name = pck
.name_record
.nth_name(sno, pck.file_record.summary_size());
if summary.is_empty() {
continue;
}
rows.push(BpcRow {
name,
start_epoch: format!("{:E}", summary.start_epoch()),
end_epoch: format!("{:E}", summary.end_epoch()),
duration: summary.end_epoch() - summary.start_epoch(),
interpolation_kind: format!("{}", summary.data_type_i),
frame: format!("{}", summary.frame_id),
inertial_frame: format!("{}", summary.inertial_frame_id),
});
}

let mut tbl = Table::new(rows);
tbl.with(Style::modern());
println!("{tbl}");

Ok(())
}
"SPK" => {
info!("Loading {path_str:?} as DAF/SPK");
match SPK::parse(bytes) {
Ok(spk) => {
info!("CRC32 checksum: 0x{:X}", spk.crc32());
if let Some(comments) = spk.comments()? {
println!("== COMMENTS ==\n{}== END ==", comments);
} else {
println!("(File has no comments)");
}
// Build the rows of the table
let mut rows = Vec::new();

for (sno, summary) in
spk.data_summaries().unwrap().iter().enumerate()
{
let name = spk
.name_record
.nth_name(sno, spk.file_record.summary_size());
if summary.is_empty() {
continue;
}

rows.push(SpkRow {
name,
center: summary.center_id,
start_epoch: format!("{:E}", summary.start_epoch()),
end_epoch: format!("{:E}", summary.end_epoch()),
duration: summary.end_epoch() - summary.start_epoch(),
interpolation_kind: format!("{}", summary.data_type_i),
frame: format!("{}", summary.frame_id),
target: format!("{}", summary.target_id),
});
}

let mut tbl = Table::new(rows);
tbl.with(Style::modern());
println!("{tbl}");

Ok(())
}
Err(e) => {
error!("Some other error happened when loading {path_str:?}: {e:?}");
Err(CliErrors::AniseError(e))
let spk = SPK::parse(bytes).with_context(|_| SomeDAFSnafu)?;

info!("CRC32 checksum: 0x{:X}", spk.crc32());
if let Some(comments) = spk.comments().with_context(|_| SomeDAFSnafu)? {
println!("== COMMENTS ==\n{}== END ==", comments);
} else {
println!("(File has no comments)");
}
// Build the rows of the table
let mut rows = Vec::new();

for (sno, summary) in spk.data_summaries().unwrap().iter().enumerate() {
let name = spk
.name_record
.nth_name(sno, spk.file_record.summary_size());
if summary.is_empty() {
continue;
}

rows.push(SpkRow {
name,
center: summary.center_id,
start_epoch: format!("{:E}", summary.start_epoch()),
end_epoch: format!("{:E}", summary.end_epoch()),
duration: summary.end_epoch() - summary.start_epoch(),
interpolation_kind: format!("{}", summary.data_type_i),
frame: format!("{}", summary.frame_id),
target: format!("{}", summary.target_id),
});
}

let mut tbl = Table::new(rows);
tbl.with(Style::modern());
println!("{tbl}");

Ok(())
}
fileid => Err(CliErrors::ArgumentError(format!(
"{fileid} is not supported yet"
))),
fileid => Err(CliErrors::ArgumentError {
arg: format!("{fileid} is not supported yet"),
}),
}
}
Err(e) => Err(e.into()),
Err(e) => panic!("{e}"),
}
}
Actions::ConvertTpc {
pckfile,
gmfile,
outfile,
} => {
let dataset = convert_tpc(pckfile, gmfile).map_err(CliErrors::AniseError)?;
let dataset =
convert_tpc(pckfile, gmfile).map_err(|err| CliErrors::AniseError { err })?;

dataset.save_as(outfile, false)?;
dataset
.save_as(outfile, false)
.map_err(|err| CliErrors::AniseError { err })?;

Ok(())
}
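
A self-contained sketch (editor's addition; `check_then_parse`, `try_from_bytes`, the `Oops` variant, and the `String` payload are stand-ins for the real BPC/SPK parsing and the `AniseError` type) of the two conversion styles the rewritten main.rs mixes: snafu's generated context selectors for variants that have a `source` field, and plain `map_err` for the `AniseError { err }` variant, which has none.

```rust
use snafu::prelude::*;

#[derive(Debug, Snafu)]
enum DAFError {
    Oops, // stand-in for the real DAF error cases
}

#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
enum CliErrors {
    SomeDAF { source: DAFError },
    AniseError { err: String }, // the real field type is AniseError
}

// Stand-ins for BPC::check_then_parse and DataSet::try_from_bytes.
fn check_then_parse() -> Result<(), DAFError> { Ok(()) }
fn try_from_bytes() -> Result<(), String> { Ok(()) }

fn run() -> Result<(), CliErrors> {
    // `source` field: the derived selector wraps the underlying error.
    check_then_parse().with_context(|_| SomeDAFSnafu)?;
    // No `source` field on this variant, so its selector cannot wrap an
    // underlying error; the commit builds it by hand with map_err.
    try_from_bytes().map_err(|err| CliErrors::AniseError { err })?;
    Ok(())
}

fn main() {
    if let Err(e) = run() {
        eprintln!("{e}");
    }
}
```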
35 changes: 24 additions & 11 deletions src/cli/mod.rs
@@ -1,23 +1,36 @@
extern crate clap;
extern crate tabled;
extern crate thiserror;

use snafu::prelude::*;
use std::io;

use thiserror::Error;

use crate::prelude::AniseError;
use crate::{
naif::daf::{file_record::FileRecordError, DAFError},
prelude::AniseError,
};

pub mod args;

pub mod inspect;

#[derive(Debug, Error)]
#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum CliErrors {
#[error("File not found or unreadable")]
FileNotFound(#[from] io::Error),
#[error("ANISE error encountered")]
AniseError(#[from] AniseError),
#[error("{0}")]
ArgumentError(String),
/// File not found or unreadable
FileNotFound {
source: io::Error,
},
/// ANISE error encountered"
SomeDAF {
source: DAFError,
},
SomeFileRecord {
source: FileRecordError,
},
ArgumentError {
arg: String,
},
AniseError {
err: AniseError,
},
}
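
The practical behavior change in this thiserror-to-snafu migration: thiserror's `#[from]` gave the old variants `From` impls, so a bare `?` converted the source error; the snafu variants above have none, which is why every call site in main.rs now names a context selector (or uses `map_err`). Note also that `#[snafu(visibility(pub))]` is what lets main.rs import `SomeDAFSnafu` and `SomeFileRecordSnafu` from this module. A minimal sketch, with `read_file` as a hypothetical caller:

```rust
use snafu::prelude::*;
use std::io;

#[derive(Debug, Snafu)]
#[snafu(visibility(pub))]
pub enum CliErrors {
    /// File not found or unreadable
    FileNotFound { source: io::Error },
}

pub fn read_file(path: &str) -> Result<Vec<u8>, CliErrors> {
    // With thiserror's `#[from]`, this was `Ok(std::fs::read(path)?)`.
    // With snafu, the generated selector is named explicitly:
    std::fs::read(path).context(FileNotFoundSnafu)
}

fn main() {
    match read_file("/no/such/file") {
        Ok(bytes) => println!("read {} bytes", bytes.len()),
        Err(e) => eprintln!("{e}"),
    }
}
```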
4 changes: 2 additions & 2 deletions src/ephemerides/mod.rs
@@ -39,14 +39,14 @@ pub enum EphemerisError<'a> {
#[snafu(display("trying {action} caused {source}"))]
UnderlyingDAF {
action: &'a str,
source: DAFError<'a>,
source: DAFError,
},
#[snafu(display("trying {action} caused {source}"))]
UnderlyingPhysics {
action: &'a str,
source: PhysicsError,
},
UnderlyingInterpolation {
source: InterpolationError<'a>,
source: InterpolationError,
},
}
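
One detail worth noting in this hunk: `action: &'a str` keeps its borrow while the DAF source loses it. A sketch of why that is fine (editor's illustration; `wrap` and the label text are hypothetical, and both types are simplified stand-ins): action labels are string literals supplied by the caller, so any `'a` is satisfiable, whereas the DAF error's strings came from the loaded file.

```rust
#[derive(Debug)]
struct DAFError {
    name: String, // owned as of this commit
}

#[derive(Debug)]
enum EphemerisError<'a> {
    UnderlyingDAF { action: &'a str, source: DAFError },
}

fn wrap(source: DAFError) -> EphemerisError<'static> {
    EphemerisError::UnderlyingDAF {
        // A string literal is `'static`, so it satisfies any `'a`;
        // file-derived strings could not, hence `source` now owns its data.
        action: "translating a state",
        source,
    }
}

fn main() {
    let err = wrap(DAFError { name: "J2000".to_string() });
    println!("{err:?}");
}
```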
(Diffs for the remaining 11 of the 15 changed files are not shown.)