diff --git a/src/bin/anise/main.rs b/src/bin/anise/main.rs index adaec111..dc1f1ea7 100644 --- a/src/bin/anise/main.rs +++ b/src/bin/anise/main.rs @@ -5,7 +5,7 @@ use snafu::prelude::*; use anise::cli::args::{Actions, Args}; use anise::cli::inspect::{BpcRow, SpkRow}; -use anise::cli::{AniseSnafu, CliDAFSnafu, CliErrors, CliFileRecordSnafu}; +use anise::cli::{AniseSnafu, CliDAFSnafu, CliDataSetSnafu, CliErrors, CliFileRecordSnafu}; use anise::file2heap; use anise::naif::daf::{FileRecord, NAIFRecord, NAIFSummaryRecord}; use anise::naif::kpl::parser::convert_tpc; @@ -46,14 +46,14 @@ fn main() -> Result<(), CliErrors> { DataSetType::SpacecraftData => { // Decode as spacecraft data let dataset = DataSet::::try_from_bytes(&bytes) - .map_err(|source| CliErrors::AniseError { source })?; + .with_context(|_| CliDataSetSnafu)?; println!("{dataset}"); Ok(()) } DataSetType::PlanetaryData => { // Decode as planetary data let dataset = DataSet::::try_from_bytes(&bytes) - .map_err(|source| CliErrors::AniseError { source })?; + .with_context(|_| CliDataSetSnafu)?; println!("{dataset}"); Ok(()) } @@ -178,12 +178,11 @@ fn main() -> Result<(), CliErrors> { gmfile, outfile, } => { - let dataset = - convert_tpc(pckfile, gmfile).map_err(|source| CliErrors::AniseError { source })?; + let dataset = convert_tpc(pckfile, gmfile).with_context(|_| CliDataSetSnafu)?; dataset .save_as(outfile, false) - .map_err(|source| CliErrors::AniseError { source })?; + .with_context(|_| CliDataSetSnafu)?; Ok(()) } diff --git a/src/cli/mod.rs b/src/cli/mod.rs index a2bfeb16..f5e7bdab 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -6,7 +6,7 @@ use std::io; use crate::{ naif::daf::{file_record::FileRecordError, DAFError}, - prelude::AniseError, + structure::dataset::DataSetError, }; pub mod args; @@ -30,7 +30,7 @@ pub enum CliErrors { ArgumentError { arg: String, }, - AniseError { - source: AniseError, + CliDataSet { + source: DataSetError, }, } diff --git a/src/errors.rs b/src/errors.rs index 37a3b835..d15bcffb 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -16,6 +16,7 @@ use crate::structure::semver::Semver; use crate::NaifId; use core::convert::From; use core::fmt; +use der::Error as DerError; use std::io::ErrorKind as IOErrorKind; #[derive(Clone, PartialEq, Debug)] @@ -86,8 +87,14 @@ pub enum DecodingError { #[snafu(backtrace)] source: IntegrityError, }, + #[snafu(display("decoding DER failed: {err}"))] + DecodingDer { err: DerError }, #[snafu(display("somehow casting the data failed"))] Casting, + #[snafu(display("could not load ANISE data version {got}, expected {exp}"))] + AniseVersion { got: Semver, exp: Semver }, + #[snafu(display("data could not be parsed as {kind} despite ANISE version matching (should be loaded as another type?)"))] + Obscure { kind: &'static str }, } #[derive(Copy, Clone, PartialEq, Eq, Debug)] diff --git a/src/naif/kpl/mod.rs b/src/naif/kpl/mod.rs index da70b8c8..850315e5 100644 --- a/src/naif/kpl/mod.rs +++ b/src/naif/kpl/mod.rs @@ -12,7 +12,7 @@ use core::str::FromStr; use std::fmt::Debug; use std::{collections::HashMap, hash::Hash}; -use crate::prelude::AniseError; +use snafu::{whatever, Whatever}; use self::parser::Assignment; @@ -38,10 +38,10 @@ pub enum KPLValue { } impl KPLValue { - pub fn to_vec_f64(&self) -> Result, AniseError> { + pub fn to_vec_f64(&self) -> Result, Whatever> { match self { KPLValue::Matrix(data) => Ok(data.clone()), - _ => Err(AniseError::ParameterNotSpecified), + _ => whatever!("can only convert matrices to vec of f64"), } } } @@ -90,7 +90,7 @@ pub enum Parameter { } 
impl FromStr for Parameter { - type Err = AniseError; + type Err = Whatever; fn from_str(s: &str) -> Result { match s { @@ -115,12 +115,10 @@ impl FromStr for Parameter { "UNITS" => Ok(Self::Units), "AXES" => Ok(Self::Axes), "GMLIST" | "NAME" | "SPEC" => { - // This is a known unsupported parameter - Err(AniseError::ParameterNotSpecified) + whatever!("unsupported parameter `{s}`") } _ => { - println!("WHAT IS `{s}` ?"); - Err(AniseError::ParameterNotSpecified) + whatever!("unknown parameter `{s}`") } } } diff --git a/src/naif/kpl/parser.rs b/src/naif/kpl/parser.rs index 5e7e6c2f..6098a1aa 100644 --- a/src/naif/kpl/parser.rs +++ b/src/naif/kpl/parser.rs @@ -19,8 +19,7 @@ use log::{error, info, warn}; use crate::naif::kpl::tpc::TPCItem; use crate::naif::kpl::Parameter; -use crate::prelude::AniseError; -use crate::structure::dataset::{DataSet, DataSetBuilder, DataSetType}; +use crate::structure::dataset::{DataSet, DataSetBuilder, DataSetError, DataSetType}; use crate::structure::metadata::Metadata; use crate::structure::planetocentric::ellipsoid::Ellipsoid; use crate::structure::planetocentric::phaseangle::PhaseAngle; @@ -78,7 +77,7 @@ impl Assignment { pub fn parse_file, I: KPLItem>( file_path: P, show_comments: bool, -) -> Result, AniseError> { +) -> Result, DataSetError> { let file = File::open(file_path).expect("Failed to open file"); let reader = BufReader::new(file); @@ -135,7 +134,7 @@ pub fn parse_file, I: KPLItem>( pub fn convert_tpc<'a, P: AsRef>( pck: P, gm: P, -) -> Result, AniseError> { +) -> Result, DataSetError> { let mut buf = vec![]; let mut dataset_builder = DataSetBuilder::default(); diff --git a/src/structure/dataset.rs b/src/structure/dataset.rs index baf5164e..2f32b8a5 100644 --- a/src/structure/dataset.rs +++ b/src/structure/dataset.rs @@ -8,22 +8,26 @@ * Documentation: https://nyxspace.com/ */ use super::{ - lookuptable::{Entry, LookUpTable}, + lookuptable::{Entry, LookUpTable, LutError}, metadata::Metadata, semver::Semver, ANISE_VERSION, }; -use crate::{errors::IntegrityError, prelude::AniseError, NaifId}; +use crate::{ + errors::{DecodingError, IntegrityError}, + NaifId, +}; use core::fmt; use core::marker::PhantomData; +use core::ops::Deref; use der::{asn1::OctetStringRef, Decode, Encode, Reader, Writer}; use log::{error, trace}; -use std::ops::Deref; +use snafu::prelude::*; macro_rules! io_imports { () => { use std::fs::File; - use std::io::Write; + use std::io::{Error as IOError, ErrorKind as IOErrorKind, Write}; use std::path::Path; use std::path::PathBuf; }; @@ -31,6 +35,31 @@ macro_rules! io_imports { io_imports!(); +#[derive(Debug, Snafu)] +#[snafu(visibility(pub(crate)))] +pub enum DataSetError { + #[snafu(display("when {action} {source}"))] + DataSetLut { + action: &'static str, + source: LutError, + }, + #[snafu(display("when {action} {source}"))] + DataSetIntegrity { + action: &'static str, + source: IntegrityError, + }, + #[snafu(display("when {action} {source}"))] + DataDecoding { + action: &'static str, + source: DecodingError, + }, + #[snafu(display("input/output error while {action}"))] + IO { + action: &'static str, + source: IOError, + }, +} + #[derive(Clone, Copy, PartialEq, Eq, Debug)] #[repr(u8)] pub enum DataSetType { @@ -75,7 +104,9 @@ impl<'a> Decode<'a> for DataSetType { } /// The kind of data that can be encoded in a dataset -pub trait DataSetT<'a>: Encode + Decode<'a> {} +pub trait DataSetT<'a>: Encode + Decode<'a> { + const NAME: &'static str; +} /// A DataSet is the core structure shared by all ANISE binary data. 
#[derive(Clone, Default, PartialEq, Eq, Debug)] @@ -102,7 +133,7 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSetBuilder<'a, T, ENTRIES> { data: T, id: Option, name: Option<&'a str>, - ) -> Result<(), AniseError> { + ) -> Result<(), DataSetError> { let mut this_buf = vec![]; data.encode_to_vec(&mut this_buf).unwrap(); // Build this entry data. @@ -112,20 +143,38 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSetBuilder<'a, T, ENTRIES> { }; if id.is_some() && name.is_some() { - self.dataset.lut.append(id.unwrap(), name.unwrap(), entry)?; + self.dataset + .lut + .append(id.unwrap(), name.unwrap(), entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with ID and name", + })?; } else if id.is_some() { - self.dataset.lut.append_id(id.unwrap(), entry)?; + self.dataset + .lut + .append_id(id.unwrap(), entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with ID only", + })?; } else if name.is_some() { - self.dataset.lut.append_name(name.unwrap(), entry)?; + self.dataset + .lut + .append_name(name.unwrap(), entry) + .with_context(|_| DataSetLutSnafu { + action: "pushing data with name only", + })?; } else { - return Err(AniseError::ItemNotFound); + return Err(DataSetError::DataSetLut { + action: "pushing data", + source: LutError::NoKeyProvided, + }); } buf.extend_from_slice(&this_buf); Ok(()) } - pub fn finalize(mut self, buf: &'a [u8]) -> Result, AniseError> { + pub fn finalize(mut self, buf: &'a [u8]) -> Result, DataSetError> { self.dataset.bytes = buf; self.dataset.set_crc32(); Ok(self.dataset) @@ -134,41 +183,52 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSetBuilder<'a, T, ENTRIES> { impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSet<'a, T, ENTRIES> { /// Try to load an Anise file from a pointer of bytes - pub fn try_from_bytes(bytes: &'a [u8]) -> Result { + pub fn try_from_bytes(bytes: &'a [u8]) -> Result { match Self::from_der(bytes) { Ok(ctx) => { trace!("[try_from_bytes] loaded context successfully"); // Check the full integrity on load of the file. 
- // TODO: Raise this error - ctx.check_integrity().unwrap(); + ctx.check_integrity() + .with_context(|_| DataSetIntegritySnafu { + action: "loading data set from bytes", + })?; Ok(ctx) } - Err(e) => { + Err(_) => { // If we can't load the file, let's try to load the version only to be helpful - match bytes.get(0..5) { - Some(semver_bytes) => match Semver::from_der(semver_bytes) { - Ok(file_version) => { - if file_version == ANISE_VERSION { - error!("[try_from_bytes] context bytes corrupted but ANISE library version match"); - Err(AniseError::DecodingError(e)) - } else { - error!( - "[try_from_bytes] context bytes and ANISE library version mismatch" - ); - Err(AniseError::IncompatibleVersion { + let semver_bytes = bytes + .get(0..5) + .ok_or_else(|| DecodingError::InaccessibleBytes { + start: 0, + end: 5, + size: bytes.len(), + }) + .with_context(|_| DataDecodingSnafu { + action: "checking data set version", + })?; + match Semver::from_der(semver_bytes) { + Ok(file_version) => { + if file_version == ANISE_VERSION { + Err(DataSetError::DataDecoding { + action: "loading from bytes", + source: DecodingError::Obscure { kind: T::NAME }, + }) + } else { + Err(DataSetError::DataDecoding { + action: "checking data set version", + source: DecodingError::AniseVersion { got: file_version, exp: ANISE_VERSION, - }) - } + }, + }) } - Err(e) => { - error!("[try_from_bytes] context bytes not in ANISE format"); - Err(AniseError::DecodingError(e)) - } - }, - None => { - error!("[try_from_bytes] context bytes way too short (less than 5 bytes)"); - Err(AniseError::DecodingError(e)) + } + Err(err) => { + error!("context bytes not in ANISE format"); + Err(DataSetError::DataDecoding { + action: "loading SemVer", + source: DecodingError::DecodingDer { err }, + }) } } } @@ -221,44 +281,69 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSet<'a, T, ENTRIES> { } } - pub fn get_by_id(&self, id: NaifId) -> Result { + pub fn get_by_id(&self, id: NaifId) -> Result { if let Some(entry) = self.lut.by_id.get(&id) { // Found the ID - match T::from_der(&self.bytes[entry.as_range()]) { - Ok(data) => Ok(data), - Err(e) => { - println!("{e:?}"); - dbg!(&self.bytes[entry.as_range()]); - Err(AniseError::MalformedData(entry.start_idx as usize)) - } - } + let bytes = self + .bytes + .get(entry.as_range()) + .ok_or_else(|| entry.decoding_error()) + .with_context(|_| DataDecodingSnafu { + action: "fetching by ID", + })?; + T::from_der(bytes) + .map_err(|err| DecodingError::DecodingDer { err }) + .with_context(|_| DataDecodingSnafu { + action: "fetching by ID", + }) } else { - Err(AniseError::ItemNotFound) + Err(DataSetError::DataSetLut { + action: "fetching by ID", + source: LutError::UnknownId { id }, + }) } } - pub fn get_by_name(&self, id: NaifId) -> Result { - if let Some(entry) = self.lut.by_id.get(&id) { - // Found the ID - if let Ok(data) = T::from_der(&self.bytes[entry.as_range()]) { - Ok(data) - } else { - Err(AniseError::MalformedData(entry.start_idx as usize)) - } + pub fn get_by_name(&self, name: &str) -> Result { + if let Some(entry) = self.lut.by_name.get(&name) { + // Found the name + let bytes = self + .bytes + .get(entry.as_range()) + .ok_or_else(|| entry.decoding_error()) + .with_context(|_| DataDecodingSnafu { + action: "fetching by name", + })?; + T::from_der(bytes) + .map_err(|err| DecodingError::DecodingDer { err }) + .with_context(|_| DataDecodingSnafu { + action: "fetching by name", + }) } else { - Err(AniseError::ItemNotFound) + Err(DataSetError::DataSetLut { + action: "fetching by ID", + source: 
LutError::UnknownName { + name: name.to_string(), + }, + }) } } /// Saves this dataset to the provided file /// If overwrite is set to false, and the filename already exists, this function will return an error. - pub fn save_as(&self, filename: PathBuf, overwrite: bool) -> Result<(), AniseError> { + pub fn save_as(&self, filename: PathBuf, overwrite: bool) -> Result<(), DataSetError> { use log::{info, warn}; if Path::new(&filename).exists() { if !overwrite { - return Err(AniseError::FileExists); + return Err(DataSetError::IO { + source: IOError::new( + IOErrorKind::AlreadyExists, + "file exists and overwrite flag set to false", + ), + action: "creating data set file", + }); } else { warn!("[save_as] overwriting {}", filename.display()); } } @@ -268,17 +353,26 @@ impl<'a, T: DataSetT<'a>, const ENTRIES: usize> DataSet<'a, T, ENTRIES> { match File::create(&filename) { Ok(mut file) => { - if let Err(e) = self.encode_to_vec(&mut buf) { - return Err(AniseError::DecodingError(e)); + if let Err(err) = self.encode_to_vec(&mut buf) { + return Err(DataSetError::DataDecoding { + action: "encoding data set", + source: DecodingError::DecodingDer { err }, + }); } - if let Err(e) = file.write_all(&buf) { - Err(e.kind().into()) + if let Err(source) = file.write_all(&buf) { + Err(DataSetError::IO { + source, + action: "writing data set to file", + }) } else { info!("[OK] dataset saved to {}", filename.display()); Ok(()) } } - Err(e) => Err(e.kind().into()), + Err(source) => Err(DataSetError::IO { + source, + action: "creating data set file", + }), } } } diff --git a/src/structure/lookuptable.rs b/src/structure/lookuptable.rs index ab9cf9f4..34ec26f3 100644 --- a/src/structure/lookuptable.rs +++ b/src/structure/lookuptable.rs @@ -13,8 +13,28 @@ use der::{ }; use heapless::FnvIndexMap; use log::warn; - -use crate::{prelude::AniseError, NaifId}; +use snafu::prelude::*; + +use crate::{errors::DecodingError, NaifId}; + +#[derive(Debug, Snafu)] +#[snafu(visibility(pub(crate)))] +pub enum LutError { + #[snafu(display( + "ID LUT is full with all {max_slots} taken (increase ENTRIES at build time)" + ))] + IdLutFull { max_slots: usize }, + #[snafu(display( + "Names LUT is full with all {max_slots} taken (increase ENTRIES at build time)" + ))] + NameLutFull { max_slots: usize }, + #[snafu(display("must provide either an ID or a name for a lookup, but provided neither"))] + NoKeyProvided, + #[snafu(display("ID {id} not in lookup table"))] + UnknownId { id: NaifId }, + #[snafu(display("name {name} not in lookup table"))] + UnknownName { name: String }, +} /// A lookup table entry contains the start and end indexes in the data array of the data that is sought after.
/// @@ -31,6 +51,14 @@ impl Entry { pub(crate) fn as_range(&self) -> core::ops::Range { self.start_idx as usize..self.end_idx as usize } + /// Returns a pre-populated decoding error + pub(crate) fn decoding_error(&self) -> DecodingError { + DecodingError::InaccessibleBytes { + start: self.start_idx as usize, + end: self.end_idx as usize, + size: (self.end_idx - self.start_idx) as usize, + } + } } impl Encode for Entry { @@ -66,27 +94,27 @@ pub struct LookUpTable<'a, const ENTRIES: usize> { } impl<'a, const ENTRIES: usize> LookUpTable<'a, ENTRIES> { - pub fn append(&mut self, id: i32, name: &'a str, entry: Entry) -> Result<(), AniseError> { + pub fn append(&mut self, id: i32, name: &'a str, entry: Entry) -> Result<(), LutError> { self.by_id .insert(id, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::IdLutFull { max_slots: ENTRIES })?; self.by_name .insert(name, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::NameLutFull { max_slots: ENTRIES })?; Ok(()) } - pub fn append_id(&mut self, id: i32, entry: Entry) -> Result<(), AniseError> { + pub fn append_id(&mut self, id: i32, entry: Entry) -> Result<(), LutError> { self.by_id .insert(id, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::IdLutFull { max_slots: ENTRIES })?; Ok(()) } - pub fn append_name(&mut self, name: &'a str, entry: Entry) -> Result<(), AniseError> { + pub fn append_name(&mut self, name: &'a str, entry: Entry) -> Result<(), LutError> { self.by_name .insert(name, entry) - .map_err(|_| AniseError::StructureIsFull)?; + .map_err(|_| LutError::NameLutFull { max_slots: ENTRIES })?; Ok(()) } diff --git a/src/structure/metadata.rs b/src/structure/metadata.rs index c48b0566..4afe7c3b 100644 --- a/src/structure/metadata.rs +++ b/src/structure/metadata.rs @@ -12,7 +12,7 @@ use core::str::FromStr; use der::{asn1::Utf8StringRef, Decode, Encode, Reader, Writer}; use hifitime::Epoch; -use crate::prelude::AniseError; +use crate::errors::DecodingError; use super::{dataset::DataSetType, semver::Semver, ANISE_VERSION}; @@ -32,10 +32,26 @@ pub struct Metadata<'a> { impl<'a> Metadata<'a> { /// Only decode the anise version and dataset type - pub fn decode_header(bytes: &[u8]) -> Result { - let anise_version = Semver::from_der(&bytes[..5]).map_err(AniseError::DecodingError)?; - let dataset_type = - DataSetType::from_der(&bytes[5..8]).map_err(AniseError::DecodingError)?; + pub fn decode_header(bytes: &[u8]) -> Result { + let anise_version = + Semver::from_der( + bytes + .get(..5) + .ok_or_else(|| DecodingError::InaccessibleBytes { + start: 0, + end: 5, + size: bytes.len(), + })?, + ) + .or_else(|err| Err(DecodingError::DecodingDer { err }))?; + let dataset_type = DataSetType::from_der(bytes.get(5..8).ok_or_else(|| { + DecodingError::InaccessibleBytes { + start: 5, + end: 8, + size: bytes.len(), + } + })?) + .or_else(|err| Err(DecodingError::DecodingDer { err }))?; let me = Self { anise_version, dataset_type, diff --git a/src/structure/planetocentric/mod.rs b/src/structure/planetocentric/mod.rs index 841afa2d..7b039a7b 100644 --- a/src/structure/planetocentric/mod.rs +++ b/src/structure/planetocentric/mod.rs @@ -42,7 +42,9 @@ pub struct PlanetaryData { pub nut_prec_angles: [NutationPrecessionAngle; MAX_NUT_PREC_ANGLES], } -impl<'a> DataSetT<'a> for PlanetaryData {} +impl<'a> DataSetT<'a> for PlanetaryData { + const NAME: &'static str = "planetary data"; +} impl PlanetaryData { /// Specifies what data is available in this structure. 
diff --git a/src/structure/spacecraft/mod.rs b/src/structure/spacecraft/mod.rs index 34942f7a..9effd207 100644 --- a/src/structure/spacecraft/mod.rs +++ b/src/structure/spacecraft/mod.rs @@ -38,7 +38,9 @@ pub struct SpacecraftData<'a> { pub inertia: Option, } -impl<'a> DataSetT<'a> for SpacecraftData<'a> {} +impl<'a> DataSetT<'a> for SpacecraftData<'a> { + const NAME: &'static str = "spacecraft data"; +} impl<'a> SpacecraftData<'a> { /// Specifies what data is available in this structure.
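
Note (illustrative sketch, not part of the patch): the recurring pattern in this diff is snafu context chaining, where a low-level error such as LutError is wrapped into DataSetError together with an `action` string describing what the caller was doing. The self-contained sketch below shows how the generated *Snafu context selectors and `with_context` fit together. The variant names, selector names, and the "pushing data with ID only" action mirror the diff; `demo_append`, `slots_used`, and the hard-coded capacity of 8 are made up for the example.

use snafu::prelude::*;

// Trimmed stand-ins for the crate's LutError and DataSetError.
#[derive(Debug, Snafu)]
enum LutError {
    #[snafu(display("ID LUT is full with all {max_slots} taken (increase ENTRIES at build time)"))]
    IdLutFull { max_slots: usize },
}

#[derive(Debug, Snafu)]
enum DataSetError {
    #[snafu(display("when {action} {source}"))]
    DataSetLut {
        action: &'static str,
        source: LutError,
    },
}

// Stand-in for LookUpTable::append_id, which fails when every slot is taken.
fn append_id(slots_used: usize) -> Result<(), LutError> {
    ensure!(slots_used < 8, IdLutFullSnafu { max_slots: 8usize });
    Ok(())
}

// Hypothetical caller mirroring DataSetBuilder::push_into: the low-level LutError
// is wrapped with an `action` describing what was being attempted when it failed.
fn demo_append(slots_used: usize) -> Result<(), DataSetError> {
    append_id(slots_used).with_context(|_| DataSetLutSnafu {
        action: "pushing data with ID only",
    })
}

fn main() {
    // Prints: when pushing data with ID only ID LUT is full with all 8 taken (increase ENTRIES at build time)
    if let Err(e) = demo_append(8) {
        println!("{e}");
    }
}

The `action` field is the design choice that replaces the old flat AniseError: the wrapping variant says what operation was in flight, while `source` keeps the underlying cause intact for the full error chain.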
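
A second pattern worth calling out: direct slices such as `&bytes[..5]` become checked `bytes.get(..5)` calls that map a too-short buffer to `DecodingError::InaccessibleBytes` instead of panicking (see Metadata::decode_header and DataSet::get_by_id above). A minimal sketch of that shape follows, assuming a trimmed stand-in for the crate's DecodingError and a hypothetical `semver_bytes` helper; the 0..5 range matches the five bytes the diff reserves for the DER-encoded Semver header.

#[derive(Debug, PartialEq)]
enum DecodingError {
    InaccessibleBytes { start: usize, end: usize, size: usize },
}

// Grab the five bytes that hold the version header, or report exactly which
// range was requested and how many bytes were actually available.
fn semver_bytes(bytes: &[u8]) -> Result<&[u8], DecodingError> {
    bytes.get(..5).ok_or(DecodingError::InaccessibleBytes {
        start: 0,
        end: 5,
        size: bytes.len(),
    })
}

fn main() {
    // Three bytes cannot hold the five-byte version header.
    assert_eq!(
        semver_bytes(&[1, 2, 3]),
        Err(DecodingError::InaccessibleBytes { start: 0, end: 5, size: 3 })
    );
    assert!(semver_bytes(&[0, 1, 2, 3, 4, 5]).is_ok());
}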