From d84df77da6f2888a0cd948d72db6277db06ee84a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Zsolt=20B=C3=B6l=C3=B6ny?=
Date: Sun, 31 Mar 2024 12:50:18 +0200
Subject: [PATCH] Introduce clippy

---
 .github/workflows/ci.yml   |  5 +++
 CHANGELOG.md               |  6 +++
 examples/count_wikidata.rs | 13 +++---
 examples/print_header.rs   |  9 ++---
 src/dense.rs               | 65 ++++++++++++++++--------------
 src/lib.rs                 | 81 ++++++++++++++++++--------------------
 src/pbf.rs                 |  1 +
 7 files changed, 96 insertions(+), 84 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 45c2918..2282f49 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,6 +2,9 @@ name: continuous-integration
 
 on: [push, pull_request]
 
+env:
+  RUSTFLAGS: "-Dwarnings"
+
 jobs:
   build_and_test:
     name: Build and test
@@ -28,3 +31,5 @@ jobs:
         run: cargo build ${{ matrix.features }}
       - name: Test
         run: cargo test ${{ matrix.features }}
+      - name: Clippy
+        run: cargo clippy --all-targets --all-features
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 918ce1d..3716a7f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,9 +6,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased]
+### Added
+- Clippy step to CI
+
 ### Changed
 - Updated dependencies
 
+### Fixed
+- All `clippy::all` warnings and some pedantic warnings too
+
 ## [1.0.1] - 2023-11-11
 ### Changed
 - Updated dependencies
diff --git a/examples/count_wikidata.rs b/examples/count_wikidata.rs
index bb417ad..de45ce1 100644
--- a/examples/count_wikidata.rs
+++ b/examples/count_wikidata.rs
@@ -35,7 +35,7 @@ fn process_primitive_block(block: pbf::PrimitiveBlock) -> Result<(), Error> {
         }
 
         if let Some(dense_nodes) = &group.dense {
-            let nodes = DenseNodeReader::new(&dense_nodes)?;
+            let nodes = DenseNodeReader::new(dense_nodes)?;
 
             for node in nodes {
                 let tags = new_dense_tag_reader(string_table, node?.key_value_indices);
@@ -54,17 +54,16 @@ fn parse_block(block_parser: &mut BlockParser, raw_block: RawBlock) {
     match block_parser.parse_block(raw_block) {
         Ok(block) => match block {
             Block::Header(header_block) => process_header_block(header_block),
-            Block::Primitive(primitive_block) => match process_primitive_block(primitive_block) {
-                Err(error) => {
-                    error!("Error during processing a primitive block: {:?}", error)
+            Block::Primitive(primitive_block) => {
+                if let Err(error) = process_primitive_block(primitive_block) {
+                    error!("Error during processing a primitive block: {error:?}")
                 }
-                _ => {}
-            },
+            }
             Block::Unknown(unknown_block) => {
                 warn!("Skipping unknown block of size {}", unknown_block.len())
             }
         },
-        Err(error) => error!("Error during parsing a block: {:?}", error),
+        Err(error) => error!("Error during parsing a block: {error:?}"),
     }
 }
diff --git a/examples/print_header.rs b/examples/print_header.rs
index ba73755..345afd3 100644
--- a/examples/print_header.rs
+++ b/examples/print_header.rs
@@ -14,12 +14,9 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
     while let Some(raw_block) = read_blob(&mut file) {
         let block = block_parser.parse_block(raw_block?)?;
 
-        match block {
-            Block::Header(header_block) => {
-                println!("{:#?}", header_block);
-                break;
-            }
-            _ => {}
+        if let Block::Header(header_block) = block {
+            println!("{:#?}", header_block);
+            break;
         }
     }
 
diff --git a/src/dense.rs b/src/dense.rs
index 5b5ef16..f7e8994 100644
--- a/src/dense.rs
+++ b/src/dense.rs
@@ -37,12 +37,15 @@ struct DeltaCodedValues {
     user_sid: u32,
 }
 
+type IdDeltaIt<'a> = Iter<'a, i64>;
+type LatLonDeltaIt<'a> = Zip<Iter<'a, i64>, Iter<'a, i64>>;
+
 /// Utility for reading delta-encoded dense nodes.
 pub struct DenseNodeReader<'a> {
     data: &'a pbf::DenseNodes,
-    data_it: Enumerate<Zip<Iter<'a, i64>, Zip<Iter<'a, i64>, Iter<'a, i64>>>>, // (data_idx, (id_delta, (lat_delta, lon_delta))) iterator
-    key_value_idx: usize, // Starting index of the next node's keys/values
-    current: DeltaCodedValues, // Current values of delta coded fields
+    data_it: Enumerate<Zip<IdDeltaIt<'a>, LatLonDeltaIt<'a>>>, // (data_idx, (id_delta, (lat_delta, lon_delta))) iterator
+    key_value_idx: usize,      // Starting index of the next node's keys/values
+    current: DeltaCodedValues, // Current values of delta coded fields
 }
 
 impl<'a> DenseNodeReader<'a> {
@@ -70,6 +73,10 @@ impl<'a> DenseNodeReader<'a> {
     ///     Ok(())
     /// }
     /// ```
+    ///
+    /// # Errors
+    ///
+    /// Will return `Err` if the latitude, longitude and ID counts in `data` do not match.
     pub fn new(data: &'a pbf::DenseNodes) -> Result<Self, Error> {
         if data.lat.len() != data.id.len() || data.lon.len() != data.id.len() {
             Err(Error::LogicError(format!(
@@ -131,12 +138,12 @@ impl<'a> Iterator for DenseNodeReader<'a> {
                 };
 
                 Some(pbf::Info {
-                    version: dense_info.version.get(data_idx).cloned(),
+                    version: dense_info.version.get(data_idx).copied(),
                     timestamp: delta_decode(&mut self.current.timestamp, dense_info.timestamp.get(data_idx)),
                     changeset: delta_decode(&mut self.current.changeset, dense_info.changeset.get(data_idx)),
                     uid: delta_decode(&mut self.current.uid, dense_info.uid.get(data_idx)),
                     user_sid,
-                    visible: dense_info.visible.get(data_idx).cloned(),
+                    visible: dense_info.visible.get(data_idx).copied(),
                 })
             }
             None => None,
@@ -176,6 +183,29 @@ impl<'a> Iterator for DenseNodeReader<'a> {
     }
 }
 
+/// Constructs a new `TagReader` from a dense key/value index slice, and a corresponding string table.
+///
+/// See [`DenseNodeReader::new`] and [`DenseNode::key_value_indices`].
+pub fn new_dense_tag_reader<'a>(
+    string_table: &'a pbf::StringTable,
+    key_value_indices: &'a [i32],
+) -> TagReader<'a, impl Iterator<Item = (Result<usize, Error>, Result<usize, Error>)> + 'a> {
+    TagReader {
+        string_table,
+        iter: key_value_indices.chunks_exact(2).map(|s| {
+            let convert_idx = |index: i32| -> Result<usize, Error> {
+                if let Ok(index) = TryInto::<usize>::try_into(index) {
+                    Ok(index)
+                } else {
+                    Err(Error::LogicError(format!("string table index {index} is invalid")))
+                }
+            };
+
+            (convert_idx(s[0]), convert_idx(s[1]))
+        }),
+    }
+}
+
 #[cfg(test)]
 mod dense_node_reader_tests {
     use super::*;
@@ -200,7 +230,7 @@ mod dense_node_reader_tests {
         };
 
         let reader = DenseNodeReader::new(&dense_nodes).expect("dense node reader should be created on valid data");
-        let mut result: Vec<DenseNode> = reader.filter_map(|r| r.ok()).collect();
+        let mut result: Vec<DenseNode> = reader.filter_map(Result::ok).collect();
 
         assert_eq!(result.len(), 2);
         let first = &mut result[0];
@@ -270,26 +300,3 @@ mod dense_node_reader_tests {
         assert!(next.unwrap().is_err());
     }
 }
-
-/// Constructs a new `TagReader` from a dense key/value index slice, and a corresponding string table.
-///
-/// See [`DenseNodeReader::new`] and [`DenseNode::key_value_indices`].
-pub fn new_dense_tag_reader<'a>(
-    string_table: &'a pbf::StringTable,
-    key_value_indices: &'a [i32],
-) -> TagReader<'a, impl Iterator<Item = (Result<usize, Error>, Result<usize, Error>)> + 'a> {
-    TagReader {
-        string_table,
-        iter: key_value_indices.chunks_exact(2).map(|s| {
-            let convert_idx = |index: i32| -> Result<usize, Error> {
-                if let Ok(index) = TryInto::<usize>::try_into(index) {
-                    Ok(index)
-                } else {
-                    Err(Error::LogicError(format!("string table index {} is invalid", index)))
-                }
-            };
-
-            (convert_idx(s[0]), convert_idx(s[1]))
-        }),
-    }
-}
diff --git a/src/lib.rs b/src/lib.rs
index a39fe63..fa4c997 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -48,7 +48,7 @@ pub enum Error {
 
 impl std::fmt::Display for Error {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{:?}", self)
+        write!(f, "{self:?}")
     }
 }
 
@@ -108,8 +108,6 @@ pub fn read_blob<Input>(pbf: &mut Input) -> Option<Result<RawBlock, Error>>
 where
     Input: std::io::Read,
 {
-    use pbf::BlobHeader;
-
     let mut header_size_buffer = [0u8; 4];
 
     if let Err(error) = pbf.read_exact(&mut header_size_buffer) {
@@ -119,33 +117,44 @@ where
         };
     }
 
-    let blob_header_size = i32::from_be_bytes(header_size_buffer);
+    Some(read_blob_inner(pbf, header_size_buffer))
+}
+
+fn read_blob_inner<Input>(pbf: &mut Input, header_size_buffer: [u8; 4]) -> Result<RawBlock, Error>
+where
+    Input: std::io::Read,
+{
+    use pbf::BlobHeader;
+
+    let blob_header_size: usize = i32::from_be_bytes(header_size_buffer)
+        .try_into()
+        .map_err(|_err| Error::InvalidBlobHeader)?;
 
-    if !(0..64 * 1024).contains(&blob_header_size) {
-        return Some(Err(Error::InvalidBlobHeader));
+    if blob_header_size >= 64 * 1024 {
+        return Err(Error::InvalidBlobHeader);
     }
 
-    let mut blob = vec![0u8; blob_header_size as usize];
+    let mut blob = vec![0u8; blob_header_size];
 
     if let Err(error) = pbf.read_exact(&mut blob) {
-        return Some(Err(Error::IoError(error)));
+        return Err(Error::IoError(error));
     }
 
     let blob_header = match BlobHeader::decode(&*blob) {
        Ok(blob_header) => blob_header,
-       Err(error) => return Some(Err(Error::PbfParseError(error))),
+       Err(error) => return Err(Error::PbfParseError(error)),
     };
 
     let block_type = BlockType::from(blob_header.r#type.as_ref());
-    let blob_size = blob_header.datasize;
+    let blob_size: usize = blob_header.datasize.try_into().map_err(|_err| Error::InvalidBlobData)?;
 
-    if !(0..32 * 1024 * 1024).contains(&blob_size) {
-        return Some(Err(Error::InvalidBlobData));
+    if blob_size >= 32 * 1024 * 1024 {
+        return Err(Error::InvalidBlobData);
     }
 
-    blob.resize_with(blob_size as usize, Default::default);
+    blob.resize_with(blob_size, Default::default);
 
     if let Err(error) = pbf.read_exact(&mut blob) {
-        return Some(Err(Error::IoError(error)));
+        return Err(Error::IoError(error));
     }
 
     let raw_block = RawBlock {
         block_type,
         data: blob,
     };
 
-    Some(Ok(raw_block))
+    Ok(raw_block)
 }
@@ -193,7 +202,7 @@ impl Decompressor for DefaultDecompressor {
     fn decompress(method: CompressionMethod, input: &[u8], output: &mut [u8]) -> Result<(), DecompressionError> {
         match method {
             CompressionMethod::Zlib => {
-                let mut decoder = ZlibDecoder::new(input.as_ref());
+                let mut decoder = ZlibDecoder::new(input);
 
                 match decoder.read_exact(output) {
                     Ok(_) => Ok(()),
@@ -236,6 +245,10 @@ impl BlockParser {
     }
 
     /// Parses `raw_block` into a header, primitive or unknown block.
+    ///
+    /// # Errors
+    ///
+    /// Will return `Err` if an error occurs during PBF parsing, decompression or validation.
     #[allow(deprecated)]
     pub fn parse_block(&mut self, raw_block: RawBlock) -> Result<Block, Error> {
         let blob = match pbf::Blob::decode(&*raw_block.data) {
@@ -244,8 +257,8 @@ impl BlockParser {
         };
 
         if let Some(uncompressed_size) = blob.raw_size {
-            self.block_buffer
-                .resize_with(uncompressed_size as usize, Default::default);
+            let uncompressed_size: usize = uncompressed_size.try_into().map_err(|_err| Error::InvalidBlobData)?;
+            self.block_buffer.resize_with(uncompressed_size, Default::default);
         }
 
         if let Some(blob_data) = blob.data {
@@ -318,20 +331,16 @@ where
                 if let Ok(utf8_string) = str::from_utf8(bytes) {
                     Ok(utf8_string)
                 } else {
-                    Err(Error::LogicError(format!(
-                        "string at index {} is not valid UTF-8",
-                        index
-                    )))
+                    Err(Error::LogicError(format!("string at index {index} is not valid UTF-8")))
                 }
             } else {
                 Err(Error::LogicError(format!(
-                    "string table index {} is out of bounds ({})",
-                    index,
+                    "string table index {index} is out of bounds ({})",
                     self.string_table.s.len()
                 )))
             }
         } else {
-            Err(Error::LogicError(format!("string table index {} is invalid", index)))
+            Err(Error::LogicError(format!("string table index {index} is invalid")))
         }
     };
 
@@ -390,28 +399,16 @@ mod tag_reader_tests {
     #[test]
     fn valid_input() {
         let key_vals = ["", "key1", "val1", "key2", "val2"];
-        let mut string_table = pbf::StringTable::default();
-        string_table.s = key_vals.iter().map(|s| s.as_bytes().to_vec()).collect();
+        let string_table = pbf::StringTable {
+            s: key_vals.iter().map(|s| s.as_bytes().to_vec()).collect(),
+        };
 
         let key_indices = [1, 3];
         let value_indices = [2, 4];
 
         let mut reader = new_tag_reader(&string_table, &key_indices, &value_indices);
 
-        match reader.next() {
-            Some(tags) => match tags {
-                (Ok("key1"), Ok("val1")) => {}
-                _ => assert!(false),
-            },
-            None => assert!(false),
-        }
-
-        match reader.next() {
-            Some(tags) => match tags {
-                (Ok("key2"), Ok("val2")) => {}
-                _ => assert!(false),
-            },
-            None => assert!(false),
-        }
+        assert!(matches!(reader.next(), Some((Ok("key1"), Ok("val1"))))); 
+        assert!(matches!(reader.next(), Some((Ok("key2"), Ok("val2"))))); 
 
         assert!(reader.next().is_none());
     }
diff --git a/src/pbf.rs b/src/pbf.rs
index 6bfac45..43a181d 100644
--- a/src/pbf.rs
+++ b/src/pbf.rs
@@ -1 +1,2 @@
+#![allow(clippy::pedantic)]
 include!(concat!(env!("OUT_DIR"), "/proto/osmpbf.rs"));
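
Reviewer note, not part of the patch: for context on the dense-node code touched in src/dense.rs, here is a minimal standalone sketch of the delta decoding that DenseNodeReader performs. OSM PBF dense nodes store each field (id, lat, lon, timestamp, ...) as the difference from the previous node's value. The crate's own delta_decode helper has a different signature, so the names and types below are illustrative only.

fn delta_decode(current: &mut i64, delta: i64) -> i64 {
    // Dense nodes are delta coded: each stored value is an offset from the
    // previous node's value, so decoding keeps a running sum per field.
    *current += delta;
    *current
}

fn main() {
    let id_deltas = [1000, 1, 1, 2]; // encoded form, as in pbf::DenseNodes::id
    let mut current = 0;
    let ids: Vec<i64> = id_deltas.iter().map(|&d| delta_decode(&mut current, d)).collect();
    assert_eq!(ids, [1000, 1001, 1002, 1004]); // decoded absolute IDs
}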