diff --git a/Cargo.toml b/Cargo.toml
index 0d2272f2d10..b128a0bca55 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -144,12 +144,10 @@ rust.nonstandard_style = "deny"
 rust.rust_2018_idioms = "deny"
 rust.trivial_casts = "deny"
 rust.trivial_numeric_casts = "deny"
-rust.unconditional_recursion = "deny"
 rust.unsafe_code = "deny"
 rust.unused = "deny"
 rust.unused_import_braces = "deny"
 rust.variant_size_differences = "deny"
-rust.unused_tuple_struct_fields = "deny"
 rust.explicit_outlives_requirements = "deny"
 rust.non_ascii_idents = "deny"
 rust.elided_lifetimes_in_paths = "allow"
@@ -159,9 +157,6 @@ rust.unused_lifetimes = "warn"
 # TODO: reenable
 # rust.unsafe_op_in_unsafe_fn = "deny"
 
-# lower the priority to allow overriding later
-clippy.all = { level = "deny", priority = -1 }
-
 # pedantic
 clippy.pedantic = { level = "warn", priority = -1 }
 clippy.match_wildcard_for_single_variants = "allow"
@@ -171,6 +166,7 @@ clippy.manual_let_else = "allow"
 clippy.enum_glob_use = "allow"
 clippy.module_name_repetitions = "allow"
 clippy.must_use_candidate = "allow"
+clippy.missing_panics_doc = "allow"
 
 # restriction
 clippy.dbg_macro = "deny"
diff --git a/cli/src/lib.rs b/cli/src/lib.rs
index b63e0880efd..d4f9265f598 100644
--- a/cli/src/lib.rs
+++ b/cli/src/lib.rs
@@ -103,6 +103,7 @@ impl NetworkRelay {
     async fn run(mut self) {
         let (sender, mut receiver) = mpsc::channel(1);
         self.network.subscribe_to_peers_messages(sender);
+        // NOTE: Triggered by tokio::select
         #[allow(clippy::redundant_pub_crate)]
         loop {
             tokio::select! {
@@ -406,7 +407,6 @@ impl Iroha {
         Ok(TelemetryStartStatus::NotStarted)
     }
 
-    #[allow(clippy::redundant_pub_crate)]
     fn start_listening_signal(notify_shutdown: Arc<Notify>) -> Result<task::JoinHandle<()>> {
         let (mut sigint, mut sigterm) = signal::unix::signal(signal::unix::SignalKind::interrupt())
             .and_then(|sigint| {
@@ -416,6 +416,8 @@ impl Iroha {
             })
             .wrap_err("Failed to start listening for OS signals")?;
 
+        // NOTE: Triggered by tokio::select
+        #[allow(clippy::redundant_pub_crate)]
         let handle = task::spawn(async move {
             tokio::select! {
                 _ = sigint.recv() => {
@@ -447,6 +449,8 @@ impl Iroha {
             // FIXME: don't like neither the message nor inability to throw Result to the outside
             .expect("Cannot proceed without working subscriptions");
 
+        // NOTE: Triggered by tokio::select
+        #[allow(clippy::redundant_pub_crate)]
         loop {
             tokio::select! {
                 Ok(()) = log_level_update.changed() => {
@@ -528,7 +532,7 @@ pub fn read_config(
     let mut cfg = config.override_with(ConfigurationProxy::from_path(&*actual_config_path));
     let config_dir = actual_config_path
         .parent()
-        .expect("If config file was read, than it should has a parent. It is a bug.");
+        .expect("If config file was read, then it should have a parent. It is a bug.");
 
     // careful here: `genesis.file` might be a path relative to the config file.
     // we need to resolve it before proceeding
diff --git a/client/tests/integration/events/data.rs b/client/tests/integration/events/data.rs
index 9f914bb5bb3..7c6547cff9a 100644
--- a/client/tests/integration/events/data.rs
+++ b/client/tests/integration/events/data.rs
@@ -6,8 +6,65 @@ use parity_scale_codec::Encode as _;
 use serde_json::json;
 use test_network::*;
 
-use crate::wasm::utils::wasm_template;
+/// Return string containing exported memory, dummy allocator, and
+/// host function imports which you can embed into your wasm module.
+///
+/// Memory is initialized with the given hex encoded string value
+// NOTE: It's expected that hex value is of even length
+#[allow(clippy::integer_division)]
+pub fn wasm_template(hex_val: &str) -> String {
+    format!(
+        r#"
+        ;; Import host function to execute instruction
+        (import "iroha" "{execute_instruction}"
+            (func $exec_isi (param i32 i32) (result i32)))
+
+        ;; Import host function to execute query
+        (import "iroha" "{execute_query}"
+            (func $exec_query (param i32 i32) (result i32)))
+
+        ;; Embed ISI into WASM binary memory
+        (memory (export "{memory_name}") 1)
+        (data (i32.const 0) "{hex_val}")
+
+        ;; Variable which tracks total allocated size
+        (global $mem_size (mut i32) i32.const {hex_len})
+
+        ;; Export mock allocator to host. This allocator never frees!
+        (func (export "{alloc_fn_name}") (param $size i32) (result i32)
+            global.get $mem_size
+
+            (global.set $mem_size
+                (i32.add (global.get $mem_size) (local.get $size)))
+        )
+
+        ;; Export mock deallocator to host. This allocator does nothing!
+        (func (export "{dealloc_fn_name}") (param $size i32) (param $len i32)
+            nop)
+        "#,
+        memory_name = "memory",
+        alloc_fn_name = "_iroha_smart_contract_alloc",
+        dealloc_fn_name = "_iroha_smart_contract_dealloc",
+        execute_instruction = "execute_instruction",
+        execute_query = "execute_query",
+        hex_val = escape_hex(hex_val),
+        hex_len = hex_val.len() / 2,
+    )
+}
+
+fn escape_hex(hex_val: &str) -> String {
+    let mut isi_hex = String::with_capacity(3 * hex_val.len());
+
+    for (i, c) in hex_val.chars().enumerate() {
+        if i % 2 == 0 {
+            isi_hex.push('\\');
+        }
+
+        isi_hex.push(c);
+    }
+    isi_hex
+}
 
 fn produce_instructions() -> Vec<InstructionBox> {
     let domains = (0..4)
         .map(|domain_index: usize| Domain::new(domain_index.to_string().parse().expect("Valid")));
diff --git a/client/tests/integration/events/pipeline.rs b/client/tests/integration/events/pipeline.rs
index 77d99dd1b57..ba4574d46e3 100644
--- a/client/tests/integration/events/pipeline.rs
+++ b/client/tests/integration/events/pipeline.rs
@@ -12,7 +12,6 @@ use iroha_config::iroha::Configuration;
 use test_network::*;
 
 // Needed to re-enable ignored tests.
-#[allow(dead_code)]
 const PEER_COUNT: usize = 7;
 
 #[ignore = "ignore, more in #2851"]
@@ -33,7 +32,6 @@ fn transaction_with_fail_instruction_should_be_rejected() -> Result<()> {
     )
 }
 
-#[allow(dead_code, clippy::needless_range_loop, clippy::needless_pass_by_value)]
 fn test_with_instruction_and_status_and_port(
     instruction: Option<InstructionBox>,
     should_be: PipelineStatusKind,
diff --git a/client/tests/integration/unregister_peer.rs b/client/tests/integration/unregister_peer.rs
index 3121bf1de76..16f6ad87c10 100644
--- a/client/tests/integration/unregister_peer.rs
+++ b/client/tests/integration/unregister_peer.rs
@@ -112,14 +112,11 @@ fn init() -> Result<(
     let asset_definition_id: AssetDefinitionId = "xor#domain".parse()?;
     let create_asset =
         Register::asset_definition(AssetDefinition::quantity(asset_definition_id.clone()));
-    let instructions = parameters.into_iter().chain(
-        [
-            create_domain.into(),
-            create_account.into(),
-            create_asset.into(),
-        ]
-        .into_iter(),
-    );
+    let instructions = parameters.into_iter().chain([
+        create_domain.into(),
+        create_account.into(),
+        create_asset.into(),
+    ]);
     client.submit_all_blocking(instructions)?;
     iroha_logger::info!("Init");
     Ok((
diff --git a/client/tests/mod.rs b/client/tests/mod.rs
index 46a97877ed9..ecebd561003 100644
--- a/client/tests/mod.rs
+++ b/client/tests/mod.rs
@@ -1,3 +1,2 @@
 #[cfg(not(coverage))]
 mod integration;
-mod wasm;
diff --git a/client/tests/wasm/mod.rs b/client/tests/wasm/mod.rs
deleted file mode 100644
index b5614dd8233..00000000000
--- a/client/tests/wasm/mod.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod utils;
diff --git a/client/tests/wasm/utils.rs b/client/tests/wasm/utils.rs
deleted file mode 100644
index 53f3516ea68..00000000000
--- a/client/tests/wasm/utils.rs
+++ /dev/null
@@ -1,59 +0,0 @@
-/// Return string containing exported memory, dummy allocator, and
-/// host function imports which you can embed into your wasm module.
-///
-/// Memory is initialized with the given hex encoded string value
-// It's expected that hex value is of even length
-#[allow(clippy::integer_division)]
-pub fn wasm_template(hex_val: &str) -> String {
-    format!(
-        r#"
-        ;; Import host function to execute instruction
-        (import "iroha" "{execute_instruction}"
-            (func $exec_isi (param i32 i32) (result i32)))
-
-        ;; Import host function to execute query
-        (import "iroha" "{execute_query}"
-            (func $exec_query (param i32 i32) (result i32)))
-
-        ;; Embed ISI into WASM binary memory
-        (memory (export "{memory_name}") 1)
-        (data (i32.const 0) "{hex_val}")
-
-        ;; Variable which tracks total allocated size
-        (global $mem_size (mut i32) i32.const {hex_len})
-
-        ;; Export mock allocator to host. This allocator never frees!
-        (func (export "{alloc_fn_name}") (param $size i32) (result i32)
-            global.get $mem_size
-
-            (global.set $mem_size
-                (i32.add (global.get $mem_size) (local.get $size)))
-        )
-
-        ;; Export mock deallocator to host. This allocator does nothing!
-        (func (export "{dealloc_fn_name}") (param $size i32) (param $len i32)
-            nop)
-        "#,
-        memory_name = "memory",
-        alloc_fn_name = "_iroha_smart_contract_alloc",
-        dealloc_fn_name = "_iroha_smart_contract_dealloc",
-        execute_instruction = "execute_instruction",
-        execute_query = "execute_query",
-        hex_val = escape_hex(hex_val),
-        hex_len = hex_val.len() / 2,
-    )
-}
-
-fn escape_hex(hex_val: &str) -> String {
-    let mut isi_hex = String::with_capacity(3 * hex_val.len());
-
-    for (i, c) in hex_val.chars().enumerate() {
-        if i % 2 == 0 {
-            isi_hex.push('\\');
-        }
-
-        isi_hex.push(c);
-    }
-
-    isi_hex
-}
diff --git a/client_cli/src/main.rs b/client_cli/src/main.rs
index bf818f1ead0..05b9121302c 100644
--- a/client_cli/src/main.rs
+++ b/client_cli/src/main.rs
@@ -69,7 +69,7 @@ impl FromStr for ValueArg {
             .or_else(|_| s.parse::<Ipv6Addr>().map(Value::Ipv6Addr))
             .or_else(|_| s.parse::<NumericValue>().map(Value::Numeric))
            .or_else(|_| s.parse::<PublicKey>().map(Value::PublicKey))
-            .or_else(|_| serde_json::from_str::<Value>(s).map_err(std::convert::Into::into))
+            .or_else(|_| serde_json::from_str::<Value>(s).map_err(Into::into))
             .map(ValueArg)
     }
 }
diff --git a/config/src/path.rs b/config/src/path.rs
index 488af5ded46..23f1bd80b57 100644
--- a/config/src/path.rs
+++ b/config/src/path.rs
@@ -69,9 +69,10 @@ impl Path {
     /// If the path has an extension.
     pub fn default(path: impl AsRef<std::path::Path>) -> Self {
         let path = path.as_ref().to_path_buf();
-        if path.extension().is_some() {
-            panic!("Default config path is not supposed to have an extension. It is a bug.")
-        }
+        assert!(
+            path.extension().is_none(),
+            "Default config path is not supposed to have an extension. It is a bug."
+        );
         Self(Default(path))
     }
 
diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs
index bf74dfcbbfd..d2d150d102e 100644
--- a/core/src/block_sync.rs
+++ b/core/src/block_sync.rs
@@ -218,7 +218,7 @@ pub mod message {
                     .take(1 + block_sync.block_batch_size as usize)
                     .map_while(|height| block_sync.kura.get_block_by_height(height))
                     .skip_while(|block| Some(block.hash()) == *latest_hash)
-                    .map(|block| SignedBlock::clone(&block))
+                    .map(|block| (*block).clone())
                     .collect::<Vec<_>>();
 
                 if blocks.is_empty() {
diff --git a/core/src/kiso.rs b/core/src/kiso.rs
index cb6d98bf05b..a7f62be4449 100644
--- a/core/src/kiso.rs
+++ b/core/src/kiso.rs
@@ -147,7 +147,6 @@ impl Actor {
 }
 
 #[cfg(test)]
-#[allow(unused)]
 mod tests {
     use std::time::Duration;
 
diff --git a/core/src/kura.rs b/core/src/kura.rs
index 11dbf2c5192..2f7bed3cc50 100644
--- a/core/src/kura.rs
+++ b/core/src/kura.rs
@@ -31,7 +31,6 @@ const SIZE_OF_BLOCK_HASH: u64 = Hash::LENGTH as u64;
 #[derive(Debug)]
 pub struct Kura {
     /// The mode of initialisation of [`Kura`].
-    #[allow(dead_code)]
     mode: Mode,
     /// The block storage
     block_store: Mutex<BlockStore>,
diff --git a/core/src/queue.rs b/core/src/queue.rs
index 2872ebc9365..b06a3a6c82f 100644
--- a/core/src/queue.rs
+++ b/core/src/queue.rs
@@ -185,7 +185,6 @@ impl Queue {
     ///
     /// # Errors
     /// See [`enum@Error`]
-    #[allow(clippy::missing_panics_doc)] // NOTE: It's a system invariant, should never happen
     pub fn push(&self, tx: AcceptedTransaction, wsv: &WorldStateView) -> Result<(), Failure> {
         trace!(?tx, "Pushing to the queue");
         if let Err(err) = self.check_tx(&tx, wsv) {
@@ -250,9 +249,8 @@ impl Queue {
         expired_transactions: &mut Vec<AcceptedTransaction>,
     ) -> Option<AcceptedTransaction> {
         loop {
-            let Some(hash) = self.tx_hashes.pop() else {
-                return None;
-            };
+            let hash = self.tx_hashes.pop()?;
+
             let entry = match self.accepted_txs.entry(hash) {
                 Entry::Occupied(entry) => entry,
                 // FIXME: Reachable under high load. Investigate, see if it's a problem.
diff --git a/core/src/smartcontracts/isi/account.rs b/core/src/smartcontracts/isi/account.rs
index eff4dbaebbc..ec6ce9d0a6a 100644
--- a/core/src/smartcontracts/isi/account.rs
+++ b/core/src/smartcontracts/isi/account.rs
@@ -581,7 +581,7 @@ pub mod query {
             let id = &self.id;
             let key = &self.key;
             iroha_logger::trace!(%id, %key);
-            wsv.map_account(id, |account| account.metadata.get(key).map(Clone::clone))?
+            wsv.map_account(id, |account| account.metadata.get(key).cloned())?
                 .ok_or_else(|| FindError::MetadataKey(key.clone()).into())
                 .map(Into::into)
         }
diff --git a/core/src/smartcontracts/isi/block.rs b/core/src/smartcontracts/isi/block.rs
index 08f4af0fb6e..8fa0a808bb3 100644
--- a/core/src/smartcontracts/isi/block.rs
+++ b/core/src/smartcontracts/isi/block.rs
@@ -18,9 +18,7 @@ impl ValidQuery for FindAllBlocks {
         wsv: &'wsv WorldStateView,
     ) -> Result<Box<dyn Iterator<Item = SignedBlock> + 'wsv>, QueryExecutionFail> {
         Ok(Box::new(
-            wsv.all_blocks()
-                .rev()
-                .map(|block| SignedBlock::clone(&block)),
+            wsv.all_blocks().rev().map(|block| (*block).clone()),
         ))
     }
 }
diff --git a/core/src/smartcontracts/isi/domain.rs b/core/src/smartcontracts/isi/domain.rs
index 2912b511b09..21de19e9e8d 100644
--- a/core/src/smartcontracts/isi/domain.rs
+++ b/core/src/smartcontracts/isi/domain.rs
@@ -334,7 +334,7 @@ pub mod query {
             let id = &self.id;
             let key = &self.key;
             iroha_logger::trace!(%id, %key);
-            wsv.map_domain(id, |domain| domain.metadata.get(key).map(Clone::clone))?
+            wsv.map_domain(id, |domain| domain.metadata.get(key).cloned())?
                 .ok_or_else(|| FindError::MetadataKey(key.clone()).into())
                 .map(Into::into)
         }
diff --git a/core/src/tx.rs b/core/src/tx.rs
index 01ee688edcf..89bacca20e0 100644
--- a/core/src/tx.rs
+++ b/core/src/tx.rs
@@ -58,9 +58,7 @@ impl AcceptedTransaction {
         match &transaction.payload().instructions {
             Executable::Instructions(instructions) => {
                 let instruction_count = instructions.len();
-                if u64::try_from(instruction_count).expect("`usize` should always fit into `u64`")
-                    > limits.max_instruction_number
-                {
+                if Self::len_u64(instruction_count) > limits.max_instruction_number {
                     return Err(AcceptTransactionFail::TransactionLimit(
                         TransactionLimitError {
                             reason: format!(
@@ -76,13 +74,9 @@ impl AcceptedTransaction {
 
             //
             // Should we allow infinite instructions in wasm? And deny only based on fuel and size
             Executable::Wasm(smart_contract) => {
+                let size_bytes = Self::len_u64(smart_contract.size_bytes());
                 let max_wasm_size_bytes = limits.max_wasm_size_bytes;
-                let size_bytes: u64 = smart_contract
-                    .size_bytes()
-                    .try_into()
-                    .expect("`u64` should always fit in `u64`");
-
                 if size_bytes > max_wasm_size_bytes {
                     return Err(AcceptTransactionFail::TransactionLimit(
                         TransactionLimitError {
@@ -113,6 +107,10 @@ impl AcceptedTransaction {
     pub(crate) fn merge_signatures(&mut self, other: Self) -> bool {
         self.0.merge_signatures(other.0)
     }
+
+    fn len_u64(instruction_count: usize) -> u64 {
+        u64::try_from(instruction_count).expect("`usize` should always fit into `u64`")
+    }
 }
 
 impl From<AcceptedTransaction> for SignedTransaction {
diff --git a/core/src/wsv.rs b/core/src/wsv.rs
index 2bbf58ff7ca..eab695bbd88 100644
--- a/core/src/wsv.rs
+++ b/core/src/wsv.rs
@@ -770,7 +770,7 @@ impl WorldStateView {
                     .assets
                     .get(id)
                     .ok_or_else(|| QueryExecutionFail::from(FindError::Asset(id.clone())))
-                    .map(Clone::clone)
+                    .cloned()
             },
         )?
     }
@@ -779,7 +779,6 @@ impl WorldStateView {
     ///
     /// # Errors
     /// - There is no account with such name.
-    #[allow(clippy::missing_panics_doc)]
     pub fn asset_or_insert(
         &mut self,
         asset_id: AssetId,
@@ -1102,7 +1101,7 @@ impl WorldStateView {
             .asset_definitions
             .get(asset_id)
             .ok_or_else(|| FindError::AssetDefinition(asset_id.clone()))
-            .map(Clone::clone)
+            .cloned()
     }
 
     /// Get total amount of [`Asset`].
diff --git a/crypto/src/hash.rs b/crypto/src/hash.rs
index bde357451c3..86505f7d202 100644
--- a/crypto/src/hash.rs
+++ b/crypto/src/hash.rs
@@ -69,8 +69,6 @@ impl Hash {
 impl Hash {
     /// Hash the given bytes.
     #[must_use]
-    // NOTE: Panic is predicated by implementation not user input
-    #[allow(clippy::missing_panics_doc)]
     pub fn new(bytes: impl AsRef<[u8]>) -> Self {
         let vec_hash = Blake2bVar::new(Self::LENGTH)
             .expect("Failed to initialize variable size hash")
diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs
index 5ae39a89ea2..5652c2965a8 100755
--- a/crypto/src/lib.rs
+++ b/crypto/src/lib.rs
@@ -113,10 +113,6 @@ impl FromStr for Algorithm {
 
 /// Options for key generation
 #[cfg(not(feature = "ffi_import"))]
-#[cfg_attr(
-    any(not(feature = "std"), feature = "ffi_import"),
-    allow(unused_tuple_struct_fields)
-)]
 #[derive(Debug, Clone)]
 pub enum KeyGenOption {
     /// Use seed
@@ -474,7 +470,6 @@ impl<'de> Deserialize<'de> for PrivateKey {
 }
 
 /// A session key derived from a key exchange. Will usually be used for a symmetric encryption afterwards
-#[allow(unused_tuple_struct_fields)]
 pub struct SessionKey(ConstVec<u8>);
 
 impl SessionKey {
diff --git a/crypto/src/signature/bls/implementation.rs b/crypto/src/signature/bls/implementation.rs
index d7084aa82c7..1cc3ae17efd 100644
--- a/crypto/src/signature/bls/implementation.rs
+++ b/crypto/src/signature/bls/implementation.rs
@@ -10,10 +10,6 @@ use sha2::Sha256;
 
 pub(super) const MESSAGE_CONTEXT: &[u8; 20] = b"for signing messages";
 
-// it is not unused? Why am I getting the unused lint here?
-#[allow(dead_code)]
-const PUBLICKEY_CONTEXT: &[u8; 47] = b"for signing public keys for proof of possession";
-
 use super::PRIVATE_KEY_SIZE;
 use crate::{
     Algorithm, ConstVec, Error, KeyGenOption, PrivateKey as IrohaPrivateKey,
@@ -61,6 +57,7 @@ pub trait BlsConfiguration {
     }
 
     fn hash_key(pk: &PublicKey, context: Option<&'static [u8]>) -> Self::SignatureGroup {
+        const PUBLICKEY_CONTEXT: &[u8; 47] = b"for signing public keys for proof of possession";
         let ctx: &[u8] = context.unwrap_or(PUBLICKEY_CONTEXT);
         Self::hash_to_point(pk.to_bytes(), ctx)
     }
diff --git a/data_model/src/isi.rs b/data_model/src/isi.rs
index 0dd3b178d9e..4bbfa0de0d7 100644
--- a/data_model/src/isi.rs
+++ b/data_model/src/isi.rs
@@ -1,9 +1,7 @@
 //! This library contains basic Iroha Special Instructions.
 
-#![allow(clippy::len_without_is_empty, clippy::unused_self)]
-
 #[cfg(not(feature = "std"))]
-use alloc::{boxed::Box, format, string::String, vec::Vec};
+use alloc::{format, string::String, vec::Vec};
 use core::fmt::{Debug, Display};
 
 use derive_more::{Constructor, DebugCustom, Display};
diff --git a/data_model/src/lib.rs b/data_model/src/lib.rs
index 0852f58d05f..b6b232b9184 100644
--- a/data_model/src/lib.rs
+++ b/data_model/src/lib.rs
@@ -3,8 +3,6 @@
 
 // Clippy bug
 #![allow(clippy::items_after_test_module)]
-// in no_std some code gets cfg-ed out, so we silence the warnings
-#![cfg_attr(not(feature = "std"), allow(unused, unused_tuple_struct_fields))]
 #![cfg_attr(not(feature = "std"), no_std)]
 
 #[cfg(not(feature = "std"))]
diff --git a/data_model/src/metadata.rs b/data_model/src/metadata.rs
index 563c8a143ff..f4ab99dc2b9 100644
--- a/data_model/src/metadata.rs
+++ b/data_model/src/metadata.rs
@@ -68,9 +68,7 @@ pub mod model {
     #[ffi_type(opaque)]
     #[serde(transparent)]
     #[repr(transparent)]
-    pub struct Metadata {
-        pub(super) map: btree_map::BTreeMap<Name, Value>,
-    }
+    pub struct Metadata(pub(super) btree_map::BTreeMap<Name, Value>);
 }
 
 /// Metadata related errors.
@@ -144,15 +142,13 @@ impl Metadata {
     /// Constructor.
     #[inline]
     pub fn new() -> Self {
-        Self {
-            map: UnlimitedMetadata::new(),
-        }
+        Self(UnlimitedMetadata::new())
     }
 
     /// Get the (expensive) cumulative length of all [`Value`]s housed
     /// in this map.
     pub fn nested_len(&self) -> usize {
-        self.map.values().map(|v| 1 + v.len()).sum()
+        self.0.values().map(|v| 1 + v.len()).sum()
     }
 
     /// Get metadata given path. If the path is malformed, or
@@ -161,10 +157,10 @@
     /// corresponding to that path.
     pub fn nested_get(&self, path: &Path) -> Option<&Value> {
         let key = path.last()?;
-        let mut map = &self.map;
+        let mut map = &self.0;
         for k in path.iter().take(path.len() - 1) {
             map = match map.get(k)? {
-                Value::LimitedMetadata(data) => &data.map,
+                Value::LimitedMetadata(data) => &data.0,
                 _ => return None,
             };
         }
@@ -173,12 +169,12 @@
 
     /// Check if the internal map contains the given key.
     pub fn contains(&self, key: &Name) -> bool {
-        self.map.contains_key(key)
+        self.0.contains_key(key)
     }
 
     /// Iterate over key/value pairs stored in the internal map.
     pub fn iter(&self) -> impl ExactSizeIterator<Item = (&Name, &Value)> {
-        self.map.iter()
+        self.0.iter()
     }
 
     /// Get the `Some(&Value)` associated to `key`. Return `None` if not found.
@@ -187,7 +183,14 @@
     where
         Name: Borrow<K>,
     {
-        self.map.get(key)
+        self.0.get(key)
     }
+
+    fn len_u64(&self) -> u64 {
+        self.0
+            .len()
+            .try_into()
+            .expect("`usize` should always fit into `u64`")
+    }
 
     /// Insert the given [`Value`] into the given path. If the path is
@@ -204,21 +207,17 @@
         value: Value,
         limits: Limits,
     ) -> Result<Option<Value>, MetadataError> {
-        if self.map.len() >= limits.max_len as usize {
+        if self.0.len() >= limits.max_len as usize {
             return Err(MetadataError::OverallSize(SizeError {
                 limits,
-                actual: self
-                    .map
-                    .len()
-                    .try_into()
-                    .expect("`usize` should always fit into `u64`"),
+                actual: self.len_u64(),
             }));
         }
         let key = path.last().ok_or(MetadataError::EmptyPath)?;
         let mut layer = self;
         for k in path.iter().take(path.len() - 1) {
             layer = match layer
-                .map
+                .0
                 .get_mut(k)
                 .ok_or_else(|| MetadataError::MissingSegment(k.clone()))?
             {
@@ -240,18 +239,14 @@
         value: Value,
         limits: Limits,
     ) -> Result<Option<Value>, MetadataError> {
-        if self.map.len() >= limits.max_len as usize && !self.map.contains_key(&key) {
+        if self.0.len() >= limits.max_len as usize && !self.0.contains_key(&key) {
             return Err(MetadataError::OverallSize(SizeError {
                 limits,
-                actual: self
-                    .map
-                    .len()
-                    .try_into()
-                    .expect("`usize` should always fit into `u64`"),
+                actual: self.len_u64(),
             }));
         }
 
         check_size_limits(&key, value.clone(), limits)?;
-        Ok(self.map.insert(key, value))
+        Ok(self.0.insert(key, value))
     }
 }
@@ -265,7 +260,7 @@
     where
         Name: Borrow<K>,
     {
-        self.map.remove(key)
+        self.0.remove(key)
     }
 
     /// Remove leaf node in metadata, given path. If the path is
@@ -274,10 +269,10 @@
     /// owned value corresponding to that path.
     pub fn nested_remove(&mut self, path: &Path) -> Option<Value> {
         let key = path.last()?;
-        let mut map = &mut self.map;
+        let mut map = &mut self.0;
         for k in path.iter().take(path.len() - 1) {
             map = match map.get_mut(k)? {
-                Value::LimitedMetadata(data) => &mut data.map,
+                Value::LimitedMetadata(data) => &mut data.0,
                 _ => return None,
             };
         }
diff --git a/data_model/src/visit.rs b/data_model/src/visit.rs
index 9f82b19baae..f20931a59ea 100644
--- a/data_model/src/visit.rs
+++ b/data_model/src/visit.rs
@@ -1,9 +1,6 @@
 //! Visitor that visits every node in Iroha syntax tree
 #![allow(missing_docs, clippy::missing_errors_doc)]
 
-#[cfg(not(feature = "std"))]
-use alloc::format;
-
 use iroha_crypto::PublicKey;
 
 use crate::{isi::Log, prelude::*};
diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json
index 3ac32350d43..d6af0cf7e7e 100644
--- a/docs/source/references/schema.json
+++ b/docs/source/references/schema.json
@@ -2418,14 +2418,7 @@
     "MerkleTree<SignedBlock>": {
       "Vec": "HashOf<SignedBlock>"
     },
-    "Metadata": {
-      "Struct": [
-        {
-          "name": "map",
-          "type": "SortedMap<Name, Value>"
-        }
-      ]
-    },
+    "Metadata": "SortedMap<Name, Value>",
     "MetadataChanged": {
       "Struct": [
         {
diff --git a/ffi/derive/src/attr_parse/derive.rs b/ffi/derive/src/attr_parse/derive.rs
index d1d36bb5832..7ee73ab107d 100644
--- a/ffi/derive/src/attr_parse/derive.rs
+++ b/ffi/derive/src/attr_parse/derive.rs
@@ -21,9 +21,7 @@ pub enum RustcDerive {
 
 impl RustcDerive {
     fn try_from_path(path: &syn2::Path) -> Option<Self> {
-        let Some(ident) = path.get_ident() else {
-            return None;
-        };
+        let ident = path.get_ident()?;
 
         match ident.to_string().as_str() {
             "Eq" => Some(Self::Eq),
diff --git a/ffi/derive/src/impl_visitor.rs b/ffi/derive/src/impl_visitor.rs
index 4d7f8dde79d..790728522e0 100644
--- a/ffi/derive/src/impl_visitor.rs
+++ b/ffi/derive/src/impl_visitor.rs
@@ -189,7 +189,7 @@ impl<'ast> FnDescriptor<'ast> {
         Some(Self {
             attrs: visitor.attrs,
             doc: visitor.doc,
-            self_ty: visitor.self_ty.map(Clone::clone),
+            self_ty: visitor.self_ty.cloned(),
 
             sig: visitor.sig.expect("Missing signature").clone(),
 
@@ -270,11 +270,8 @@ impl<'ast, 'emitter> FnVisitor<'ast, 'emitter> {
                 Span::call_site(),
             )
         });
-        self.input_args.push(Arg::new(
-            self.self_ty.map(Clone::clone),
-            arg_name,
-            src_type.clone(),
-        ));
+        self.input_args
+            .push(Arg::new(self.self_ty.cloned(), arg_name, src_type.clone()));
     }
 
     fn add_output_arg(&mut self, src_type: &'ast Type) {
@@ -282,7 +279,7 @@ impl<'ast, 'emitter> FnVisitor<'ast, 'emitter> {
         assert!(self.output_arg.is_none());
 
         let output_arg = Arg::new(
-            self.self_ty.map(Clone::clone),
+            self.self_ty.cloned(),
             Ident::new("__output", Span::call_site()),
             src_type.clone(),
         );
@@ -445,11 +442,7 @@ impl<'ast> Visit<'ast> for FnVisitor<'ast, '_> {
         );
 
         let handle_name = Ident::new("__handle", Span::call_site());
-        self.receiver = Some(Arg::new(
-            self.self_ty.map(Clone::clone),
-            handle_name,
-            src_type,
-        ));
+        self.receiver = Some(Arg::new(self.self_ty.cloned(), handle_name, src_type));
     }
 
     fn visit_pat_type(&mut self, node: &'ast syn2::PatType) {
diff --git a/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr b/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr
index 7a3e5b2340c..35cd9191330 100644
--- a/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr
+++ b/ffi/derive/tests/ui_fail/fallible_transmute_mut_ref.stderr
@@ -5,14 +5,14 @@ error[E0277]: the trait bound `Wrapper: InfallibleTransmute` is not satisfied
   | ^^^^^^^^^^^^^ the trait `InfallibleTransmute` is not implemented for `Wrapper`
   |
   = help: the following other types implement trait `InfallibleTransmute`:
-            ManuallyDrop<R>
-            [R; N]
+            i8
             i16
             i32
             i64
-            i8
+            u8
             u16
             u32
+            u64
           and $N others
   = note: required for `&mut Wrapper` to implement `Ir`
   = note: required for `&mut Wrapper` to implement `FfiType`
diff --git a/ffi/derive/tests/ui_fail/non_robust_repr_c.stderr b/ffi/derive/tests/ui_fail/non_robust_repr_c.stderr
index 179f1d1ef59..b0a40e6df39 100644
--- a/ffi/derive/tests/ui_fail/non_robust_repr_c.stderr
+++ b/ffi/derive/tests/ui_fail/non_robust_repr_c.stderr
@@ -5,14 +5,14 @@ error[E0277]: the trait bound `bool: ReprC` is not satisfied
   | ^^^^^^^ the trait `ReprC` is not implemented for `bool`
   |
   = help: the following other types implement trait `ReprC`:
-            *const R
-            *mut R
-            FfiI128
-            FfiTuple10
-            FfiTuple11
-            FfiTuple12
-            FfiTuple1
-            FfiTuple2
+            i8
+            i16
+            i32
+            i64
+            u8
+            u16
+            u32
+            u64
           and $N others
   = help: see issue #48214
   = help: add `#![feature(trivial_bounds)]` to the crate attributes to enable
diff --git a/macro/derive/tests/enum_from_variant_attrs.rs b/macro/derive/tests/enum_from_variant_attrs.rs
index dc27d66aeab..0c508d71808 100644
--- a/macro/derive/tests/enum_from_variant_attrs.rs
+++ b/macro/derive/tests/enum_from_variant_attrs.rs
@@ -7,7 +7,7 @@ struct Variant4;
 struct Variant5;
 struct Variant6;
 
-#[allow(unused, unused_tuple_struct_fields)]
+#[allow(unused)]
 #[derive(iroha_derive::FromVariant)]
 enum Enum {
     Variant1(Box<Variant1>),
diff --git a/schema/derive/src/lib.rs b/schema/derive/src/lib.rs
index 3f801b67459..91a970c8ef4 100644
--- a/schema/derive/src/lib.rs
+++ b/schema/derive/src/lib.rs
@@ -116,7 +116,7 @@ struct IntoSchemaVariant {
 impl FromVariant for IntoSchemaVariant {
     fn from_variant(variant: &syn2::Variant) -> darling::Result<Self> {
         let ident = variant.ident.clone();
-        let discriminant = variant.discriminant.as_ref().map(|(_, expr)| expr.clone());
+        let discriminant = variant.discriminant.clone().map(|(_, expr)| expr);
         let fields = IntoSchemaFields::try_from(&variant.fields)?;
         let codec_attrs = CodecAttributes::from_attributes(&variant.attrs)?;
 
diff --git a/schema/tests/schema_json.rs b/schema/tests/schema_json.rs
index 4a98f3844ce..7b5f8727830 100644
--- a/schema/tests/schema_json.rs
+++ b/schema/tests/schema_json.rs
@@ -1,7 +1,5 @@
 //! This test checks how the json-serialized schema looks like.
-
 #![allow(dead_code)]
-#![allow(unused_tuple_struct_fields)]
 
 use iroha_schema::IntoSchema;
 use serde_json::json;
diff --git a/smart_contract/executor/derive/src/validate.rs b/smart_contract/executor/derive/src/validate.rs
index 3b7df471f0f..46d3693b62f 100644
--- a/smart_contract/executor/derive/src/validate.rs
+++ b/smart_contract/executor/derive/src/validate.rs
@@ -37,7 +37,7 @@ enum ValidateAttribute {
 }
 
 impl FromAttributes for ValidateAttribute {
-    // we use `Option::or` to select the first specified condition in case of duplicates
+    // NOTE: we use `Option::or` to select the first specified condition in case of duplicates
     // but we still _want_ to validate that each attribute parses successfully
     // this is to ensure that we provide the user with as much validation as possible, instead of bailing out early
     // `Option::or_else` would NOT work here, as it would not validate conditions after the first valid one
diff --git a/smart_contract/executor/src/default/tokens.rs b/smart_contract/executor/src/default/tokens.rs
index df79280ba40..97382cc7806 100644
--- a/smart_contract/executor/src/default/tokens.rs
+++ b/smart_contract/executor/src/default/tokens.rs
@@ -12,8 +12,9 @@ use crate::permission::{self, Token as _};
 /// Used to iterate over tokens to validate `Grant` and `Revoke` instructions.
 ///
 ///
-/// TODO: Replace with procedural macro. Example:
-/// ```
+/// Example:
+///
+/// ```ignore
 /// mod tokens {
 ///     use std::borrow::ToOwned;
 ///
diff --git a/telemetry/derive/src/lib.rs b/telemetry/derive/src/lib.rs
index eff6d509c62..c0420a4a868 100644
--- a/telemetry/derive/src/lib.rs
+++ b/telemetry/derive/src/lib.rs
@@ -59,8 +59,7 @@ fn arg_metrics(input: &Punctuated
-#[allow(unused_tuple_struct_fields)]
-struct MetricSpecs(Vec<Metric>); // `HashSet` — idiomatic; slow
+struct MetricSpecs(#[allow(dead_code)] Vec<Metric>); // `HashSet` — idiomatic; slow
 
 impl Parse for MetricSpecs {
     fn parse(input: syn2::parse::ParseStream) -> syn2::Result<Self> {
diff --git a/tools/swarm/src/compose.rs b/tools/swarm/src/compose.rs
index 80dc51f7ac4..a8e538ea328 100644
--- a/tools/swarm/src/compose.rs
+++ b/tools/swarm/src/compose.rs
@@ -240,7 +240,7 @@ impl From<PeerEnv> for FullPeerEnv {
             .genesis_private_key
             .map_or((None, None), |private_key| {
                 (
-                    Some(private_key).map(SerializeAsJsonStr),
+                    Some(SerializeAsJsonStr(private_key)),
                     Some(PATH_TO_GENESIS.to_string()),
                 )
             });
@@ -515,7 +515,7 @@ mod tests {
             let json = serde_json::to_string(&peer_env).expect("Must be serializable");
             let env: HashMap<_, _> =
                 serde_json::from_str(&json).expect("Must be deserializable into a hash map");
-            let untouched = env.keys().map(Clone::clone).collect();
+            let untouched = env.keys().cloned().collect();
             Self {
                 env,
                 untouched: RefCell::new(untouched),
@@ -537,11 +537,7 @@ mod tests {
                 .to_str()
                 .ok_or_else(|| VarError::NotUnicode(key.as_ref().into()))?;
 
-            let res = self
-                .env
-                .get(key_str)
-                .ok_or(VarError::NotPresent)
-                .map(std::clone::Clone::clone);
+            let res = self.env.get(key_str).ok_or(VarError::NotPresent).cloned();
 
             if res.is_ok() {
                 self.untouched.borrow_mut().remove(key_str);
diff --git a/version/derive/tests/scale.rs b/version/derive/tests/scale.rs
index 925077b8fcf..7b57b9bde28 100644
--- a/version/derive/tests/scale.rs
+++ b/version/derive/tests/scale.rs
@@ -10,8 +10,6 @@ mod tests {
     use serde::{Deserialize, Serialize};
 
     mod model_1 {
-        #![allow(unused_results)]
-
         use super::*;
 
         declare_versioned!(VersionedMessage 1..3, Debug, Clone, iroha_macro::FromVariant);
@@ -26,8 +24,6 @@ mod tests {
     }
 
     mod model_2 {
-        #![allow(unused_results)]
-
         use super::*;
 
         declare_versioned!(VersionedMessage 1..4, Debug, Clone, iroha_macro::FromVariant);
diff --git a/wasm_codec/derive/src/lib.rs b/wasm_codec/derive/src/lib.rs
index 2053fd0d8e3..697c0b906f3 100644
--- a/wasm_codec/derive/src/lib.rs
+++ b/wasm_codec/derive/src/lib.rs
@@ -333,7 +333,7 @@ enum ReturnType {
     /// [`Result`] type with [`Ok`] and [`Err`] types respectively
     Result(Option<syn::Type>, ErrType),
     /// Something other than [`Result`]
-    #[allow(unused_tuple_struct_fields)] // May be used in future
+    #[allow(dead_code)] // May be used in future
     Other(syn::Type),
 }
 
@@ -342,7 +342,7 @@ enum ErrType {
     /// `wasmtime::Error` error type
     WasmtimeError,
     /// Something other than `wasmtime::Error`
-    #[allow(unused_tuple_struct_fields)] // May be used in future
+    #[allow(dead_code)] // May be used in future
     Other(syn::Type),
 }
 
diff --git a/wasm_codec/src/lib.rs b/wasm_codec/src/lib.rs
index ecca6e08d47..3e80ad3e64c 100644
--- a/wasm_codec/src/lib.rs
+++ b/wasm_codec/src/lib.rs
@@ -50,8 +50,6 @@ pub fn decode_from_memory(
 ///
 /// - Failed to decode object
 /// - Failed to call `dealloc_fn`
-// NOTE: Panic is predicated by implementation not user input
-#[allow(clippy::missing_panics_doc)]
 pub fn decode_with_length_prefix_from_memory<
     C: wasmtime::AsContextMut,
     T: DecodeAll + std::fmt::Debug,
@@ -90,8 +88,6 @@ pub fn decode_with_length_prefix_from_memory<
 ///
 /// - If failed to call `alloc_fn`
 /// - If failed to write into the `memory`
-// NOTE: Panic is predicated by implementation not user input
-#[allow(clippy::missing_panics_doc)]
 pub fn encode_into_memory(
     obj: &T,
     memory: &wasmtime::Memory,
@@ -123,8 +119,6 @@ pub fn encode_into_memory(
 /// `WebAssembly` it's not possible to return two values from a wasm function without some
 /// shenanignas. In those cases, only one value is sent which is pointer to the allocation
 /// with the first element being the length of the encoded object following it.
-// NOTE: Panic is predicated by implementation not user input
-#[allow(clippy::missing_panics_doc)]
 pub fn encode_with_length_prefix<T: Encode>(obj: &T) -> Vec<u8> {
     // Compile-time size check
     #[allow(clippy::let_unit_value)]