Skip to content

Commit

Permalink
Feature/ipfs key tracking (#9)
Browse files Browse the repository at this point in the history
* Playing around with tracking IPFS swarm keys using the substrate node

* Add in events

* rename pallet

* Fix broken test due to missing std build config

* Test apparatus

* Add tests for update, rotate and scheduled rotate

* Parking Readme and benchmarking impl

* Add weight calculations

* Update weights documentation

* Fixes from merge

* Format fixes
  • Loading branch information
mattdean-digicatapult authored Nov 29, 2021
1 parent db2f209 commit 53d837d
Show file tree
Hide file tree
Showing 20 changed files with 681 additions and 60 deletions.
36 changes: 35 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

40 changes: 36 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,18 +69,18 @@ For `dev` chain, the network only contains a node for `Alice` so other nodes wil

### Calculating weights

To calculate the weights for the `pallet_simple_nft` you first must ensure the node is built with the benchmarking feature enabled:
To calculate the weights for a pallet you must first ensure the node is built with the benchmarking feature enabled:

```bash
cargo build --release --features runtime-benchmarks
```

Then you can run the benchmark tool with
Then you can run the benchmark tool, for example with:

```bash
./target/release/vitalam-node benchmark \
--pallet 'pallet_simple_nft' \
--extrinsic 'run_process' \
--extrinsic '*' \
--output ./weights/
```

Expand All @@ -101,7 +101,8 @@ In order to use the API within `polkadot.js` you'll need to configure the follow
{
"Address": "MultiAddress",
"LookupSource": "MultiAddress",
"PeerId": "(Vec<u8>", # for node-authorization pallet
"PeerId": "Vec<u8>",
"Key": "Vec<u8>",
"TokenId": "u128",
"TokenMetadataKey": "[u8; 32]",
"TokenMetadataValue": "MetadataValue",
Expand Down Expand Up @@ -152,6 +153,37 @@ Pallet tests can be run with:
cargo test -p pallet-simple-nft
```

### IPFSKey pallet

The `IPFSKey` pallet facilitates the generation and scheduled rotation of a fixed length symmetric encryption key that is distributed to all chain participants. In this instance the key is to be used as an IPFS swarm key.

Two storage values are exposed by this pallet:

```rust
#[pallet::storage]
#[pallet::getter(fn key)]
pub(super) type Key<T: Config> = StorageValue<_, Vec<u8>, ValueQuery>;

#[pallet::storage]
#[pallet::getter(fn key_schedule)]
pub(super) type KeyScheduleId<T: Config> = StorageValue<_, Option<Vec<u8>>, ValueQuery>;
```

The first exposes the maintained swarm key, whilst the latter exposes the handle used with the `pallet-scheduler` frame pallet for setting a rotation schedule. This schedule is configured for a 7-day rotation.

Two extrinsics are exposed by this pallet, one for updating a shared symmetric key and one for forcing a rotation of the key based on a configured randomness source. In the `runtime` in this repository these can only be called by `sudo`:

```rust
pub(super) fn update_key(origin: OriginFor<T>, new_key: Vec<u8>) -> DispatchResultWithPostInfo { ... }
pub(super) fn rotate_key(origin: OriginFor<T>) -> DispatchResultWithPostInfo { ... }
```

Pallet tests can be run with:

```bash
cargo test -p pallet-symmetric-key
```

## Repo Structure

A Substrate project consists of a number of components that are spread across a few
Expand Down
2 changes: 1 addition & 1 deletion node/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ edition = '2018'
license = 'Apache-2.0'
repository = 'https://github.com/digicatapult/vitalam-node/'
name = 'vitalam-node'
version = '2.2.0'
version = '2.3.0'

[[bin]]
name = 'vitalam-node'
Expand Down
8 changes: 4 additions & 4 deletions node/src/chain_spec.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@ use sp_core::{sr25519, Pair, Public};
use sp_finality_grandpa::AuthorityId as GrandpaId;
use sp_runtime::traits::{IdentifyAccount, Verify};
use vitalam_node_runtime::{
AccountId, AuraConfig, BalancesConfig, GenesisConfig, GrandpaConfig, NodeAuthorizationConfig, MembershipConfig, Signature,
SudoConfig, SystemConfig, WASM_BINARY,
AccountId, AuraConfig, BalancesConfig, GenesisConfig, GrandpaConfig, MembershipConfig, NodeAuthorizationConfig,
Signature, SudoConfig, SystemConfig, WASM_BINARY,
};
const DEFAULT_PROTOCOL_ID: &str = "vam";

Expand Down Expand Up @@ -415,8 +415,8 @@ fn testnet_genesis(
}),
pallet_node_authorization: node_authorization_config,
pallet_membership: Some(MembershipConfig {
members: endowed_accounts.iter().map(|k| k.clone()).collect(),
.. Default::default()
members: endowed_accounts.iter().map(|k| k.clone()).collect(),
..Default::default()
}),
}
}
8 changes: 4 additions & 4 deletions pallets/simple-nft/src/benchmarking.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,13 +36,13 @@ fn mk_inputs<T: Config>(i: u32) -> Result<Vec<T::TokenId>, &'static str> {
Ok(inputs)
}

fn mk_outputs<T: Config>(o: u32) -> Result<Vec<(T::AccountId, BTreeMap<T::TokenMetadataKey, T::TokenMetadataValue>)>, &'static str> {
fn mk_outputs<T: Config>(
o: u32,
) -> Result<Vec<(T::AccountId, BTreeMap<T::TokenMetadataKey, T::TokenMetadataValue>)>, &'static str> {
let owner: T::AccountId = account("owner", 0, SEED);
let mut metadata = BTreeMap::new();
metadata.insert(T::TokenMetadataKey::default(), T::TokenMetadataValue::default());
let outputs = (0..o)
.map(|_| (owner.clone(), metadata.clone()))
.collect::<Vec<_>>();
let outputs = (0..o).map(|_| (owner.clone(), metadata.clone())).collect::<Vec<_>>();

Ok(outputs)
}
Expand Down
5 changes: 4 additions & 1 deletion pallets/simple-nft/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,10 @@ pub mod pallet {

// check metadata count
for output in outputs.iter() {
ensure!(output.1.len() <= T::MaxMetadataCount::get() as usize, Error::<T>::TooManyMetadataItems);
ensure!(
output.1.len() <= T::MaxMetadataCount::get() as usize,
Error::<T>::TooManyMetadataItems
);
}

// check origin owns inputs and that inputs have not been burnt
Expand Down
4 changes: 2 additions & 2 deletions pallets/simple-nft/src/mock.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
// Creating mock runtime here

use codec::{Decode, Encode};
use crate as pallet_simple_nft;
use codec::{Decode, Encode};
use frame_support::parameter_types;
use frame_system as system;
use sp_core::H256;
Expand Down Expand Up @@ -69,7 +69,7 @@ parameter_types! {
pub enum MetadataValue {
File(Hash),
Literal([u8; 1]),
None
None,
}

impl Default for MetadataValue {
Expand Down
50 changes: 34 additions & 16 deletions pallets/simple-nft/src/tests.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
// Tests to be written here

use crate::{mock::*, Error, Token};
use frame_support::{assert_err, assert_ok};
use sp_core::H256;
use sp_std::collections::btree_map::BTreeMap;
use sp_std::iter::FromIterator;
use sp_core::H256;
use frame_support::{assert_err, assert_ok};
// use frame_support::traits::OnRuntimeUpgrade;

#[test]
Expand Down Expand Up @@ -97,15 +97,14 @@ fn it_works_for_creating_token_with_no_metadata_value() {
});
}


#[test]
fn it_works_for_creating_token_with_multiple_metadata_items() {
new_test_ext().execute_with(|| {
// create a token with no parents
let metadata = BTreeMap::from_iter(vec![
(0, MetadataValue::File(H256::zero())),
(1, MetadataValue::Literal([0])),
(2, MetadataValue::None)
(0, MetadataValue::File(H256::zero())),
(1, MetadataValue::Literal([0])),
(2, MetadataValue::None),
]);
assert_ok!(SimpleNFTModule::run_process(
Origin::signed(1),
Expand Down Expand Up @@ -192,7 +191,6 @@ fn it_works_for_creating_many_token() {
});
}


#[test]
fn it_works_for_creating_many_token_with_varied_metadata() {
new_test_ext().execute_with(|| {
Expand Down Expand Up @@ -287,10 +285,11 @@ fn it_works_for_destroying_many_tokens() {
let metadata1 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
let metadata2 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
SimpleNFTModule::run_process(
Origin::signed(1),
Vec::new(),
vec![(1, metadata0.clone()), (1, metadata1.clone()), (1, metadata2.clone())]
).unwrap();
Origin::signed(1),
Vec::new(),
vec![(1, metadata0.clone()), (1, metadata1.clone()), (1, metadata2.clone())],
)
.unwrap();
// create a token with no parents
assert_ok!(SimpleNFTModule::run_process(
Origin::signed(1),
Expand Down Expand Up @@ -352,7 +351,11 @@ fn it_works_for_creating_and_destroy_single_tokens() {
let metadata1 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
SimpleNFTModule::run_process(Origin::signed(1), Vec::new(), vec![(1, metadata0.clone())]).unwrap();
// create a token with a parent
assert_ok!(SimpleNFTModule::run_process(Origin::signed(1), vec![1], vec![(2, metadata1.clone())]));
assert_ok!(SimpleNFTModule::run_process(
Origin::signed(1),
vec![1],
vec![(2, metadata1.clone())]
));
// assert 1 more token was created
assert_eq!(SimpleNFTModule::last_token(), 2);
// get the old token
Expand Down Expand Up @@ -394,7 +397,12 @@ fn it_works_for_creating_and_destroy_many_tokens() {
let metadata1 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
let metadata2 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
let metadata3 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
SimpleNFTModule::run_process(Origin::signed(1), Vec::new(), vec![(1, metadata0.clone()), (1, metadata1.clone())]).unwrap();
SimpleNFTModule::run_process(
Origin::signed(1),
Vec::new(),
vec![(1, metadata0.clone()), (1, metadata1.clone())],
)
.unwrap();
// create a token with 2 parents
assert_ok!(SimpleNFTModule::run_process(
Origin::signed(1),
Expand Down Expand Up @@ -534,7 +542,12 @@ fn it_fails_for_destroying_multiple_tokens_with_burnt_token() {
let metadata0 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
let metadata1 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
let metadata2 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
SimpleNFTModule::run_process(Origin::signed(1), Vec::new(), vec![(1, metadata0.clone()), (1, metadata1.clone())]).unwrap();
SimpleNFTModule::run_process(
Origin::signed(1),
Vec::new(),
vec![(1, metadata0.clone()), (1, metadata1.clone())],
)
.unwrap();
SimpleNFTModule::run_process(Origin::signed(1), vec![1], Vec::new()).unwrap();
// get old token
let token_1 = SimpleNFTModule::tokens_by_id(1);
Expand All @@ -558,7 +571,12 @@ fn it_fails_for_destroying_multiple_tokens_with_burnt_token() {
fn it_fails_for_creating_single_token_with_too_many_metadata_items() {
new_test_ext().execute_with(|| {
let metadata0 = BTreeMap::from_iter(vec![(0, MetadataValue::None)]);
let metadata_too_many = BTreeMap::from_iter(vec![(0, MetadataValue::None), (1, MetadataValue::None), (2, MetadataValue::None), (3, MetadataValue::None)]);
let metadata_too_many = BTreeMap::from_iter(vec![
(0, MetadataValue::None),
(1, MetadataValue::None),
(2, MetadataValue::None),
(3, MetadataValue::None),
]);
SimpleNFTModule::run_process(Origin::signed(1), Vec::new(), vec![(1, metadata0.clone())]).unwrap();
// get old token
let token = SimpleNFTModule::tokens_by_id(1);
Expand All @@ -572,4 +590,4 @@ fn it_fails_for_creating_single_token_with_too_many_metadata_items() {
// assert old token hasn't changed
assert_eq!(token, SimpleNFTModule::tokens_by_id(1));
});
}
}
20 changes: 10 additions & 10 deletions pallets/simple-nft/src/weights.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,16 +27,16 @@ pub trait WeightInfo {
/// Weight functions for pallet_simple_nft.
pub struct SubstrateWeight<T>(PhantomData<T>);
impl<T: frame_system::Config> WeightInfo for SubstrateWeight<T> {
fn run_process(i: usize, o: usize) -> Weight {
(104_267_000 as Weight)
// Standard Error: 6_647_000
.saturating_add((13_768_000 as Weight).saturating_mul(i as Weight))
.saturating_add(T::DbWeight::get().reads(1 as Weight))
.saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(i as Weight)))
.saturating_add(T::DbWeight::get().writes(1 as Weight))
.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(i as Weight)))
.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(o as Weight)))
}
fn run_process(i: usize, o: usize) -> Weight {
(104_267_000 as Weight)
// Standard Error: 6_647_000
.saturating_add((13_768_000 as Weight).saturating_mul(i as Weight))
.saturating_add(T::DbWeight::get().reads(1 as Weight))
.saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(i as Weight)))
.saturating_add(T::DbWeight::get().writes(1 as Weight))
.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(i as Weight)))
.saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(o as Weight)))
}
}

impl WeightInfo for () {
Expand Down
Loading

0 comments on commit 53d837d

Please sign in to comment.