Merge pull request #69 from gnosis/feat/hashi-prover
feat: hashi prover
Showing 20 changed files with 898 additions and 271 deletions.
@@ -0,0 +1,29 @@
// SPDX-License-Identifier: LGPL-3.0-only
pragma solidity ^0.8.0;

/**
 * @title IHashiProver
 */
interface IHashiProver {
    struct AccountAndStorageProof {
        uint256 chainId;
        uint256 blockNumber;
        bytes blockHeader;
        uint256 ancestralBlockNumber;
        bytes[] ancestralBlockHeaders;
        address account;
        bytes accountProof;
        bytes32 storageHash;
        bytes32[] storageKeys;
        bytes[] storageProof;
    }

    error AncestralBlockHeadersLengthReached();
    error BlockHeaderNotFound();
    error ConflictingBlockHeader(uint256 blockNumber, bytes32 ancestralBlockHeaderHash, bytes32 blockHeaderHash);
    error InvalidAccount();
    error InvalidBlockHeader();
    error InvalidBlockHeaderLength();
    error InvalidStorageHash();
    error InvalidStorageProofParams();
}
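
For context, a minimal sketch of how a contract could consume the proof struct defined above. `StorageProofConsumer`, `submitProof`, and the import path are illustrative names, not part of this PR; a real prover contract would verify the block header against Hashi-reported headers and walk the account and storage Merkle-Patricia proofs rather than just reading the struct's fields.

```solidity
// SPDX-License-Identifier: LGPL-3.0-only
pragma solidity ^0.8.0;

// Import path is assumed for illustration.
import { IHashiProver } from "./IHashiProver.sol";

/// Illustrative consumer (not part of this PR): accepts the proof struct and
/// surfaces its metadata.
contract StorageProofConsumer {
    event ProofReceived(uint256 chainId, address account, uint256 storageKeyCount);

    function submitProof(IHashiProver.AccountAndStorageProof calldata proof) external {
        emit ProofReceived(proof.chainId, proof.account, proof.storageKeys.length);
    }
}
```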
packages/evm/contracts/libraries/MerklePatriciaProofVerifier.sol (248 additions, 0 deletions)
@@ -0,0 +1,248 @@
pragma solidity ^0.8.20;
/* solhint-disable */

import { RLPReader } from "solidity-rlp/contracts/RLPReader.sol";

// Copied from here: https://github.com/defi-wonderland/safe-liveness/blob/dev/solidity/libraries/MerklePatriciaProofVerifier.sol
library MerklePatriciaProofVerifier {
    using RLPReader for RLPReader.RLPItem;
    using RLPReader for bytes;

    /// @dev Validates a Merkle-Patricia-Trie proof.
    ///      If the proof proves the inclusion of some key-value pair in the
    ///      trie, the value is returned. Otherwise, i.e. if the proof proves
    ///      the exclusion of a key from the trie, an empty byte array is
    ///      returned.
    /// @param rootHash is the Keccak-256 hash of the root node of the MPT.
    /// @param path is the key of the node whose inclusion/exclusion we are
    ///      proving.
    /// @param stack is the stack of MPT nodes (starting with the root) that
    ///      need to be traversed during verification.
    /// @return value whose inclusion is proved or an empty byte array for
    ///      a proof of exclusion
    function extractProofValue(
        bytes32 rootHash,
        bytes memory path,
        RLPReader.RLPItem[] memory stack
    ) internal pure returns (bytes memory value) {
        bytes memory mptKey = _decodeNibbles(path, 0);
        uint256 mptKeyOffset = 0;

        bytes32 nodeHashHash;
        RLPReader.RLPItem[] memory node;

        RLPReader.RLPItem memory rlpValue;

        if (stack.length == 0) {
            // Root hash of empty Merkle-Patricia-Trie
            require(rootHash == 0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421);
            return new bytes(0);
        }

        // Traverse stack of nodes starting at root.
        for (uint256 i = 0; i < stack.length; i++) {
            // We use the fact that an rlp encoded list consists of some
            // encoding of its length plus the concatenation of its
            // *rlp-encoded* items.

            // The root node is hashed with Keccak-256 ...
            if (i == 0 && rootHash != stack[i].rlpBytesKeccak256()) {
                revert();
            }
            // ... whereas all other nodes are hashed with the MPT
            // hash function.
            if (i != 0 && nodeHashHash != _mptHashHash(stack[i])) {
                revert();
            }
            // We verified that stack[i] has the correct hash, so we
            // may safely decode it.
            node = stack[i].toList();

            if (node.length == 2) {
                // Extension or Leaf node

                bool isLeaf;
                bytes memory nodeKey;
                (isLeaf, nodeKey) = _merklePatriciaCompactDecode(node[0].toBytes());

                uint256 prefixLength = _sharedPrefixLength(mptKeyOffset, mptKey, nodeKey);
                mptKeyOffset += prefixLength;

                if (prefixLength < nodeKey.length) {
                    // Proof claims divergent extension or leaf. (Only
                    // relevant for proofs of exclusion.)
                    // An Extension/Leaf node is divergent iff it 'skips' over
                    // the point at which a Branch node should have been had the
                    // excluded key been included in the trie.
                    // Example: Imagine a proof of exclusion for path [1, 4],
                    // where the current node is a Leaf node with
                    // path [1, 3, 3, 7]. For [1, 4] to be included, there
                    // should have been a Branch node at [1] with a child
                    // at 3 and a child at 4.

                    // Sanity check
                    if (i < stack.length - 1) {
                        // divergent node must come last in proof
                        revert();
                    }

                    return new bytes(0);
                }

                if (isLeaf) {
                    // Sanity check
                    if (i < stack.length - 1) {
                        // leaf node must come last in proof
                        revert();
                    }

                    if (mptKeyOffset < mptKey.length) {
                        return new bytes(0);
                    }

                    rlpValue = node[1];
                    return rlpValue.toBytes();
                } else {
                    // extension
                    // Sanity check
                    if (i == stack.length - 1) {
                        // shouldn't be at last level
                        revert();
                    }

                    if (!node[1].isList()) {
                        // rlp(child) was at least 32 bytes. node[1] contains
                        // Keccak256(rlp(child)).
                        nodeHashHash = node[1].payloadKeccak256();
                    } else {
                        // rlp(child) was less than 32 bytes. node[1] contains
                        // rlp(child).
                        nodeHashHash = node[1].rlpBytesKeccak256();
                    }
                }
            } else if (node.length == 17) {
                // Branch node

                if (mptKeyOffset != mptKey.length) {
                    // we haven't consumed the entire path, so we need to look at a child
                    uint8 nibble = uint8(mptKey[mptKeyOffset]);
                    mptKeyOffset += 1;
                    if (nibble >= 16) {
                        // each element of the path has to be a nibble
                        revert();
                    }

                    if (_isEmptyBytesequence(node[nibble])) {
                        // Sanity
                        if (i != stack.length - 1) {
                            // leaf node should be at last level
                            revert();
                        }

                        return new bytes(0);
                    } else if (!node[nibble].isList()) {
                        nodeHashHash = node[nibble].payloadKeccak256();
                    } else {
                        nodeHashHash = node[nibble].rlpBytesKeccak256();
                    }
                } else {
                    // we have consumed the entire mptKey, so we need to look at what's contained in this node.

                    // Sanity
                    if (i != stack.length - 1) {
                        // should be at last level
                        revert();
                    }

                    return node[16].toBytes();
                }
            }
        }
    }

    /// @dev Computes the hash of the Merkle-Patricia-Trie hash of the RLP item.
    ///      Merkle-Patricia-Tries use a weird 'hash function' that outputs
    ///      *variable-length* hashes: If the item is shorter than 32 bytes,
    ///      the MPT hash is the item. Otherwise, the MPT hash is the
    ///      Keccak-256 hash of the item.
    ///      The easiest way to compare variable-length byte sequences is
    ///      to compare their Keccak-256 hashes.
    /// @param item The RLP item to be hashed.
    /// @return Keccak-256(MPT-hash(item))
    function _mptHashHash(RLPReader.RLPItem memory item) private pure returns (bytes32) {
        if (item.len < 32) {
            return item.rlpBytesKeccak256();
        } else {
            return keccak256(abi.encodePacked(item.rlpBytesKeccak256()));
        }
    }

    function _isEmptyBytesequence(RLPReader.RLPItem memory item) private pure returns (bool) {
        if (item.len != 1) {
            return false;
        }
        uint8 b;
        uint256 memPtr = item.memPtr;
        assembly {
            b := byte(0, mload(memPtr))
        }
        return b == 0x80; /* empty byte string */
    }

    function _merklePatriciaCompactDecode(
        bytes memory compact
    ) private pure returns (bool isLeaf, bytes memory nibbles) {
        require(compact.length > 0);
        uint256 first_nibble = (uint8(compact[0]) >> 4) & 0xF;
        uint256 skipNibbles;
        if (first_nibble == 0) {
            skipNibbles = 2;
            isLeaf = false;
        } else if (first_nibble == 1) {
            skipNibbles = 1;
            isLeaf = false;
        } else if (first_nibble == 2) {
            skipNibbles = 2;
            isLeaf = true;
        } else if (first_nibble == 3) {
            skipNibbles = 1;
            isLeaf = true;
        } else {
            // Not supposed to happen!
            revert();
        }
        return (isLeaf, _decodeNibbles(compact, skipNibbles));
    }

    function _decodeNibbles(bytes memory compact, uint256 skipNibbles) private pure returns (bytes memory nibbles) {
        require(compact.length > 0);

        uint256 length = compact.length * 2;
        require(skipNibbles <= length);
        length -= skipNibbles;

        nibbles = new bytes(length);
        uint256 nibblesLength = 0;

        for (uint256 i = skipNibbles; i < skipNibbles + length; i += 1) {
            if (i % 2 == 0) {
                nibbles[nibblesLength] = bytes1((uint8(compact[i / 2]) >> 4) & 0xF);
            } else {
                nibbles[nibblesLength] = bytes1((uint8(compact[i / 2]) >> 0) & 0xF);
            }
            nibblesLength += 1;
        }

        assert(nibblesLength == nibbles.length);
    }

    function _sharedPrefixLength(uint256 xsOffset, bytes memory xs, bytes memory ys) private pure returns (uint256) {
        uint256 i;
        for (i = 0; i + xsOffset < xs.length && i < ys.length; i++) {
            if (xs[i + xsOffset] != ys[i]) {
                return i;
            }
        }
        return i;
    }
}
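
To show how the library is meant to be driven, here is a hedged usage sketch that reads a single storage slot given an account's storage root and an RLP-encoded storage proof. `StorageSlotReader`, `readSlot`, and the local import path are assumptions for illustration, not part of this PR; the actual HashiProver contract may wire things together differently.

```solidity
// SPDX-License-Identifier: LGPL-3.0-only
pragma solidity ^0.8.20;

import { RLPReader } from "solidity-rlp/contracts/RLPReader.sol";
// Import path is assumed for illustration.
import { MerklePatriciaProofVerifier } from "./MerklePatriciaProofVerifier.sol";

/// Illustrative helper (not part of this PR): proves one storage slot against
/// an account's storage root using the verifier above.
library StorageSlotReader {
    using RLPReader for bytes;
    using RLPReader for RLPReader.RLPItem;

    /// @param storageHash storage root of the account (e.g. taken from a verified account proof)
    /// @param slot storage slot to read
    /// @param storageProof RLP-encoded list of trie nodes for that slot
    /// @return value the RLP-encoded slot value, or empty bytes for a proof of exclusion
    function readSlot(
        bytes32 storageHash,
        bytes32 slot,
        bytes memory storageProof
    ) internal pure returns (bytes memory value) {
        // The storage trie keys entries by keccak256 of the 32-byte slot.
        bytes memory path = abi.encodePacked(keccak256(abi.encode(slot)));
        // Decode the proof into the node stack expected by extractProofValue.
        RLPReader.RLPItem[] memory stack = storageProof.toRlpItem().toList();
        value = MerklePatriciaProofVerifier.extractProofValue(storageHash, path, stack);
    }
}
```

Callers typically RLP-decode the returned bytes once more to obtain the raw slot contents, after checking that the proof was not a proof of exclusion.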