Skip to content

Commit

Permalink
Merge pull request #38 from HerodotusDev/develop
Browse files Browse the repository at this point in the history
Error handling
  • Loading branch information
beeinger authored Jan 3, 2024
2 parents 177aa68 + e6949f3 commit a8090bc
Show file tree
Hide file tree
Showing 31 changed files with 762 additions and 528 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,4 @@ jobs:
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.toolchain }}
- run: cargo test --all-features && cargo clippy --all-features
- run: cargo test --all-features
4 changes: 2 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "accumulators"
version = "0.2.0"
version = "0.3.0"
edition = "2021"
license-file = "LICENSE"
description = "Complete package of multiple Accumulators with Stores and hashing functions (Hashers)"
Expand All @@ -18,7 +18,7 @@ sqlx = { version = "0.7", features = [
"runtime-tokio",
"sqlite",
] } # SQLite for rust
anyhow = "1.0" # Error handling
thiserror = "1.0" # Error handling
async-trait = "0.1.74" # Async traits
hex = "0.4.3" # Hex encoding
sha3 = "0.10.8" # Keccak hashing
Expand Down
4 changes: 3 additions & 1 deletion benches/incremental_benchmark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ async fn prepare_incremental(count: usize) -> IncrementalMerkleTree<StarkPoseido

let store = Arc::new(store);

IncrementalMerkleTree::initialize(count, "0x0".to_string(), hasher, store, None).await
IncrementalMerkleTree::initialize(count, "0x0".to_string(), hasher, store, None)
.await
.unwrap()
}

fn bench(c: &mut Criterion) {
Expand Down
55 changes: 46 additions & 9 deletions src/hasher/core.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,49 @@
use anyhow::Result;
use std::{fmt::Debug, str::FromStr};
use strum_macros::EnumIter;
use thiserror::Error;

// Default Hasher Options
pub const DEFAULT_BLOCK_SIZE_BITS: usize = 256;
/// Hasher error
#[derive(Error, Debug)]
pub enum HasherError {
#[error("Invalid hashing function")]
InvalidHashingFunction,
#[error(
"Element size {element_size} is too big for hashing function with block size {block_size_bits}"
)]
InvalidElementSize {
element_size: usize,
block_size_bits: usize,
},
#[error("Invalid elements length for hashing function")]
InvalidElementsLength,
#[error("Fail to convert to U256")]
U256ConversionError,
#[error("Fail to decode hex")]
HexDecodeError(#[from] hex::FromHexError),
}

/// A trait for hash functions
pub trait Hasher: Send + Sync + Debug {
fn hash(&self, data: Vec<String>) -> Result<String>;
fn is_element_size_valid(&self, element: &str) -> bool;
fn hash_single(&self, data: &str) -> Result<String>;
fn get_genesis(&self) -> Result<String>;
/// Hashes a data which is a vector of strings
fn hash(&self, data: Vec<String>) -> Result<String, HasherError>;

/// Checks if the element size is valid, i.e. if it is less than the block size
fn is_element_size_valid(&self, element: &str) -> Result<bool, HasherError>;

/// Hashes a single element
fn hash_single(&self, data: &str) -> Result<String, HasherError>;

/// Returns the genesis hash
fn get_genesis(&self) -> Result<String, HasherError>;

/// Returns the name of the [`HashingFunction`]
fn get_name(&self) -> HashingFunction;

/// Returns the block size in bits
fn get_block_size_bits(&self) -> usize;
}

/// Hashing functions types supported by the hasher
#[derive(EnumIter, Debug, PartialEq, Eq, Clone, Copy)]
pub enum HashingFunction {
Keccak256,
Expand All @@ -21,14 +52,14 @@ pub enum HashingFunction {
}

impl FromStr for HashingFunction {
type Err = anyhow::Error;
type Err = HasherError;

fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"keccak" => Ok(HashingFunction::Keccak256),
"poseidon" => Ok(HashingFunction::Poseidon),
"pedersen" => Ok(HashingFunction::Pedersen),
_ => Err(anyhow::anyhow!("invalid hashing function")),
_ => Err(HasherError::InvalidHashingFunction),
}
}
}
Expand All @@ -42,3 +73,9 @@ impl ToString for HashingFunction {
}
}
}

/// Returns the byte size of a hex string.
///
/// An optional `0x` prefix is ignored; every two hex digits count as one byte
/// (a trailing odd digit is truncated by the integer division).
pub fn byte_size(hex: &str) -> usize {
    let digits = match hex.strip_prefix("0x") {
        Some(stripped) => stripped,
        None => hex,
    };
    digits.len() / 2
}
42 changes: 27 additions & 15 deletions src/hasher/hashers/keccak.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
use anyhow::Result;
use crate::hasher::{byte_size, HasherError, HashingFunction};
use sha3::{Digest, Keccak256};

use crate::hasher::HashingFunction;

use super::super::Hasher;

/// Hasher for Keccak256
#[derive(Debug, Clone)]
pub struct KeccakHasher {
    /// The block size in bits for Keccak256 is 256
    block_size_bits: usize,
}

Expand All @@ -15,9 +15,14 @@ impl Hasher for KeccakHasher {
HashingFunction::Keccak256
}

fn hash(&self, data: Vec<String>) -> Result<String> {
/// Hashes a data which is a vector of strings (all elements must be hex encoded)
///
/// NOTE: data have no limit in length of elements
fn hash(&self, data: Vec<String>) -> Result<String, HasherError> {
let mut keccak = Keccak256::new();

//? We deliberately don't validate the size of the elements here, because we want to allow hashing of the RLP encoded block to get a block hash

if data.is_empty() {
keccak.update([]);
} else if data.len() == 1 {
Expand All @@ -38,20 +43,33 @@ impl Hasher for KeccakHasher {
Ok(format!("0x{:0>64}", hex::encode(res)))
}

fn is_element_size_valid(&self, element: &str) -> bool {
byte_size(element) <= self.block_size_bits
fn is_element_size_valid(&self, element: &str) -> Result<bool, HasherError> {
let size = byte_size(element);
if size <= self.block_size_bits {
Ok(true)
} else {
Err(HasherError::InvalidElementSize {
element_size: size,
block_size_bits: self.block_size_bits,
})
}
}

fn hash_single(&self, data: &str) -> Result<String> {
/// Hashes a single data which is a string (must be hex encoded)
fn hash_single(&self, data: &str) -> Result<String, HasherError> {
self.hash(vec![data.to_string()])
}

fn get_genesis(&self) -> Result<String> {
fn get_genesis(&self) -> Result<String, HasherError> {
let genesis_str = "brave new world";
let hex = hex::encode(genesis_str);

self.hash_single(&hex)
}

/// Returns the configured block size in bits (256 for Keccak256, per the field doc).
fn get_block_size_bits(&self) -> usize {
self.block_size_bits
}
}

impl KeccakHasher {
Expand All @@ -67,9 +85,3 @@ impl Default for KeccakHasher {
Self::new()
}
}

/// Byte length of a hex string, ignoring an optional `0x` prefix.
fn byte_size(hex: &str) -> usize {
    let digit_count = hex.strip_prefix("0x").map_or(hex.len(), str::len);
    digit_count / 2
}
83 changes: 45 additions & 38 deletions src/hasher/hashers/stark_pedersen.rs
Original file line number Diff line number Diff line change
@@ -1,75 +1,92 @@
use anyhow::Result;
use std::collections::HashMap;

use primitive_types::U256;
use starknet::core::{crypto::pedersen_hash, types::FieldElement};

use crate::hasher::HashingFunction;
use crate::hasher::{byte_size, HasherError, HashingFunction};

use super::super::Hasher;

/// Hasher for Stark Pedersen
#[derive(Debug, Clone)]
pub struct StarkPedersenHasher {
    /// The block size in bits for Stark Pedersen is 252
    block_size_bits: usize,
}

impl Hasher for StarkPedersenHasher {
fn get_name(&self) -> HashingFunction {
HashingFunction::Pedersen
}

fn hash(&self, data: Vec<String>) -> Result<String> {
/// Hashes a data which is a vector of strings
///
/// NOTE: data should be of size 2
fn hash(&self, data: Vec<String>) -> Result<String, HasherError> {
if data.len() != 2 {
panic!("Stark Pedersen Hasher only accepts two elements");
return Err(HasherError::InvalidElementsLength);
}

for element in &data {
if !self.is_element_size_valid(element) {
panic!("{}", format!("Element {} is not of valid size", element));
}
self.is_element_size_valid(element)?;
}

let clean_data: Vec<String> = data
.iter()
.map(|s| {
if let Some(stripped) = s.strip_prefix("0x") {
U256::from_str_radix(stripped, 16).unwrap().to_string()
} else {
U256::from_str_radix(s, 16).unwrap().to_string()
}
})
.collect();
let mut clean_data = Vec::with_capacity(data.len());
for s in data.iter() {
let number_str = if let Some(stripped) = s.strip_prefix("0x") {
U256::from_str_radix(stripped, 16)
} else {
U256::from_str_radix(s, 16)
};

match number_str {
Ok(number) => clean_data.push(number.to_string()),
Err(_) => return Err(HasherError::U256ConversionError),
}
}

let result = pedersen_hash(
&FieldElement::from_dec_str(&clean_data[0]).unwrap_or_default(),
&FieldElement::from_dec_str(&clean_data[1]).unwrap_or_default(),
)
.to_string();

let computed_result = U256::from_dec_str(result.trim()).expect("Failed to convert to U256");
let computed_result =
U256::from_dec_str(result.trim()).map_err(|_| HasherError::U256ConversionError)?;
let padded_hex_str = format!("0x{:064x}", computed_result);

Ok(padded_hex_str)
}

fn is_element_size_valid(&self, element: &str) -> bool {
byte_size(element) <= *self.options.get("blockSizeBits").unwrap()
fn is_element_size_valid(&self, element: &str) -> Result<bool, HasherError> {
let size = byte_size(element);
if size <= self.block_size_bits {
Ok(true)
} else {
Err(HasherError::InvalidElementSize {
element_size: size,
block_size_bits: self.block_size_bits,
})
}
}

fn hash_single(&self, data: &str) -> Result<String> {
fn hash_single(&self, data: &str) -> Result<String, HasherError> {
self.hash(vec![data.to_string(), "".to_string()])
}

fn get_genesis(&self) -> Result<String> {
fn get_genesis(&self) -> Result<String, HasherError> {
let genesis_str = "brave new world";
self.hash_single(genesis_str)
}

fn get_block_size_bits(&self) -> usize {
self.block_size_bits
}
}

impl StarkPedersenHasher {
pub fn new() -> Self {
let mut options = HashMap::new();
options.insert("blockSizeBits".to_string(), 252);
StarkPedersenHasher { options }
Self {
block_size_bits: 252,
}
}
}

Expand All @@ -78,13 +95,3 @@ impl Default for StarkPedersenHasher {
Self::new()
}
}

fn byte_size(hex: &str) -> usize {
let hex = if let Some(stripped) = hex.strip_prefix("0x") {
stripped
} else {
hex
};

hex.len() / 2
}
Loading

0 comments on commit a8090bc

Please sign in to comment.