merkle generation + render

enx01
2025-11-27 18:32:56 +01:00
parent 8859846d8b
commit 74869221e0
5 changed files with 203 additions and 167 deletions


@@ -1,6 +1,7 @@
use std::collections::HashMap;
use std::hash::{DefaultHasher, Hash, Hasher};
use rand::{rng, Rng};
+use sha2::{Digest, Sha256};
// --- Constants ---
const MAX_CHUNK_DATA_SIZE: usize = 1024;
@@ -10,7 +11,7 @@ const MIN_BIG_CHILDREN: usize = 2;
const FILENAME_HASH_SIZE: usize = 32;
const DIRECTORY_ENTRY_SIZE: usize = FILENAME_HASH_SIZE * 2; // 64 bytes
-fn dummy_hash(data: &[u8]) -> NodeHash {
+fn hash(data: &[u8]) -> NodeHash {
let mut hasher = DefaultHasher::new();
data.hash(&mut hasher);
let hash_u64 = hasher.finish();
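The renamed hash() above still derives the digest from std's non-cryptographic 64-bit DefaultHasher, while the first hunk also pulls in sha2. A minimal sketch of a SHA-256-backed variant, assuming NodeHash is a 32-byte array (the type alias and the name sha256_hash are illustrative, not part of the commit):

use sha2::{Digest, Sha256};

// Assumed alias; the real NodeHash definition is outside this diff.
type NodeHash = [u8; 32];

// Hash arbitrary serialized node bytes into a fixed 32-byte digest.
fn sha256_hash(data: &[u8]) -> NodeHash {
    Sha256::digest(data).into()
}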
@@ -70,6 +71,18 @@ pub enum MerkleNode {
BigDirectory(BigDirectoryNode) = 4,
}
+#[derive(Debug, Clone)]
+pub struct MerkleTree {
+pub data: HashMap<NodeHash, MerkleNode>,
+pub root: NodeHash,
+}
+impl MerkleTree {
+pub fn new(data: HashMap<NodeHash, MerkleNode>, root: NodeHash) -> MerkleTree {
+MerkleTree { data, root }
+}
+}
fn generate_random_file_node(storage: &mut HashMap<NodeHash, MerkleNode>) -> Result<NodeHash, String> {
let mut rng = rng();
let is_big = rng.random_bool(0.2); // 20% chance of being a big file
@@ -77,7 +90,7 @@ fn generate_random_file_node(storage: &mut HashMap<NodeHash, MerkleNode>) -> Res
if !is_big {
// Generate a simple Chunk Node
let node = MerkleNode::Chunk(ChunkNode::new_random());
-let hash = dummy_hash(&node.serialize());
+let hash = hash(&node.serialize());
storage.insert(hash, node);
Ok(hash)
} else {
@@ -88,13 +101,13 @@ fn generate_random_file_node(storage: &mut HashMap<NodeHash, MerkleNode>) -> Res
for _ in 0..num_children {
// Children must be Chunk or Big; for simplicity, we only generate Chunk children here.
let chunk_node = MerkleNode::Chunk(ChunkNode::new_random());
-let chunk_hash = dummy_hash(&chunk_node.serialize());
+let chunk_hash = hash(&chunk_node.serialize());
storage.insert(chunk_hash, chunk_node);
children_hashes.push(chunk_hash);
}
let node = MerkleNode::Big(BigNode::new(children_hashes)?);
-let hash = dummy_hash(&node.serialize());
+let hash = hash(&node.serialize());
storage.insert(hash, node);
Ok(hash)
}
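For orientation, the generator and the new MerkleTree wrapper compose roughly as follows (a usage sketch based only on the signatures shown in this diff; build_random_file_tree is not code from the commit):

// Build a small random tree rooted at a single file node.
fn build_random_file_tree() -> Result<MerkleTree, String> {
    let mut storage: HashMap<NodeHash, MerkleNode> = HashMap::new();
    let root = generate_random_file_node(&mut storage)?;
    Ok(MerkleTree::new(storage, root))
}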
@@ -141,7 +154,7 @@ fn generate_random_directory_node(
}
let node = MerkleNode::Directory(DirectoryNode::new(entries)?);
-let hash = dummy_hash(&node.serialize());
+let hash = hash(&node.serialize());
storage.insert(hash, node);
Ok(hash)
@@ -157,7 +170,7 @@ fn generate_random_directory_node(
}
let node = MerkleNode::BigDirectory(BigDirectoryNode::new(children)?);
-let hash = dummy_hash(&node.serialize());
+let hash = hash(&node.serialize());
storage.insert(hash, node);
Ok(hash)
}
@@ -201,6 +214,11 @@ pub struct DirectoryEntry {
pub content_hash: NodeHash,
}
+pub fn filename_to_string(filename: [u8; FILENAME_HASH_SIZE]) -> String {
+let end_index = filename.iter().position(|&b| b == 0).unwrap_or(FILENAME_HASH_SIZE);
+String::from_utf8_lossy(&filename[..end_index]).to_string()
+}
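As an illustration (not part of the commit), a zero-padded 32-byte filename buffer decodes back to its original string via the helper added above, e.g. inside a test:

// "hello.txt" is 9 bytes; the remaining 23 bytes stay zero and are trimmed.
let mut filename = [0u8; FILENAME_HASH_SIZE];
filename[..9].copy_from_slice(b"hello.txt");
assert_eq!(filename_to_string(filename), "hello.txt");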
#[derive(Debug, Clone)]
pub struct DirectoryNode {
pub entries: Vec<DirectoryEntry>,