use crate::data::*;
use rand::{Rng, rng};
use std::collections::HashMap;
use std::hash::{DefaultHasher, Hash, Hasher};

fn hash(data: &[u8]) -> NodeHash {
    let mut hasher = DefaultHasher::new();
    data.hash(&mut hasher);
    let hash_u64 = hasher.finish();
    let mut hash_array = [0u8; FILENAME_HASH_SIZE];
    // Simple way to spread a 64-bit hash across 32 bytes for a unique-ish ID
    for i in 0..8 {
        hash_array[i] = (hash_u64 >> (i * 8)) as u8;
    }
    hash_array // The rest remains 0, satisfying the 32-byte requirement
}

fn generate_random_filename() -> [u8; FILENAME_HASH_SIZE] {
    let mut rng = rng();
    let mut filename_bytes = [0u8; FILENAME_HASH_SIZE];
    // Generate a random length for the base name
    let name_len = rng.random_range(5..21);
    // Generate random lowercase ASCII letters ('a' through 'z')
    for i in 0..name_len {
        if i < FILENAME_HASH_SIZE {
            filename_bytes[i] = rng.random_range(b'a'..=b'z');
        }
    }
    // Append a common extension
    let ext = if rng.random_bool(0.5) { ".txt" } else { ".dat" };
    let ext_bytes = ext.as_bytes();
    let start_index = name_len.min(FILENAME_HASH_SIZE - ext_bytes.len());
    if start_index < FILENAME_HASH_SIZE {
        filename_bytes[start_index..(start_index + ext_bytes.len())].copy_from_slice(ext_bytes);
    }
    filename_bytes
}

fn generate_random_file_node(
    storage: &mut HashMap<NodeHash, MerkleNode>,
) -> Result<NodeHash, String> {
    let mut rng = rng();
    let is_big = rng.random_bool(0.2); // 20% chance of being a big file
    if !is_big {
        // Generate a simple Chunk Node
        let node = MerkleNode::Chunk(ChunkNode::new_random());
        let hash = hash(&node.serialize());
        storage.insert(hash, node);
        Ok(hash)
    } else {
        // Generate a Big Node (a file composed of chunks)
        let num_children = rng.random_range(MIN_BIG_CHILDREN..=MAX_BIG_CHILDREN.min(8)); // Limit complexity
        let mut children_hashes = Vec::with_capacity(num_children);
        for _ in 0..num_children {
            // Children must be Chunk or Big; for simplicity, we only generate Chunk children here.
            let chunk_node = MerkleNode::Chunk(ChunkNode::new_random());
            let chunk_hash = hash(&chunk_node.serialize());
            storage.insert(chunk_hash, chunk_node);
            children_hashes.push(chunk_hash);
        }
        let node = MerkleNode::Big(BigNode::new(children_hashes)?);
        let hash = hash(&node.serialize());
        storage.insert(hash, node);
        Ok(hash)
    }
}

fn generate_random_directory_node(
    depth: u32,
    max_depth: u32,
    storage: &mut HashMap<NodeHash, MerkleNode>,
) -> Result<NodeHash, String> {
    let mut rng = rng();
    let current_depth = depth + 1;
    // A BigDirectory is only allowed while there is room left below the depth cap.
    let is_big_dir = rng.random_bool(0.3) && current_depth < max_depth;
    if !is_big_dir {
        // Generate a simple Directory Node (leaf-level directory)
        let num_entries = rng.random_range(1..=MAX_DIRECTORY_ENTRIES.min(5)); // Limit directory size for testing
        let mut entries = Vec::with_capacity(num_entries);
        for _ in 0..num_entries {
            if rng.random_bool(0.7) {
                // 70% chance of creating a file (Chunk/Big)
                let file_hash = generate_random_file_node(storage)?;
                let entry = DirectoryEntry {
                    filename: generate_random_filename(),
                    content_hash: file_hash,
                };
                entries.push(entry);
            } else if current_depth < max_depth {
                // 30% chance of creating a subdirectory. At the depth cap this branch
                // is skipped, so a directory may hold fewer entries than num_entries.
                let dir_hash =
                    generate_random_directory_node(current_depth, max_depth, storage)?;
                // Create a basic directory entry name. Note: subdirectories generated
                // at the same depth share the name "dir_<depth>".
                let mut filename_bytes = [0u8; FILENAME_HASH_SIZE];
                let subdir_name = format!("dir_{}", current_depth);
                filename_bytes[..subdir_name.len()].copy_from_slice(subdir_name.as_bytes());
                let entry = DirectoryEntry {
                    filename: filename_bytes,
                    content_hash: dir_hash,
                };
                entries.push(entry);
            }
        }
        let node = MerkleNode::Directory(DirectoryNode::new(entries)?);
        let hash = hash(&node.serialize());
        storage.insert(hash, node);
        Ok(hash)
    } else {
        // Generate a BigDirectory Node (internal directory structure)
        let num_children = rng.random_range(MIN_BIG_CHILDREN..=MAX_BIG_CHILDREN.min(4)); // Limit children count
        let mut children = Vec::with_capacity(num_children);
        for _ in 0..num_children {
            // Children must be Directory or BigDirectory
            let child_hash = generate_random_directory_node(current_depth, max_depth, storage)?;
            children.push(child_hash);
        }
        let node = MerkleNode::BigDirectory(BigDirectoryNode::new(children)?);
        let hash = hash(&node.serialize());
        storage.insert(hash, node);
        Ok(hash)
    }
}

pub fn generate_random_tree(
    max_depth: u32,
) -> Result<(NodeHash, HashMap<NodeHash, MerkleNode>), String> {
    let mut storage = HashMap::new();
    // Start tree generation from the root directory at depth 0
    let root_hash = generate_random_directory_node(0, max_depth, &mut storage)?;
    Ok((root_hash, storage))
}

pub fn generate_base_tree() -> (NodeHash, HashMap<NodeHash, MerkleNode>) {
    let mut res = HashMap::new();

    let node1 = MerkleNode::Chunk(ChunkNode::new_random());
    let hash1 = hash(&node1.serialize());
    let node2 = MerkleNode::Chunk(ChunkNode::new_random());
    let hash2 = hash(&node2.serialize());
    res.insert(hash1, node1);
    res.insert(hash2, node2);

    let node3 = MerkleNode::Chunk(ChunkNode::new_random());
    let hash3 = hash(&node3.serialize());
    res.insert(hash3, node3);

    let dir1 = MerkleNode::Directory(DirectoryNode {
        entries: vec![DirectoryEntry {
            filename: generate_random_filename(),
            content_hash: hash3,
        }],
    });
    let hash_dir1 = hash(&dir1.serialize());
    res.insert(hash_dir1, dir1);

    let root = MerkleNode::Directory(DirectoryNode {
        entries: vec![
            DirectoryEntry {
                filename: generate_random_filename(),
                content_hash: hash1,
            },
            DirectoryEntry {
                filename: generate_random_filename(),
                content_hash: hash2,
            },
            DirectoryEntry {
                filename: generate_random_filename(),
                content_hash: hash_dir1,
            },
        ],
    });
    let root_hash = hash(&root.serialize());
    res.insert(root_hash, root);
    (root_hash, res)
}
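
// A minimal usage sketch (not part of the original module), assuming these
// generators are exercised from unit tests in this crate. The test names are
// hypothetical; the assertions rely only on behavior visible in this file:
// every node is stored under its own hash, and generate_base_tree inserts
// five nodes (three chunks, one subdirectory, one root directory).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn base_tree_root_is_stored() {
        let (root_hash, storage) = generate_base_tree();
        // The root must be retrievable under its own hash.
        assert!(storage.contains_key(&root_hash));
        // Three chunks + one subdirectory + the root, assuming no 64-bit
        // hash collisions between the random chunk nodes.
        assert_eq!(storage.len(), 5);
    }

    #[test]
    fn random_tree_root_is_stored() {
        let (root_hash, storage) =
            generate_random_tree(3).expect("random tree generation should succeed");
        assert!(storage.contains_key(&root_hash));
    }
}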