base tree creation

Tiago Batista Cardoso
2025-11-28 11:33:50 +01:00
parent 74869221e0
commit 85e78447a7
2 changed files with 136 additions and 102 deletions
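In short: instead of generating a random Merkle tree at startup, the GUI now builds a small fixed base tree, a root directory holding two chunk files and one sub-directory that itself contains a single chunk. A minimal sketch of the initialization path this commit changes (types taken from the diff below; the helper name is illustrative, not part of the commit):

use client_network::{MerkleNode, MerkleTree};
use std::collections::HashMap;

// Sketch of what P2PClientApp::new() now does for the placeholder peer "bob".
fn init_loaded_fs() -> HashMap<String, MerkleTree> {
    // root_hash identifies the root directory; tree_content maps each node
    // hash to its MerkleNode (two chunks, one sub-directory, one nested chunk).
    let (root_hash, tree_content) = MerkleNode::generate_base_tree();
    let mut loaded_fs = HashMap::new();
    loaded_fs.insert("bob".to_string(), MerkleTree::new(tree_content, root_hash));
    loaded_fs
}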

View File

@@ -1,6 +1,12 @@
-use client_network::{filename_to_string, node_hash_to_hex_string, MerkleNode, MerkleTree, NetworkCommand, NetworkEvent, NodeHash};
+use client_network::{
+    MerkleNode, MerkleTree, NetworkCommand, NetworkEvent, NodeHash, filename_to_string,
+    node_hash_to_hex_string,
+};
 use crossbeam_channel::{Receiver, Sender};
-use egui::{Align, Button, CentralPanel, CollapsingHeader, Context, Layout, ScrollArea, SidePanel, TopBottomPanel, Ui, ViewportCommand};
+use egui::{
+    Align, Button, CentralPanel, CollapsingHeader, Context, Layout, ScrollArea, SidePanel,
+    TopBottomPanel, Ui, ViewportCommand,
+};
 use std::collections::HashMap;
 
 // --- Main Application Struct ---
@@ -23,22 +29,17 @@ pub struct P2PClientApp {
 
 impl P2PClientApp {
     pub fn new(cmd_tx: Sender<NetworkCommand>, event_rx: Receiver<NetworkEvent>) -> Self {
-        let (root_hash, tree_content) = MerkleNode::generate_random_tree(5).expect("Couldn't generate tree");
-
-        let mut peer_root_hash = HashMap::new();
-        peer_root_hash.insert("bob".to_string(), "yoyoyoyo".to_string());
+        let (root_hash, tree_content) = MerkleNode::generate_base_tree();
 
         let mut loaded_fs = HashMap::new();
         let tree = MerkleTree::new(tree_content, root_hash);
         loaded_fs.insert("bob".to_string(), tree);
 
         Self {
             network_cmd_tx: cmd_tx,
             network_event_rx: event_rx,
             status_message: "Client Initialized. Awaiting network status...".to_string(),
-            known_peers: vec![
-                "bob".to_string()
-            ],
+            known_peers: vec!["bob".to_string()],
             connect_address_input: "127.0.0.1:8080".to_string(),
             loaded_fs,
             active_peer: None,
@@ -50,7 +51,6 @@ impl P2PClientApp {
 
 impl eframe::App for P2PClientApp {
     fn update(&mut self, ctx: &Context, _frame: &mut eframe::Frame) {
-
         // 1. Process incoming Network Events
         // We poll the channel and update the GUI state for every event received.
         while let Ok(event) = self.network_event_rx.try_recv() {
@@ -119,7 +119,10 @@ impl eframe::App for P2PClientApp {
         });
 
         // 3. Right-sided Panel (Known Peers)
-        SidePanel::right("right_panel").resizable(true).min_width(180.0).show(ctx, |ui| {
+        SidePanel::right("right_panel")
+            .resizable(true)
+            .min_width(180.0)
+            .show(ctx, |ui| {
             ui.heading("🌐 Known Peers");
             ui.separator();
             ScrollArea::vertical().show(ui, |ui| {
@@ -128,13 +131,20 @@ impl eframe::App for P2PClientApp {
ui.label("No active peers."); ui.label("No active peers.");
} else { } else {
for peer in &self.known_peers { for peer in &self.known_peers {
let is_active = self.active_peer.as_ref().map_or(false, |id| id == peer); // if peer.id == self.active_peer_id let is_active =
self.active_peer.as_ref().map_or(false, |id| id == peer); // if peer.id == self.active_peer_id
if ui.selectable_label(is_active, format!("{}", peer)).clicked() { if ui
.selectable_label(is_active, format!("{}", peer))
.clicked()
{
// switch to displaying this peer's tree // switch to displaying this peer's tree
self.active_peer = Some(peer.clone()); self.active_peer = Some(peer.clone());
// Request root content if not loaded // Request root content if not loaded
if !self.loaded_fs.contains_key(self.active_peer.as_ref().unwrap()) { if !self
.loaded_fs
.contains_key(self.active_peer.as_ref().unwrap())
{
todo!(); todo!();
// let _ = self.network_cmd_tx.send(NetworkCommand::RequestDirectoryContent( // let _ = self.network_cmd_tx.send(NetworkCommand::RequestDirectoryContent(
// peer.clone(), // peer.clone(),
@@ -185,72 +195,30 @@ impl eframe::App for P2PClientApp {
 // --- Helper for Drawing the Recursive File Tree ---
 impl P2PClientApp {
-    // fn draw_file_tree(&self, ui: &mut Ui, node: &FileNode, depth: usize) {
-    //     let indent_space = 15.0 * depth as f32;
-    //     let active_peer_id = self.active_peer_id.clone();
-    //
-    //     let entry_hash = &node.hash_id;
-    //     let filename = &node.name;
-    //     let is_dir = node.is_dir;
-    //
-    //     if is_dir {
-    //         // --- Directory Node: Check if content (children) is already loaded (stored in the map) ---
-    //
-    //         if let Some(children) = node.children.as_ref() {
-    //             // Content is already loaded: draw the collapsing header and recurse
-    //             CollapsingHeader::new(format!("📁 {}", filename))
-    //                 .default_open(false)
-    //                 .enabled(true)
-    //                 .show(ui, |ui| {
-    //                     // Recursive call: iterate over children and call draw_file_tree for each
-    //                     for child_node in children {
-    //                         self.draw_file_tree(ui, child_node, depth + 1);
-    //                     }
-    //                 });
-    //         } else {
-    //             // Content is NOT loaded: show a clickable button to request loading
-    //             let response = ui.with_layout(Layout::left_to_right(Align::Min), |ui| {
-    //                 ui.add_space(indent_space);
-    //                 ui.add(Button::new(format!("▶️ {} (Load)", filename)).small()).on_hover_text(format!("Hash: {}...", &entry_hash[..8]));
-    //             }).response;
-    //
-    //             if response.clicked() {
-    //                 if let Some(peer_id) = active_peer_id.clone() {
-    //                     let _ = self.network_cmd_tx.send(NetworkCommand::RequestDirectoryContent(
-    //                         peer_id,
-    //                         entry_hash.clone(),
-    //                     ));
-    //                     // self.status_message = format!("Requested directory content for: {}...", &entry_hash[..8]);
-    //                 }
-    //             }
-    //         }
-    //     } else {
-    //         // --- File Node (Chunk or Big) ---
-    //         ui.with_layout(Layout::left_to_right(Align::Center), |ui| {
-    //             ui.add_space(indent_space);
-    //             if ui.selectable_label(false, format!("📄 {} (Hash: {}...)", filename, &entry_hash[..8])).on_hover_text("Click to request file chunks...").clicked() {
-    //                 if let Some(peer_id) = active_peer_id.clone() {
-    //                     let _ = self.network_cmd_tx.send(NetworkCommand::RequestChunk(peer_id, entry_hash.clone()));
-    //                     // self.status_message = format!("Requested file chunks for: {}...", &entry_hash[..8]);
-    //                 }
-    //             }
-    //         });
-    //     }
-    // }
-
     fn draw_file_tree(&self, ui: &mut Ui, tree: &MerkleTree) {
         assert!(self.active_peer.is_some());
-        assert!(self.loaded_fs.get(&self.active_peer.clone().unwrap()).is_some());
+        assert!(
+            self.loaded_fs
+                .get(&self.active_peer.clone().unwrap())
+                .is_some()
+        );
 
         let root = tree.root;
         CollapsingHeader::new(format!("📁 root"))
             .default_open(true)
             .enabled(true)
             .show(ui, |ui| {
-                self.draw_file_node(ui, root, tree,0, None);
+                self.draw_file_node(ui, root, tree, 0, None);
             });
     }
 
-    fn draw_file_node(&self, ui: &mut Ui, to_draw: NodeHash, tree: &MerkleTree, depth: usize, filename: Option<[u8; 32]>) {
+    fn draw_file_node(
+        &self,
+        ui: &mut Ui,
+        to_draw: NodeHash,
+        tree: &MerkleTree,
+        depth: usize,
+        filename: Option<[u8; 32]>,
+    ) {
         if depth >= 32 {
             return;
         }
@@ -264,7 +232,11 @@ impl P2PClientApp {
         };
         match current {
             MerkleNode::Chunk(node) => {
-                if ui.selectable_label(false, format!("📄 (C) {}...", name)).on_hover_text("Click to request file chunks...").clicked() {
+                if ui
+                    .selectable_label(false, format!("📄 (C) {}...", name))
+                    .on_hover_text("Click to request file chunks...")
+                    .clicked()
+                {
                     todo!();
                     // if let Some(peer_id) = active_peer_id.clone() {
                     //     let _ = self.network_cmd_tx.send(NetworkCommand::RequestChunk(peer_id, entry_hash.clone()));
@@ -278,12 +250,17 @@ impl P2PClientApp {
                     .enabled(true)
                     .show(ui, |ui| {
                         for entry in &node.entries {
-                            self.draw_file_node(ui, entry.content_hash, tree, depth + 1, Some(entry.filename));
+                            self.draw_file_node(
+                                ui,
+                                entry.content_hash,
+                                tree,
+                                depth + 1,
+                                Some(entry.filename),
+                            );
                         }
                     });
             }
-
             MerkleNode::Big(node) => {
                 CollapsingHeader::new(format!("📄 (B) {}", name))
                     .default_open(false)
                     .enabled(true)
@@ -307,3 +284,4 @@ impl P2PClientApp {
         }
     }
 }

View File

@@ -1,7 +1,7 @@
+use rand::{Rng, rng};
+use sha2::{Digest, Sha256};
 use std::collections::HashMap;
 use std::hash::{DefaultHasher, Hash, Hasher};
-use rand::{rng, Rng};
-use sha2::{Digest, Sha256};
 
 // --- Constants ---
 const MAX_CHUNK_DATA_SIZE: usize = 1024;
@@ -53,9 +53,7 @@ fn generate_random_filename() -> [u8; FILENAME_HASH_SIZE] {
 pub type NodeHash = [u8; FILENAME_HASH_SIZE];
 
 pub fn node_hash_to_hex_string(hash: &NodeHash) -> String {
-    hash.iter()
-        .map(|b| format!("{:02x}", b))
-        .collect()
+    hash.iter().map(|b| format!("{:02x}", b)).collect()
 }
 
 #[repr(u8)]
@@ -83,7 +81,9 @@ impl MerkleTree {
     }
 }
 
-fn generate_random_file_node(storage: &mut HashMap<NodeHash, MerkleNode>) -> Result<NodeHash, String> {
+fn generate_random_file_node(
+    storage: &mut HashMap<NodeHash, MerkleNode>,
+) -> Result<NodeHash, String> {
     let mut rng = rng();
 
     let is_big = rng.random_bool(0.2); // 20% chance of being a big file
@@ -116,7 +116,7 @@ fn generate_random_file_node(storage: &mut HashMap<NodeHash, MerkleNode>) -> Res
 fn generate_random_directory_node(
     depth: u32,
     max_depth: u32,
-    storage: &mut HashMap<NodeHash, MerkleNode>
+    storage: &mut HashMap<NodeHash, MerkleNode>,
 ) -> Result<NodeHash, String> {
     let mut rng = rng();
     let current_depth = depth + 1;
@@ -157,7 +157,6 @@ fn generate_random_directory_node(
         let hash = hash(&node.serialize());
         storage.insert(hash, node);
         Ok(hash)
-
     } else {
         // Generate a BigDirectory Node (internal directory structure)
         let num_children = rng.random_range(MIN_BIG_CHILDREN..=MAX_BIG_CHILDREN.min(4)); // Limit children count
@@ -215,7 +214,10 @@ pub struct DirectoryEntry {
 }
 
 pub fn filename_to_string(filename: [u8; FILENAME_HASH_SIZE]) -> String {
-    let end_index = filename.iter().position(|&b| b == 0).unwrap_or(FILENAME_HASH_SIZE);
+    let end_index = filename
+        .iter()
+        .position(|&b| b == 0)
+        .unwrap_or(FILENAME_HASH_SIZE);
     String::from_utf8_lossy(&filename[..end_index]).to_string()
 }
@@ -312,7 +314,9 @@ impl MerkleNode {
         bytes
     }
 
-    pub fn generate_random_tree(max_depth: u32) -> Result<(NodeHash, HashMap<NodeHash, MerkleNode>), String> {
+    pub fn generate_random_tree(
+        max_depth: u32,
+    ) -> Result<(NodeHash, HashMap<NodeHash, MerkleNode>), String> {
         let mut storage = HashMap::new();
 
         // Start tree generation from the root directory at depth 0
@@ -320,4 +324,56 @@ impl MerkleNode {
         Ok((root_hash, storage))
     }
+
+    pub fn generate_base_tree() -> (NodeHash, HashMap<NodeHash, MerkleNode>) {
+        let mut res = HashMap::new();
+
+        let node1 = MerkleNode::Chunk(ChunkNode::new_random());
+        let hash1 = hash(&node1.serialize());
+        let node2 = MerkleNode::Chunk(ChunkNode::new_random());
+        let hash2 = hash(&node2.serialize());
+        res.insert(hash1, node1);
+        res.insert(hash2, node2);
+
+        let node3 = MerkleNode::Chunk(ChunkNode::new_random());
+        let hash3 = hash(&node3.serialize());
+        res.insert(hash3, node3);
+
+        let dir1 = MerkleNode::Directory(DirectoryNode {
+            entries: [DirectoryEntry {
+                filename: generate_random_filename(),
+                content_hash: hash3,
+            }]
+            .to_vec(),
+        });
+        let hash_dir1 = hash(&dir1.serialize());
+        res.insert(hash_dir1, dir1);
+
+        let root = MerkleNode::Directory(DirectoryNode {
+            entries: [
+                DirectoryEntry {
+                    filename: generate_random_filename(),
+                    content_hash: hash1,
+                },
+                DirectoryEntry {
+                    filename: generate_random_filename(),
+                    content_hash: hash2,
+                },
+                DirectoryEntry {
+                    filename: generate_random_filename(),
+                    content_hash: hash_dir1,
+                },
+            ]
+            .to_vec(),
+        });
+        let root_hash = hash(&root.serialize());
+        res.insert(root_hash, root);
+
+        (root_hash, res)
+    }
 }