fud/resource.rs

/* This file is part of DarkFi (https://dark.fi)
 *
 * Copyright (C) 2020-2026 Dyne.org foundation
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <https://www.gnu.org/licenses/>.
 */

use std::{
    collections::HashSet,
    path::{Path, PathBuf},
};
use tinyjson::JsonValue;

use darkfi::{
    geode::{hash_to_string, ChunkedStorage, MAX_CHUNK_SIZE},
    rpc::util::json_map,
    Error, Result,
};

use crate::FileSelection;

#[derive(Clone, Debug)]
pub enum ResourceStatus {
    Downloading,
    Seeding,
    Discovering,
    Incomplete(Option<String>),
    Verifying,
}

impl ResourceStatus {
    pub fn as_str(&self) -> &str {
        match self {
            ResourceStatus::Downloading => "downloading",
            ResourceStatus::Seeding => "seeding",
            ResourceStatus::Discovering => "discovering",
            ResourceStatus::Incomplete(_) => "incomplete",
            ResourceStatus::Verifying => "verifying",
        }
    }
    fn from_str(s: &str) -> Result<Self> {
        match s {
            "downloading" => Ok(ResourceStatus::Downloading),
            "seeding" => Ok(ResourceStatus::Seeding),
            "discovering" => Ok(ResourceStatus::Discovering),
            "incomplete" => Ok(ResourceStatus::Incomplete(None)),
            "verifying" => Ok(ResourceStatus::Verifying),
            _ => Err(Error::Custom("Invalid resource status".to_string())),
        }
    }
}

#[derive(Clone, Debug, PartialEq)]
pub enum ResourceType {
    Unknown,
    File,
    Directory,
}

impl ResourceType {
    pub fn as_str(&self) -> &str {
        match self {
            ResourceType::Unknown => "unknown",
            ResourceType::File => "file",
            ResourceType::Directory => "directory",
        }
    }
    fn from_str(s: &str) -> Result<Self> {
        match s {
            "unknown" => Ok(ResourceType::Unknown),
            "file" => Ok(ResourceType::File),
            "directory" => Ok(ResourceType::Directory),
            _ => Err(Error::Custom("Invalid resource type".to_string())),
        }
    }
}

/// Structure representing the current state of a file or directory on fud.
/// It is used in most `FudEvent`s.
#[derive(Clone, Debug)]
pub struct Resource {
    /// Resource hash (used as key in the DHT)
    pub hash: blake3::Hash,
    /// Resource type (file or directory)
    pub rtype: ResourceType,
    /// Path of the resource on the filesystem
    pub path: PathBuf,
    /// Current status of the resource
    pub status: ResourceStatus,
    /// The files the user wants to download
    pub file_selection: FileSelection,
    /// The last files the user wanted to download
    pub last_file_selection: FileSelection,

    /// Total number of chunks
    pub total_chunks_count: u64,
    /// Number of chunks we want to download
    pub target_chunks_count: u64,
    /// Number of chunks we already downloaded
    pub total_chunks_downloaded: u64,
    /// Number of chunks we already downloaded, counting only the chunks
    /// selected in the last fetch request
    pub target_chunks_downloaded: u64,

    /// Total size (in bytes) of the resource
    pub total_bytes_size: u64,
    /// Data (in bytes) we want to download
    pub target_bytes_size: u64,
    /// Data (in bytes) we already downloaded
    pub total_bytes_downloaded: u64,
    /// Data (in bytes) we already downloaded, counting only the data
    /// selected in the last fetch request
    pub target_bytes_downloaded: u64,

    /// Recent speeds in bytes/sec, used to compute the download ETA.
    pub speeds: Vec<f64>,
}

impl Resource {
    pub fn new(
        hash: blake3::Hash,
        rtype: ResourceType,
        path: &Path,
        status: ResourceStatus,
        file_selection: FileSelection,
    ) -> Self {
        Self {
            hash,
            rtype,
            path: path.to_path_buf(),
            status,
            file_selection: file_selection.clone(),
            last_file_selection: file_selection,
            total_chunks_count: 0,
            target_chunks_count: 0,
            total_chunks_downloaded: 0,
            target_chunks_downloaded: 0,
            total_bytes_size: 0,
            target_bytes_size: 0,
            total_bytes_downloaded: 0,
            target_bytes_downloaded: 0,
            speeds: vec![],
        }
    }

    /// Computes and returns download ETA in seconds using the `speeds` list.
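    /// The remaining data is approximated as `remaining_chunks * MAX_CHUNK_SIZE`,
    /// which slightly overestimates it when the last chunk is smaller than
    /// `MAX_CHUNK_SIZE`. For example, with 4 chunks left and a mean speed of
    /// `MAX_CHUNK_SIZE` bytes/sec, this returns roughly 4 seconds.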
    pub fn get_eta(&self) -> u64 {
        if self.speeds.is_empty() {
            return 0
        }

        let remaining_chunks = self.target_chunks_count - self.target_chunks_downloaded;
        let mean_speed = self.speeds.iter().sum::<f64>() / self.speeds.len() as f64;

        ((remaining_chunks * MAX_CHUNK_SIZE as u64) as f64 / mean_speed) as u64
    }

    /// Returns the list of selected files (absolute paths).
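    /// Entries in a `FileSelection::Set` are treated as paths relative to
    /// `self.path`; any entry not present in `chunked.get_files()` is
    /// silently dropped from the result.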
    pub fn get_selected_files(
        &self,
        chunked: &ChunkedStorage,
        file_selection: &FileSelection,
    ) -> Vec<PathBuf> {
        match &file_selection {
            FileSelection::Set(files) => files
                .iter()
                .map(|file| self.path.join(file))
                .filter(|abs| chunked.get_files().iter().any(|(f, _)| f == abs))
                .collect(),
            FileSelection::All => chunked.get_files().iter().map(|(f, _)| f.clone()).collect(),
        }
    }

    /// Returns the (sub)set of chunk hashes in a ChunkedStorage for the
    /// resource's file selection.
    pub fn get_selected_chunks(&self, chunked: &ChunkedStorage) -> HashSet<blake3::Hash> {
        self.get_chunks_of_selection(chunked, &self.file_selection)
    }

    /// Returns the (sub)set of chunk hashes in a ChunkedStorage for a file selection.
    pub fn get_chunks_of_selection(
        &self,
        chunked: &ChunkedStorage,
        file_selection: &FileSelection,
    ) -> HashSet<blake3::Hash> {
        match &file_selection {
            FileSelection::Set(files) => {
                let mut chunks = HashSet::new();
                for file in files {
                    chunks.extend(chunked.get_chunks_of_file(&self.path.join(file)));
                }
                chunks
            }
            FileSelection::All => chunked.iter().map(|c| c.hash).collect(),
        }
    }

    /// Returns the number of bytes we want from a chunk (depends on the
    /// resource's file selection).
    pub fn get_selected_bytes(
        &self,
        chunked: &ChunkedStorage,
        chunk_hash: &blake3::Hash,
        chunk_size: usize,
    ) -> usize {
        self.get_bytes_of_selection(chunked, &self.file_selection, chunk_hash, chunk_size)
    }

    /// Returns the number of bytes we selected from a chunk.
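    /// Files are laid out back-to-back in `chunked.get_files()` order, so a
    /// single chunk can span several files; this walks the files overlapping
    /// the chunk and sums only the bytes belonging to selected files.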
    pub fn get_bytes_of_selection(
        &self,
        chunked: &ChunkedStorage,
        file_selection: &FileSelection,
        chunk_hash: &blake3::Hash,
        chunk_size: usize,
    ) -> usize {
        // If `FileSelection` is not a set, we want all bytes from a chunk
        let file_set = if let FileSelection::Set(files) = &file_selection {
            files
        } else {
            return chunk_size;
        };

        let chunk_index = match chunked.iter().position(|c| c.hash == *chunk_hash) {
            Some(index) => index,
            None => {
                return 0;
            }
        };

        let files = chunked.get_files();
        let chunk_length = chunk_size;
        let position = (chunk_index as u64) * (MAX_CHUNK_SIZE as u64);
        let mut total_selected_bytes = 0;

        // Find the starting file index based on the position
        let mut file_index = 0;
        let mut file_start_pos = 0;

        while file_index < files.len() {
            if file_start_pos + files[file_index].1 > position {
                break;
            }
            file_start_pos += files[file_index].1;
            file_index += 1;
        }

        if file_index >= files.len() {
            // Out of bounds
            return 0;
        }

        // Calculate the end position of the chunk
        let end_position = position + chunk_length as u64;

        // Iterate through the files and count selected bytes
        while file_index < files.len() {
            let (file_path, file_size) = &files[file_index];
            let file_end_pos = file_start_pos + *file_size;

            // Check if the file is in the selection
            if let Ok(rel_file_path) = file_path.strip_prefix(&self.path) {
                if file_set.contains(rel_file_path) {
                    // Calculate the overlap with the chunk
                    let overlap_start = position.max(file_start_pos);
                    let overlap_end = end_position.min(file_end_pos);

                    if overlap_start < overlap_end {
                        total_selected_bytes += (overlap_end - overlap_start) as usize;
                    }
                }
            }

            // Move to the next file
            file_start_pos += *file_size;
            file_index += 1;

            // Stop if we've reached the end of the chunk
            if file_start_pos >= end_position {
                break;
            }
        }

        total_selected_bytes
    }
}

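/// Serializes a [`Resource`] into a JSON map: counters are stored as JSON
/// numbers (f64) and a non-UTF-8 path is replaced by an empty string.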
impl From<Resource> for JsonValue {
    fn from(rs: Resource) -> JsonValue {
        json_map([
            ("hash", JsonValue::String(hash_to_string(&rs.hash))),
            ("type", JsonValue::String(rs.rtype.as_str().to_string())),
            (
                "path",
                JsonValue::String(match rs.path.clone().into_os_string().into_string() {
                    Ok(path) => path,
                    Err(_) => "".to_string(),
                }),
            ),
            ("status", JsonValue::String(rs.status.as_str().to_string())),
            ("file_selection", rs.file_selection.into()),
            ("last_file_selection", rs.last_file_selection.into()),
            ("total_chunks_count", JsonValue::Number(rs.total_chunks_count as f64)),
            ("target_chunks_count", JsonValue::Number(rs.target_chunks_count as f64)),
            ("total_chunks_downloaded", JsonValue::Number(rs.total_chunks_downloaded as f64)),
            ("target_chunks_downloaded", JsonValue::Number(rs.target_chunks_downloaded as f64)),
            ("total_bytes_size", JsonValue::Number(rs.total_bytes_size as f64)),
            ("target_bytes_size", JsonValue::Number(rs.target_bytes_size as f64)),
            ("total_bytes_downloaded", JsonValue::Number(rs.total_bytes_downloaded as f64)),
            ("target_bytes_downloaded", JsonValue::Number(rs.target_bytes_downloaded as f64)),
            ("speeds", JsonValue::Array(rs.speeds.into_iter().map(JsonValue::Number).collect())),
        ])
    }
}

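/// Deserializes a [`Resource`] from a JSON map produced by the conversion
/// above. Every field is unwrapped, so a missing or malformed field panics;
/// callers are expected to pass well-formed values only.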
impl From<JsonValue> for Resource {
    fn from(value: JsonValue) -> Self {
        let mut hash_buf = vec![];
        let _ = bs58::decode(value["hash"].get::<String>().unwrap().as_str()).onto(&mut hash_buf);
        let mut hash_buf_arr = [0u8; 32];
        hash_buf_arr.copy_from_slice(&hash_buf);
        let hash = blake3::Hash::from_bytes(hash_buf_arr);

        let rtype = ResourceType::from_str(value["type"].get::<String>().unwrap()).unwrap();
        let path = PathBuf::from(value["path"].get::<String>().unwrap());
        let status = ResourceStatus::from_str(value["status"].get::<String>().unwrap()).unwrap();
        let file_selection: FileSelection = value["file_selection"].clone().into();
        let last_file_selection: FileSelection = value["last_file_selection"].clone().into();

        let total_chunks_count = *value["total_chunks_count"].get::<f64>().unwrap() as u64;
        let target_chunks_count = *value["target_chunks_count"].get::<f64>().unwrap() as u64;
        let total_chunks_downloaded =
            *value["total_chunks_downloaded"].get::<f64>().unwrap() as u64;
        let target_chunks_downloaded =
            *value["target_chunks_downloaded"].get::<f64>().unwrap() as u64;
        let total_bytes_size = *value["total_bytes_size"].get::<f64>().unwrap() as u64;
        let target_bytes_size = *value["target_bytes_size"].get::<f64>().unwrap() as u64;
        let total_bytes_downloaded = *value["total_bytes_downloaded"].get::<f64>().unwrap() as u64;
        let target_bytes_downloaded =
            *value["target_bytes_downloaded"].get::<f64>().unwrap() as u64;

        let speeds = value["speeds"]
            .get::<Vec<JsonValue>>()
            .unwrap()
            .iter()
            .map(|s| *s.get::<f64>().unwrap())
            .collect::<Vec<f64>>();

        Resource {
            hash,
            rtype,
            path,
            status,
            file_selection,
            last_file_selection,
            total_chunks_count,
            target_chunks_count,
            total_chunks_downloaded,
            target_chunks_downloaded,
            total_bytes_size,
            target_bytes_size,
            total_bytes_downloaded,
            target_bytes_downloaded,
            speeds,
        }
    }
}
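
// A minimal test sketch for the pure helpers above. It only relies on items
// defined or imported in this file (`Resource::new`, `get_eta`, and the
// status/type string conversions) and avoids `ChunkedStorage`, whose
// construction is outside the scope of this module; the path literal is
// arbitrary.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn eta_is_zero_without_speed_samples() {
        let rs = Resource::new(
            blake3::hash(b"eta-test"),
            ResourceType::File,
            Path::new("/tmp/fud-eta-test"),
            ResourceStatus::Downloading,
            FileSelection::All,
        );
        // No speed samples yet, so no ETA can be computed.
        assert_eq!(rs.get_eta(), 0);
    }

    #[test]
    fn eta_uses_mean_speed_and_remaining_chunks() {
        let mut rs = Resource::new(
            blake3::hash(b"eta-test"),
            ResourceType::File,
            Path::new("/tmp/fud-eta-test"),
            ResourceStatus::Downloading,
            FileSelection::All,
        );
        rs.target_chunks_count = 10;
        rs.target_chunks_downloaded = 6;
        // Mean speed is one chunk per second, so 4 remaining chunks should
        // take roughly 4 seconds.
        rs.speeds = vec![MAX_CHUNK_SIZE as f64, MAX_CHUNK_SIZE as f64];
        assert_eq!(rs.get_eta(), 4);
    }

    #[test]
    fn status_and_type_strings_round_trip() {
        for status in ["downloading", "seeding", "discovering", "incomplete", "verifying"] {
            assert_eq!(ResourceStatus::from_str(status).unwrap().as_str(), status);
        }
        for rtype in ["unknown", "file", "directory"] {
            assert_eq!(ResourceType::from_str(rtype).unwrap().as_str(), rtype);
        }
        assert!(ResourceStatus::from_str("bogus").is_err());
        assert!(ResourceType::from_str("bogus").is_err());
    }
}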