/* darkfi/event_graph/util.rs */
use std::{
20 collections::HashMap,
21 fs::{self, File, OpenOptions},
22 io::Write,
23 path::Path,
24 time::UNIX_EPOCH,
25};
26
27use darkfi_serial::{deserialize, deserialize_async, serialize};
28use sled_overlay::sled;
29use tinyjson::JsonValue;
30use tracing::error;
31
32use crate::{
33 event_graph::{Event, GENESIS_CONTENTS, INITIAL_GENESIS, NULL_ID, N_EVENT_PARENTS},
34 util::{encoding::base64, file::load_file},
35 Result,
36};
37
38#[cfg(feature = "rpc")]
39use crate::rpc::{
40 jsonrpc::{ErrorCode, JsonError, JsonResponse, JsonResult},
41 util::json_map,
42};
43
44use super::event::Header;
45
/// One hour expressed in milliseconds (note: despite the bare name, the unit is ms).
pub(super) const HOUR: i64 = 3_600_000;
48
49pub(super) fn next_hour_timestamp(hours: i64) -> u64 {
52 let now = UNIX_EPOCH.elapsed().unwrap().as_millis() as i64;
54
55 let next_hour = (now / HOUR) * HOUR;
57
58 (next_hour + (HOUR * hours)) as u64
60}
61
62pub(super) fn hours_since(next_hour_ts: u64) -> u64 {
64 let now = UNIX_EPOCH.elapsed().unwrap().as_millis() as u64;
66
67 let elapsed_seconds = now - next_hour_ts;
70
71 elapsed_seconds / HOUR as u64
73}
74
75pub fn next_rotation_timestamp(starting_timestamp: u64, rotation_period: u64) -> u64 {
77 if rotation_period == 0 {
79 panic!("Rotation period cannot be 0");
80 }
81 let hours_passed = hours_since(starting_timestamp);
83
84 let rotations_since_start = hours_passed.div_ceil(rotation_period);
88
89 let hours_until_next_rotation: i64 =
92 (rotations_since_start * rotation_period - hours_passed).try_into().unwrap();
93
94 if hours_until_next_rotation == 0 {
96 return next_hour_timestamp(1)
99 }
100 next_hour_timestamp(hours_until_next_rotation)
101}
102
/// Returns how many milliseconds remain until `next_rotation` (a millisecond
/// UNIX timestamp).
///
/// # Panics
/// Panics if `next_rotation` is already in the past.
pub fn millis_until_next_rotation(next_rotation: u64) -> u64 {
    let now = UNIX_EPOCH.elapsed().unwrap().as_millis() as u64;
    assert!(next_rotation >= now, "Next rotation timestamp is in the past");
    next_rotation - now
}
116
117pub fn generate_genesis(hours_rotation: u64) -> Event {
119 let timestamp = if hours_rotation == 0 {
121 INITIAL_GENESIS
122 } else {
123 let hours_passed = hours_since(INITIAL_GENESIS);
125
126 let rotations_since_genesis = hours_passed / hours_rotation;
128
129 INITIAL_GENESIS + (rotations_since_genesis * hours_rotation * HOUR as u64)
131 };
132 let header = Header { timestamp, parents: [NULL_ID; N_EVENT_PARENTS], layer: 0 };
133 Event { header, content: GENESIS_CONTENTS.to_vec() }
134}
135
136pub(super) fn replayer_log(datastore: &Path, cmd: String, value: Vec<u8>) -> Result<()> {
137 fs::create_dir_all(datastore)?;
138 let datastore = datastore.join("replayer.log");
139 if !datastore.exists() {
140 File::create(&datastore)?;
141 };
142
143 let mut file = OpenOptions::new().append(true).open(&datastore)?;
144 let v = base64::encode(&value);
145 let f = format!("{cmd} {v}");
146 writeln!(file, "{f}")?;
147
148 Ok(())
149}
150
/// RPC helper: reconstructs the event DAG recorded by `replayer_log` and
/// returns it as a JSON-RPC response.
///
/// Reads `<datastore>/replayer.log`, replays every `insert` entry into a
/// sled tree at `<datastore>/replayed_db`, then serializes the whole tree
/// into a `{"eventgraph_info": {"dag": {...}}}` JSON object.
#[cfg(feature = "rpc")]
pub async fn recreate_from_replayer_log(datastore: &Path) -> JsonResult {
    let log_path = datastore.join("replayer.log");
    // No log on disk: nothing to replay, surface a JSON-RPC parse error.
    if !log_path.exists() {
        error!("Error loading replayed log");
        return JsonResult::Error(JsonError::new(
            ErrorCode::ParseError,
            Some("Error loading replayed log".to_string()),
            1,
        ))
    };

    // NOTE(review): the unwraps below panic on I/O or decode failure rather
    // than returning a JSON-RPC error; acceptable only if this is a
    // debug/replay tool — confirm intended usage.
    let reader = load_file(&log_path).unwrap();

    let db_datastore = datastore.join("replayed_db");

    let sled_db = sled::open(db_datastore).unwrap();
    let dag = sled_db.open_tree("replayer").unwrap();

    // Replay the log: each line is "<cmd> <base64-payload>".
    for line in reader.lines() {
        let line = line.split(' ').collect::<Vec<&str>>();
        if line[0] == "insert" {
            // NOTE(review): `line[1]` panics on a malformed line that has no
            // payload field — TODO confirm the log is always well-formed.
            let v = base64::decode(line[1]).unwrap();
            let v: Event = deserialize(&v).unwrap();
            let v_se = serialize(&v);
            // Key each event by its header's id bytes.
            dag.insert(v.header.id().as_bytes(), v_se).unwrap();
        }
    }

    // Load the replayed tree back into memory as id -> Event.
    let mut graph = HashMap::new();
    for iter_elem in dag.iter() {
        let (id, val) = iter_elem.unwrap();
        let id = blake3::Hash::from_bytes((&id as &[u8]).try_into().unwrap());
        let val: Event = deserialize_async(&val).await.unwrap();
        graph.insert(id, val);
    }

    // Convert the in-memory DAG to a JSON object keyed by the id's string form.
    let json_graph = graph
        .into_iter()
        .map(|(k, v)| {
            let key = k.to_string();
            let value = JsonValue::from(v);
            (key, value)
        })
        .collect();
    let values = json_map([("dag", JsonValue::Object(json_graph))]);
    let result = JsonValue::Object(HashMap::from([("eventgraph_info".to_string(), values)]));

    JsonResponse::new(result, 1).into()
}
201
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_hours_since() {
        // Five whole hours have elapsed since a boundary five hours back.
        assert_eq!(hours_since(next_hour_timestamp(-5)), 5);

        // Zero whole hours have elapsed since the current hour's boundary.
        assert_eq!(hours_since(next_hour_timestamp(0)), 0);
    }

    #[test]
    fn test_next_rotation_timestamp() {
        // Started 10 hours ago with a 7-hour period: the second rotation
        // boundary lands 4 hours from now.
        let start = next_hour_timestamp(-10);
        assert_eq!(next_rotation_timestamp(start, 7), next_hour_timestamp(4));

        // A 1-hour period starting this hour rotates exactly one hour later.
        let this_hour: u64 = next_hour_timestamp(0);
        assert_eq!(next_rotation_timestamp(this_hour, 1), this_hour + 3_600_000u64);
    }

    #[test]
    #[should_panic]
    fn test_next_rotation_timestamp_panics_on_overflow() {
        // A u64::MAX-hour remainder cannot be represented as an i64.
        next_rotation_timestamp(0, u64::MAX);
    }

    #[test]
    #[should_panic]
    fn test_next_rotation_timestamp_panics_on_division_by_zero() {
        // A zero rotation period is rejected outright.
        next_rotation_timestamp(0, 0);
    }

    #[test]
    fn test_millis_until_next_rotation_is_within_rotation_interval() {
        // The wait until the next rotation never exceeds one full period.
        let hours_rotation = 1u64;
        let rotation_interval = hours_rotation * 3_600_000u64;
        let next = next_rotation_timestamp(INITIAL_GENESIS, hours_rotation);
        assert!(millis_until_next_rotation(next) < rotation_interval);
    }
}