From a238c7411f042955b6062b05c6a6bd24d2f682ed Mon Sep 17 00:00:00 2001
From: ppom
Date: Sun, 16 Feb 2025 12:00:00 +0100
Subject: [PATCH] Transform SledTreeExt into a struct that wraps sled::Tree

This allows having a stronger type system, as the type of a Tree is then
decided at construction, not for every function call.
This also makes the code clearer, with far fewer type annotations.
---
 src/daemon/filter.rs  |  51 +++++++++++---------
 src/daemon/sledext.rs | 106 ++++++++++++++++++++++++------------------
 2 files changed, 88 insertions(+), 69 deletions(-)

diff --git a/src/daemon/filter.rs b/src/daemon/filter.rs
index de46e95..4e7963d 100644
--- a/src/daemon/filter.rs
+++ b/src/daemon/filter.rs
@@ -14,20 +14,25 @@ use crate::{
     protocol::{Order, PatternStatus},
 };
 
-use super::{shutdown::ShutdownToken, SledDbExt, SledTreeExt};
+use super::{shutdown::ShutdownToken, SledDbExt, Tree};
 
 #[derive(Clone)]
 pub struct FilterManager {
+    /// The Filter being managed
     filter: &'static Filter,
+    /// Does the filter have at least one action with an `after` directive?
has_after: bool, + /// Permits to limit concurrency of actions execution exec_limit: Option>, + /// Permits to run pending actions on shutdown shutdown: ShutdownToken, - /// sled::Tree equivalent to BTreeMap> - matches: sled::Tree, - /// sled::Tree equivalent to BTreeMap - ordered_times: sled::Tree, - /// sled::Tree equivalent to BTreeMap> - triggers: sled::Tree, + /// Saves all the current Matches for this Filter + matches: Tree>, + /// Alternative view of the current Matches for O(1) cleaning of old Matches + /// without added async Tasks to remove them + ordered_times: Tree, + /// Saves all the current Triggers for this Filter + triggers: Tree>, } #[allow(clippy::unwrap_used)] @@ -96,7 +101,7 @@ impl FilterManager { let mut cs: BTreeMap<_, _> = self .matches - .iter_ext::>() + .iter() // match filtering .filter(|(match_, _)| is_match(match_)) .map(|(match_, times)| { @@ -116,7 +121,7 @@ impl FilterManager { let now = Local::now(); for (match_, times) in self .triggers - .iter_ext::>() + .iter() // match filtering .filter(|(match_, _)| is_match(match_)) { @@ -189,13 +194,13 @@ impl FilterManager { fn add_match(&self, m: &Match, t: Time) { // FIXME do this in a transaction self.matches - .fetch_and_update_ext::, _>(m, |set| { + .fetch_and_update(m, |set| { let mut set = set.unwrap_or_default(); set.insert(t); Some(set) }) .unwrap(); - self.ordered_times.insert_ext::(&t, m).unwrap(); + self.ordered_times.insert(&t, m).unwrap(); } fn add_trigger(&self, m: &Match, t: Time) { @@ -203,7 +208,7 @@ impl FilterManager { if self.has_after { // Add the (Match, Time) to the triggers map self.triggers - .fetch_and_update_ext::, _>(m, |map| { + .fetch_and_update(m, |map| { let mut map = map.unwrap_or_default(); map.insert(t, self.filter.actions().len()); Some(map) @@ -215,9 +220,9 @@ impl FilterManager { // Completely remove a Match from the matches fn remove_match(&self, m: &Match) { // FIXME do this in a transaction - if let Some(times) = self.matches.remove_ext::>(m) { + if 
let Some(times) = self.matches.remove(m) { for t in times { - self.ordered_times.remove_ext::(&t); + self.ordered_times.remove(&t); } } } @@ -225,7 +230,7 @@ impl FilterManager { /// Completely remove a Match from the triggers fn remove_trigger(&self, m: &Match) { // FIXME do this in a transaction - self.triggers.remove_ext::>(m); + self.triggers.remove(m); } /// Returns whether we should still execute an action for this (Match, Time) trigger @@ -234,7 +239,7 @@ impl FilterManager { if self.has_after { let mut exec_needed = false; self.triggers - .fetch_and_update_ext::, _>(&m, |map| { + .fetch_and_update(&m, |map| { map.map(|mut map| { if let Some(counter) = map.get(&t) { exec_needed = true; @@ -261,7 +266,7 @@ impl FilterManager { let retry_duration = self.filter.retry_duration().unwrap_or_default(); while self .ordered_times - .first_ext::() + .first() .unwrap() .is_some_and(|(t, _)| t + retry_duration < now) { @@ -270,11 +275,11 @@ impl FilterManager { // second unwrap: we just checked in the condition that first is_some let (t, m) = self .ordered_times - .pop_min_ext::() + .pop_min() .unwrap() .unwrap(); self.matches - .fetch_and_update_ext::, _>(&m, |set| { + .fetch_and_update(&m, |set| { let mut set = set.unwrap(); set.remove(&t); Some(set) @@ -285,7 +290,7 @@ impl FilterManager { fn get_times(&self, m: &Match) -> usize { self.matches - .get_ext::>(m) + .get(m) .unwrap() .map(|v| v.len()) .unwrap_or(0) @@ -295,7 +300,7 @@ impl FilterManager { let longuest_action_duration = self.filter.longuest_action_duration(); let number_of_actions = self.filter.actions().len(); - for (m, map) in self.triggers.iter_ext::>() { + for (m, map) in self.triggers.iter() { let new_map: BTreeMap<_, _> = map .into_iter() // Keep only times that are still relevant @@ -306,12 +311,12 @@ impl FilterManager { if new_map.is_empty() { // No upcoming time, delete the entry from the Tree - self.triggers.remove_ext::>(&m); + self.triggers.remove(&m); } else { // Insert back the upcoming 
times let _ = self .triggers - .insert_ext::>(&m, &new_map); + .insert(&m, &new_map); // Schedule the upcoming times for t in new_map.into_keys() { diff --git a/src/daemon/sledext.rs b/src/daemon/sledext.rs index 5726109..15a5b5f 100644 --- a/src/daemon/sledext.rs +++ b/src/daemon/sledext.rs @@ -1,19 +1,26 @@ -use std::collections::BTreeSet; +use std::{ + collections::{BTreeMap, BTreeSet}, + marker::PhantomData, +}; use serde::{de::DeserializeOwned, Serialize}; use sled::Result; -use crate::concepts::{Config, Filter}; +use crate::concepts::{Config, Filter, Match, Time}; -/// This trait permits to streamline how to open the correct [`Tree`]s -/// so that we can reliably open the same Trees in multiple places -/// -/// [`Tree`]: https://docs.rs/sled/latest/sled/struct.Tree.html +/// This trait permits to manage in a single place what are the names of [`sled::Tree`]s in +/// reaction. It streamlines [`sled::Tree`]s opening so that we can reliably open the same Trees in +/// multiple places. +/// It also permits to manage the cleanup of unused trees. 
pub trait SledDbExt { - fn open_filter_matches_tree(&self, filter: &Filter) -> Result; - fn open_filter_ordered_times_tree(&self, filter: &Filter) -> Result; - fn open_filter_triggers_tree(&self, filter: &Filter) -> Result; + fn open_filter_matches_tree(&self, filter: &Filter) -> Result>>; + fn open_filter_ordered_times_tree(&self, filter: &Filter) -> Result>; + fn open_filter_triggers_tree( + &self, + filter: &Filter, + ) -> Result>>; + fn cleanup_unused_trees(&self, config: &Config); } @@ -34,14 +41,22 @@ fn filter_triggers_tree_name(filter: &Filter) -> String { } impl SledDbExt for sled::Db { - fn open_filter_matches_tree(&self, filter: &Filter) -> Result { + fn open_filter_matches_tree(&self, filter: &Filter) -> Result>> { self.open_tree(filter_matches_tree_name(filter).as_bytes()) + .map(|tree| Tree::new(tree)) } - fn open_filter_ordered_times_tree(&self, filter: &Filter) -> Result { + + fn open_filter_ordered_times_tree(&self, filter: &Filter) -> Result> { self.open_tree(filter_ordered_times_tree_name(filter).as_bytes()) + .map(|tree| Tree::new(tree)) } - fn open_filter_triggers_tree(&self, filter: &Filter) -> Result { + + fn open_filter_triggers_tree( + &self, + filter: &Filter, + ) -> Result>> { self.open_tree(filter_triggers_tree_name(filter).as_bytes()) + .map(|tree| Tree::new(tree)) } fn cleanup_unused_trees(&self, config: &Config) { @@ -75,44 +90,41 @@ impl SledDbExt for sled::Db { } } -/// This trait just permits to have less verbose typing in the next trait -pub trait KV: Serialize + DeserializeOwned {} -impl KV for T where T: Serialize + DeserializeOwned {} - -/// This trait permits to have typed Trees and avoid handling the de/serialization in business logic. -/// key and value types must be [`Serialize`] and [`Deserialize`]. 
-/// -/// [`Serialize`]: https://docs.rs/serde/latest/serde/trait.Serialize.html -/// [`Deserialize`]: https://docs.rs/serde/latest/serde/trait.Deserialize.html -pub trait SledTreeExt { - fn get_ext(&self, k: &K) -> Result>; - fn remove_ext(&self, k: &K) -> Option; - fn first_ext(&self) -> Result>; - fn pop_min_ext(&self) -> Result>; - fn fetch_and_update_ext(&self, k: &K, f: F) -> Result> - where - F: FnMut(Option) -> Option; - fn insert_ext(&self, k: &K, v: &V) -> Result>; - #[allow(clippy::needless_lifetimes)] // I find this clearer with 2 lifetimes - fn iter_ext<'a, 'b, K: KV, V: KV>(&'a self) -> impl Iterator + 'b; +/// This [`sled::Tree`] wrapper permits to have typed Trees and avoid handling the de/serialization in +/// business logic. +/// Key and value types must be [`serde::Serialize`] and [`serde::Deserialize`]. +#[derive(Clone)] +pub struct Tree { + tree: sled::Tree, + _k_marker: PhantomData, + _v_marker: PhantomData, } #[allow(clippy::unwrap_used)] -impl SledTreeExt for sled::Tree { - fn get_ext(&self, k: &K) -> Result> { - let k = bincode::serialize(k).unwrap(); - Ok(self.get(k)?.map(|v| bincode::deserialize(&v).unwrap())) +impl Tree { + fn new(tree: sled::Tree) -> Self { + Self { + tree, + _k_marker: PhantomData::, + _v_marker: PhantomData::, + } } - fn remove_ext(&self, key: &K) -> Option { + pub fn get(&self, k: &K) -> Result> { + let k = bincode::serialize(k).unwrap(); + Ok(self.tree.get(k)?.map(|v| bincode::deserialize(&v).unwrap())) + } + + pub fn remove(&self, key: &K) -> Option { let key = bincode::serialize(key).unwrap(); - self.remove(key) + self.tree + .remove(key) .unwrap() .map(|value| bincode::deserialize(&value).unwrap()) } - fn first_ext(&self) -> Result> { - let option = self.first()?; + pub fn first(&self) -> Result> { + let option = self.tree.first()?; match option { None => Ok(None), Some((k, v)) => { @@ -123,8 +135,8 @@ impl SledTreeExt for sled::Tree { } } - fn pop_min_ext(&self) -> Result> { - let option = self.pop_min()?; + pub 
fn pop_min(&self) -> Result> { + let option = self.tree.pop_min()?; match option { None => Ok(None), Some((k, v)) => { @@ -135,7 +147,7 @@ impl SledTreeExt for sled::Tree { } } - fn fetch_and_update_ext(&self, k: &K, mut f: F) -> Result> + pub fn fetch_and_update(&self, k: &K, mut f: F) -> Result> where F: FnMut(Option) -> Option, { @@ -145,14 +157,16 @@ impl SledTreeExt for sled::Tree { f(v).map(|v| bincode::serialize(&v).unwrap()) }; Ok(self + .tree .fetch_and_update(k, f)? .map(|v| bincode::deserialize::(&v).unwrap())) } - fn insert_ext(&self, k: &K, v: &V) -> Result> { + pub fn insert(&self, k: &K, v: &V) -> Result> { let k = bincode::serialize(k).unwrap(); let v = bincode::serialize(v).unwrap(); Ok(self + .tree .insert(k, v)? .map(|v| bincode::deserialize(&v).unwrap())) } @@ -160,8 +174,8 @@ impl SledTreeExt for sled::Tree { // The lifetime annotations permit to decouple the lifetime of self // from the limetime of the Iterator #[allow(clippy::needless_lifetimes)] // I find this clearer with 2 lifetimes - fn iter_ext<'a, 'b, K: KV, V: KV>(&'a self) -> impl Iterator + 'b { - self.iter().map(|elt| { + pub fn iter<'a, 'b>(&'a self) -> impl Iterator + 'b { + self.tree.iter().map(|elt| { let (k, v) = elt.unwrap(); let k: K = bincode::deserialize(&k).unwrap(); let v: V = bincode::deserialize(&v).unwrap();