From 2511939f4ee3c9eda29851ae74a1c5ac14323290 Mon Sep 17 00:00:00 2001
From: Avril
Date: Tue, 22 Dec 2020 13:51:09 +0000
Subject: [PATCH] improve removal code

---
 src/data/entry.rs    |   6 ++
 src/data/mutation.rs | 137 ++++++++++++++++++-------------------------
 2 files changed, 64 insertions(+), 79 deletions(-)

diff --git a/src/data/entry.rs b/src/data/entry.rs
index efd1221..60759a8 100644
--- a/src/data/entry.rs
+++ b/src/data/entry.rs
@@ -59,6 +59,12 @@ impl Entry
             ..self
         }
     }
+
+    /// Clone the data needed for a refresh of this entry in a store before it is mutated.
+    #[inline(always)] pub(super) fn prepare_for_refresh(&self) -> (EntryKey, Box<[String]>)
+    {
+        (self.hash().clone(), self.tags.clone().into_boxed_slice())
+    }
 }
 
 impl Borrow<EntryKey> for Entry
diff --git a/src/data/mutation.rs b/src/data/mutation.rs
index 221b182..7ca58c6 100644
--- a/src/data/mutation.rs
+++ b/src/data/mutation.rs
@@ -1,5 +1,6 @@
 //! Handling store mutation
 use super::*;
+use std::borrow::Borrow;
 
 impl Store
 {
@@ -10,16 +11,8 @@ impl Store
         let hash_idx = self.data_hashes.insert(*ent.hash());
         for tag in ent.tags.iter()
         {
-            if let Some(&ti) = self.tags.get(tag) {
-                // This tag has an entry already, append to it
-                self.tag_mappings.get_mut(ti).unwrap().insert(hash_idx);
-            } else {
-                // This tag has no entry, create it
-                let ti = self.tag_mappings.insert(iter![hash_idx].collect());
-                self.tags.insert(tag.clone(), ti);
-            }
+            self.insert_tag_for_idx(tag, hash_idx);
         }
-
         self.data.insert(ent);
         old
     }
@@ -39,51 +32,62 @@ impl Store
     where F: FnOnce(&mut Entry) -> T
     {
         if let Some(mut ent) = self.data.take(ent_id) {
-            let ohash = ent.hash().clone();
-            let otags = ent.tags.clone();
+            let update = ent.prepare_for_refresh();
+
             let out = f(&mut ent);
             let new = ent;
 
-            let iidx = if new.hash() != &ohash {
-                // We need to update `data_hashes`.
-                for (_, hash) in self.data_hashes.iter_mut()
-                {
-                    if hash == &ohash {
-                        *hash = *new.hash();
-                        break;
-                    }
-                }
-                self.reverse_index_lookup(new.hash()).unwrap()
-            } else {
-                self.reverse_index_lookup(&ohash).unwrap()
-            };
+            self.refresh_for_entry(update, &new);
 
-            if &new.tags[..] != &otags[..] {
-                // We need to update tag mappings
-
-
-                let ntags: HashSet<_> = new.tags.iter().collect();
-                let otags: HashSet<_> = otags.iter().collect();
-                // Find the ones that were removed and added in parallel.
-                for (t, u) in ntags.iter().zip(otags.iter())
-                {
-                    if !otags.contains(t) {
-                        // It was added
-                        self.insert_tag_for_idx(t, iidx);
-                    }
-                    if !ntags.contains(u) {
-                        // It was removed
-                        self.remove_tag_for_idx(t, iidx);
-                    }
-                }
-
-            }
             self.data.insert(new);
             Some(out)
         } else {
             None
         }
     }
+
+    /// Update an entry that may have been modified.
+    ///
+    /// The entry's old hash and tags are passed, and it updates the store to reflect the new entry mutation.
+    /// You must still insert the new entry after this.
+    fn refresh_for_entry(&mut self, (ohash, otags): (impl Borrow<EntryKey>, impl AsRef<[String]>), new: &Entry)
+    {
+        let ohash = ohash.borrow();
+        let iidx = if new.hash() != ohash {
+            // We need to update `data_hashes`.
+            for (_, hash) in self.data_hashes.iter_mut()
+            {
+                if hash == ohash {
+                    *hash = *new.hash();
+                    break;
+                }
+            }
+            self.reverse_index_lookup(new.hash()).unwrap()
+        } else {
+            self.reverse_index_lookup(ohash).unwrap()
+        };
+
+        let otags = otags.as_ref();
+        if &new.tags[..] != &otags[..] {
+            // We need to update tag mappings
+
+            let ntags: HashSet<_> = new.tags.iter().collect();
+            let otags: HashSet<_> = otags.iter().collect();
+            // Find the ones that were removed and added in parallel.
+            for (t, u) in ntags.iter().zip(otags.iter())
+            {
+                if !otags.contains(t) {
+                    // It was added
+                    self.insert_tag_for_idx(t, iidx);
+                }
+                if !ntags.contains(u) {
+                    // It was removed
+                    self.remove_tag_for_idx(t, iidx);
+                }
+            }
+
+        }
+    }
 
     /// Map the entry with this function, updating references to it if needed.
     ///
@@ -92,18 +96,11 @@ impl Store
     where F: FnOnce(Entry) -> Entry
     {
         if let Some(ent) = self.data.take(ent_id) {
-            let ohash = ent.hash().clone();
+            let update = ent.prepare_for_refresh();
             let new = f(ent);
-            if new.hash() != &ohash {
-                // We need to update `data_hashes`.
-                for (_, hash) in self.data_hashes.iter_mut()
-                {
-                    if hash == &ohash {
-                        *hash = *new.hash();
-                        break;
-                    }
-                }
-            }
+
+            self.refresh_for_entry(update, &new);
+
             self.data.insert(new);
         }
     }
@@ -113,7 +110,7 @@ impl Store
     pub fn remove(&mut self, key: &EntryKey) -> Option<Entry>
     {
         if let Some(entry) = self.data.take(key) {
-            Some(self.cleanup_remove_entry(entry))
+            Some(self.cleanup_remove_entry(entry.with_no_cache()))
         } else {
             None
         }
@@ -122,37 +119,19 @@ impl Store
     /// Preform cleanup on an entry *already removed* from `data`.
     fn cleanup_remove_entry(&mut self, ent: Entry) -> Entry
     {
-        let ent = ent.with_no_cache();
         // Remove any unused tags
-        for (nm, ti) in precollect!(self.tag_index_lookup(&ent.tags[..]).map(|(nm, idx)| ({
-            ent.tags.iter().filter(|y| y.as_str() == nm).next().unwrap() // swap the `nm` reference to the owned reference in `ent`'s tags... There should be a better way that this eh
-        }, idx))) {
-            if self.purge_tag_index(ti, ent.hash()) {
-                // No more mappings, remove this tag
-                self.tags.remove(nm);
-                // And its mapping
-                self.tag_mappings.remove(ti);
+        if let Some(hash_idx) = self.reverse_index_lookup(ent.hash()) {
+            for tag in ent.tags.iter()
+            {
+                self.remove_tag_for_idx(tag, hash_idx);
             }
         }
+
         // Remove from data hashes can be deferred
         self.purge_if_needed();
         ent
     }
 
-    /// Purge this tag index from the mappings for the entry `to_remove`.
-    /// Returns true if there are no more references to this tag and it can be removed.
-    #[inline] fn purge_tag_index(&mut self, idx: ArenaIndex, to_remove: &EntryKey) -> bool
-    {
-        let data_hashes = &mut self.data_hashes;
-        if let Some(map) = self.tag_mappings.get_mut(idx) {
-            map.retain(move |&hash_idx| data_hashes.get(hash_idx).map(|x| x != to_remove).unwrap_or(false));
-            map.len() == 0
-        } else {
-            // There is no reference in the tag mapping itself.
-            false
-        }
-    }
-
     /// Remove dead mappings from `data_hashes` to `data`.
     #[inline] fn purge_data_hash_mappings(&mut self)
     {