parent
a1c9f4cd39
commit
dd4e231048
@ -0,0 +1,204 @@
|
|||||||
|
//! Handling store mutation
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
impl Store
|
||||||
|
{
|
||||||
|
/// Insert this entry into the data table, overwriting any identically hashed one and returning it.
|
||||||
|
pub fn insert_overwrite(&mut self, ent: Entry) -> Option<Entry>
|
||||||
|
{
|
||||||
|
let old = self.remove(ent.hash());
|
||||||
|
|
||||||
|
let hash_idx = self.data_hashes.insert(*ent.hash());
|
||||||
|
for tag in ent.tags.iter() {
|
||||||
|
if let Some(&ti) = self.tags.get(tag) {
|
||||||
|
// This tag has an entry already, append to it
|
||||||
|
self.tag_mappings.get_mut(ti).unwrap().insert(hash_idx);
|
||||||
|
} else {
|
||||||
|
// This tag has no entry, create it
|
||||||
|
let ti = self.tag_mappings.insert(iter![hash_idx].collect());
|
||||||
|
self.tags.insert(tag.clone(), ti);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.data.insert(ent);
|
||||||
|
old
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Insert this entry then return a reference to it.
|
||||||
|
pub fn insert(&mut self, ent: Entry) -> &Entry
|
||||||
|
{
|
||||||
|
let ffd = *ent.hash();
|
||||||
|
self.insert_overwrite(ent);
|
||||||
|
self.data.get(&ffd).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Mutate this entry in place if it exists.
|
||||||
|
///
|
||||||
|
/// See [`map_entry`].
|
||||||
|
pub fn mutate_entry_in_place<T, F>(&mut self, ent_id: &EntryKey, f: F) -> Option<T>
|
||||||
|
where F: FnOnce(&mut Entry) -> T
|
||||||
|
{
|
||||||
|
if let Some(mut ent) = self.data.take(ent_id) {
|
||||||
|
let ohash = ent.hash().clone();
|
||||||
|
let otags = ent.tags.clone();
|
||||||
|
let out = f(&mut ent);
|
||||||
|
let new = ent;
|
||||||
|
|
||||||
|
let iidx = if new.hash() != &ohash {
|
||||||
|
// We need to update `data_hashes`.
|
||||||
|
for (_, hash) in self.data_hashes.iter_mut()
|
||||||
|
{
|
||||||
|
if hash == &ohash {
|
||||||
|
*hash = *new.hash();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.reverse_index_lookup(new.hash()).unwrap()
|
||||||
|
} else {
|
||||||
|
self.reverse_index_lookup(&ohash).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
if &new.tags[..] != &otags[..] {
|
||||||
|
// We need to update tag mappings
|
||||||
|
|
||||||
|
|
||||||
|
let ntags: HashSet<_> = new.tags.iter().collect();
|
||||||
|
let otags: HashSet<_> = otags.iter().collect();
|
||||||
|
// Find the ones that were removed and added in parallel.
|
||||||
|
for (t, u) in ntags.iter().zip(otags.iter())
|
||||||
|
{
|
||||||
|
if !otags.contains(t) {
|
||||||
|
// It was added
|
||||||
|
self.insert_tag_for_idx(t, iidx);
|
||||||
|
}
|
||||||
|
if !ntags.contains(u) {
|
||||||
|
// It was removed
|
||||||
|
self.remove_tag_for_idx(t, iidx);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
self.data.insert(new);
|
||||||
|
Some(out)
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Map the entry with this function, updating references to it if needed.
|
||||||
|
///
|
||||||
|
/// If the hash of the entry if modified by this map, then the hashes indecies are updated to the new hash.
|
||||||
|
pub fn map_entry<F>(&mut self, ent_id: &EntryKey, f: F)
|
||||||
|
where F: FnOnce(Entry) -> Entry
|
||||||
|
{
|
||||||
|
if let Some(ent) = self.data.take(ent_id) {
|
||||||
|
let ohash = ent.hash().clone();
|
||||||
|
let new = f(ent);
|
||||||
|
if new.hash() != &ohash {
|
||||||
|
// We need to update `data_hashes`.
|
||||||
|
for (_, hash) in self.data_hashes.iter_mut()
|
||||||
|
{
|
||||||
|
if hash == &ohash {
|
||||||
|
*hash = *new.hash();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.data.insert(new);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/// Remove this entry, and return it, if it was set.
|
||||||
|
pub fn remove(&mut self, key: &EntryKey) -> Option<Entry>
|
||||||
|
{
|
||||||
|
if let Some(entry) = self.data.take(key) {
|
||||||
|
Some(self.cleanup_remove_entry(entry))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Perform cleanup on an entry *already removed* from `data`.
///
/// Strips the entry's cache, drops any tag that no longer maps to a live
/// entry, and triggers a (possibly deferred) purge of dead `data_hashes`
/// slots. Returns the cleaned entry.
fn cleanup_remove_entry(&mut self, ent: Entry) -> Entry
{
    // Drop cached data before handing the entry back to the caller.
    let ent = ent.with_no_cache();
    // Remove any unused tags
    // NOTE(review): `precollect!` presumably materializes the iterator so the
    // `self` borrow from `tag_index_lookup` ends before the mutations below —
    // confirm against the macro's definition.
    for (nm, ti) in precollect!(self.tag_index_lookup(&ent.tags[..]).map(|(nm, idx)| ({
        ent.tags.iter().filter(|y| y.as_str() == nm).next().unwrap() // swap the `nm` reference to the owned reference in `ent`'s tags... There should be a better way that this eh
    }, idx))) {
        // `purge_tag_index` returns true when no other entry references this tag.
        if self.purge_tag_index(ti, ent.hash()) {
            // No more mappings, remove this tag
            self.tags.remove(nm);
            // And its mapping
            self.tag_mappings.remove(ti);
        }
    }
    // Remove from data hashes can be deferred
    self.purge_if_needed();
    ent
}
|
||||||
|
|
||||||
|
/// Purge this tag index from the mappings for the entry `to_remove`.
|
||||||
|
/// Returns true if there are no more references to this tag and it can be removed.
|
||||||
|
#[inline] fn purge_tag_index(&mut self, idx: ArenaIndex, to_remove: &EntryKey) -> bool
|
||||||
|
{
|
||||||
|
let data_hashes = &mut self.data_hashes;
|
||||||
|
if let Some(map) = self.tag_mappings.get_mut(idx) {
|
||||||
|
map.retain(move |&hash_idx| data_hashes.get(hash_idx).map(|x| x != to_remove).unwrap_or(false));
|
||||||
|
map.len() == 0
|
||||||
|
} else {
|
||||||
|
// There is no reference in the tag mapping itself.
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove dead mappings from `data_hashes` to `data`.
|
||||||
|
#[inline] fn purge_data_hash_mappings(&mut self)
|
||||||
|
{
|
||||||
|
let data = &self.data;
|
||||||
|
self.data_hashes.retain(move |_, hash| data.get(hash).is_some());
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Purge the arena mapping if threshold of dead entries is reached, otherwise defer it.
|
||||||
|
#[inline] fn purge_if_needed(&mut self)
|
||||||
|
{
|
||||||
|
if self.purge_track.should_purge() {
|
||||||
|
self.purge_data_hash_mappings();
|
||||||
|
self.purge_track.num_removals = 0;
|
||||||
|
} else {
|
||||||
|
self.purge_track.num_removals += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tag specific stuff
|
||||||
|
impl Store
|
||||||
|
{
|
||||||
|
/// Remove a mapping for this tag string to this specific hash index, cleaning up the tag mappings if needed.
///
/// If removing `hash_idx` leaves the tag's mapping set empty (or the mapping
/// is already gone), the tag itself is dropped from `tags` as well.
#[inline] fn remove_tag_for_idx(&mut self, tag: impl AsRef<str>, hash_idx: ArenaIndex)
{
    let tag = tag.as_ref();
    if let Some(&ti) = self.tags.get(tag) {
        // The closure removes `hash_idx` from the mapping set and yields the
        // remaining count, so the match below can decide what to clean up.
        match self.tag_mappings.get_mut(ti).map(|x| {x.remove(&hash_idx); x.len()}) {
            Some(0) => no_op!(self.tag_mappings.remove(ti)), // there is only 1 mapping, remove it and then remove the tag (TODO: Should we keep the tag in the btree as cache? TODO: Add this to `PurgeTrack`)
            None => (), // there is no mapping, just remove the tag
            _ => return, //don't remove the tag, there's other references in the mapping
        }
        // Reached only from the `Some(0)` / `None` arms: drop the tag itself.
        self.tags.remove(tag);
    }
}
|
||||||
|
/// Insert a mapping for this tag string to this single hash index, creating it if needed
|
||||||
|
#[inline] fn insert_tag_for_idx(&mut self, tag: impl AsRef<str>, hash_idx: ArenaIndex)
|
||||||
|
{
|
||||||
|
let tag = tag.as_ref();
|
||||||
|
if let Some(&ti) = self.tags.get(tag) {
|
||||||
|
// This tag has an entry already, append to it
|
||||||
|
self.tag_mappings.get_mut(ti).unwrap().insert(hash_idx);
|
||||||
|
} else {
|
||||||
|
// This tag has no entry, create it
|
||||||
|
let ti = self.tag_mappings.insert(iter![hash_idx].collect());
|
||||||
|
self.tags.insert(tag.to_owned(), ti);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
Loading…
Reference in new issue