diff --git a/Cargo.toml b/Cargo.toml
index a9faf3d..ccfb980 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "cht"
-version = "0.1.1"
+version = "0.1.2"
 authors = ["Gregory Meyer "]
 edition = "2018"
 description = "Lockfree resizeable concurrent hash table."
diff --git a/src/lib.rs b/src/lib.rs
index 9b139ce..1fc99ee 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -32,7 +32,6 @@
 //! [Junction]: https://github.com/preshing/junction
 
 pub mod map;
-mod set; // it's not ready yet
 
 pub use map::HashMap;
diff --git a/src/map.rs b/src/map.rs
index 43f0434..93d15b2 100644
--- a/src/map.rs
+++ b/src/map.rs
@@ -27,6 +27,7 @@
 use std::{
     borrow::Borrow,
+    collections::HashSet,
     hash::{BuildHasher, Hash, Hasher},
     mem,
     sync::{
@@ -35,7 +36,7 @@ use std::{
     },
 };
 
-use crossbeam_epoch::{self, Atomic, Guard, Owned, Shared};
+use crossbeam_epoch::{self, Atomic, Guard, Owned, Pointer, Shared};
 use fxhash::FxBuildHasher;
 
 /// A lockfree concurrent hash map implemented with open addressing and linear
@@ -486,15 +487,10 @@ impl<'g, K: Hash + Eq, V, S: 'g + BuildHasher> HashMap<K, V, S> {
         }
 
         let buckets = unsafe { buckets_ptr.deref() };
-        let BucketAndParentPtr {
-            bucket_ptr: found_bucket_ptr,
-            parent_ptr: new_buckets_ptr,
-        } = buckets.get(key, hash, guard);
-
-        if !new_buckets_ptr.is_null() {
-            self.buckets
-                .compare_and_set(buckets_ptr, new_buckets_ptr, Ordering::SeqCst, guard)
-                .ok();
+        let (found_bucket_ptr, redirected) = buckets.get(key, hash, guard);
+
+        if redirected {
+            self.try_swing_bucket_array_ptr(buckets_ptr, guard);
         }
 
         unsafe { found_bucket_ptr.as_ref() }
@@ -513,10 +509,7 @@ impl<'g, K: Hash + Eq, V, S: 'g + BuildHasher> HashMap<K, V, S> {
         })
         .into_shared(guard);
 
-        let BucketAndParentPtr {
-            bucket_ptr: previous_bucket_ptr,
-            parent_ptr: new_buckets_ptr,
-        } = buckets.insert(new_bucket, hash, guard);
+        let (previous_bucket_ptr, redirected) = buckets.insert(new_bucket, hash, guard);
 
         if let Some(previous_bucket) = unsafe { previous_bucket_ptr.as_ref() } {
             if previous_bucket.maybe_value.is_none() {
@@ -526,10 +519,12 @@ impl<'g, K: Hash + Eq, V, S: 'g + BuildHasher> HashMap<K, V, S> {
             self.len.fetch_add(1, Ordering::SeqCst);
         }
 
-        if !new_buckets_ptr.is_null() {
-            self.buckets
-                .compare_and_set(buckets_ptr, new_buckets_ptr, Ordering::SeqCst, guard)
-                .ok();
+        if !previous_bucket_ptr.is_null() {
+            unsafe { guard.defer_destroy(previous_bucket_ptr) };
+        }
+
+        if redirected {
+            self.try_swing_bucket_array_ptr(buckets_ptr, guard);
         }
 
         unsafe { previous_bucket_ptr.as_ref() }
@@ -552,11 +547,16 @@ impl<'g, K: Hash + Eq, V, S: 'g + BuildHasher> HashMap<K, V, S> {
         let buckets_ref = unsafe { buckets_ptr.deref() };
         let hash = self.get_hash(key);
 
-        unsafe { buckets_ref.remove(key, hash, None, guard).as_ref() }.map(|b| {
+        let removed_ptr = buckets_ref.remove(key, hash, None, guard);
+
+        if !removed_ptr.is_null() {
+            unsafe { guard.defer_destroy(removed_ptr) };
             self.len.fetch_sub(1, Ordering::SeqCst);
 
-            b
-        })
+            Some(unsafe { removed_ptr.deref() })
+        } else {
+            None
+        }
     }
 
     fn get_hash<Q: Hash + ?Sized>(&self, key: &Q) -> u64 {
@@ -583,7 +583,7 @@ impl<'g, K: Hash + Eq, V, S: 'g + BuildHasher> HashMap<K, V, S> {
         };
 
         match self.buckets.compare_and_set_weak(
-            buckets_ptr,
+            Shared::null(),
             new_buckets,
             Ordering::SeqCst,
             guard,
@@ -599,6 +599,62 @@ impl<'g, K: Hash + Eq, V, S: 'g + BuildHasher> HashMap<K, V, S> {
             }
         }
     }
+
+    fn try_swing_bucket_array_ptr(
+        &self,
+        current_ptr: Shared<'g, BucketArray<K, V, S>>,
+        guard: &'g Guard,
+    ) {
+        assert!(!current_ptr.is_null());
+
+        let current = unsafe { current_ptr.deref() };
+        let next_array_ptr = current.next_array.load(Ordering::SeqCst, guard);
+        assert!(!next_array_ptr.is_null());
+
+        if self
+            .buckets
+            .compare_and_set_weak(current_ptr, next_array_ptr, Ordering::SeqCst, guard)
+            .is_ok()
+        {
+            unsafe {
+                guard.defer_destroy(current_ptr);
+            }
+        }
+    }
+}
+
+impl<K: Hash + Eq, V, S: BuildHasher> Drop for HashMap<K, V, S> {
+    fn drop(&mut self) {
+        let guard = unsafe { crossbeam_epoch::unprotected() };
+
+        let mut buckets_ptr = self.buckets.swap(Shared::null(), Ordering::SeqCst, guard);
+        let mut freed_buckets = HashSet::with_hasher(FxBuildHasher::default());
+
+        while !buckets_ptr.is_null() {
+            let this_bucket_array = unsafe { buckets_ptr.deref() };
+            let new_buckets_ptr =
+                this_bucket_array
+                    .next_array
+                    .swap(Shared::null(), Ordering::SeqCst, guard);
+
+            for this_bucket in this_bucket_array.buckets.iter() {
+                let this_bucket_ptr = this_bucket
+                    .swap(Shared::null(), Ordering::SeqCst, guard)
+                    .with_tag(0);
+
+                if this_bucket_ptr.is_null() {
+                    continue;
+                }
+
+                if freed_buckets.insert(this_bucket_ptr.into_usize()) {
+                    mem::drop(unsafe { this_bucket_ptr.into_owned() });
+                }
+            }
+
+            mem::drop(unsafe { buckets_ptr.into_owned() });
+            buckets_ptr = new_buckets_ptr;
+        }
+    }
 }
 
 struct BucketArray<K: Hash + Eq, V, S: BuildHasher> {
@@ -634,7 +690,7 @@ impl<'g, K: Hash + Eq, V, S: BuildHasher> BucketArray<K, V, S> {
         key: &Q,
         hash: u64,
         guard: &'g Guard,
-    ) -> BucketAndParentPtr<'g, K, V, S>
+    ) -> (Shared<'g, Bucket<K, V>>, bool)
     where
         K: Borrow<Q>,
     {
@@ -650,7 +706,7 @@ impl<'g, K: Hash + Eq, V, S: BuildHasher> BucketArray<K, V, S> {
            if this_bucket_ref.key.borrow() != key {
                continue;
            } else if this_bucket_ptr.tag() != REDIRECT_TAG {
-                return BucketAndParentPtr::without_parent(this_bucket_ptr);
+                return (this_bucket_ptr, false);
            }
 
            let next_array_ptr = self.next_array.load(Ordering::SeqCst, guard);
@@ -658,15 +714,16 @@ impl<'g, K: Hash + Eq, V, S: BuildHasher> BucketArray<K, V, S> {
                let next_array = unsafe { next_array_ptr.deref() };
                self.grow_into(next_array, guard);
 
-                return next_array
-                    .get(key, hash, guard)
-                    .parent_ptr_or(next_array_ptr);
+                let mut result = next_array.get(key, hash, guard);
+                result.1 = true;
+
+                return result;
            } else {
-                return BucketAndParentPtr::null();
+                return (Shared::null(), false);
            }
        }
 
-        BucketAndParentPtr::null()
+        (Shared::null(), false)
    }
 
    fn insert(
@@ -674,20 +731,21 @@ impl<'g, K: Hash + Eq, V, S: BuildHasher> BucketArray<K, V, S> {
        bucket_ptr: Shared<'g, Bucket<K, V>>,
        hash: u64,
        guard: &'g Guard,
-    ) -> BucketAndParentPtr<'g, K, V, S> {
+    ) -> (Shared<'g, Bucket<K, V>>, bool) {
        assert!(!bucket_ptr.is_null());
 
        let bucket = unsafe { bucket_ptr.deref() };
        let capacity = self.buckets.len();
        let len = self.len.load(Ordering::SeqCst);
 
-        let insert_into = |next_array_ptr: Shared<'g, BucketArray<K, V, S>>| {
+        let insert_into = |next_array_ptr: Shared<'_, BucketArray<K, V, S>>| {
            assert!(!next_array_ptr.is_null());
            let next_array = unsafe { next_array_ptr.deref() };
 
-            next_array
-                .insert(bucket_ptr, hash, guard)
-                .parent_ptr_or(next_array_ptr)
+            let mut result = next_array.insert(bucket_ptr, hash, guard);
+            result.1 = true;
+
+            result
        };
 
        // grow if inserting would push us over a load factor of 0.5
@@ -747,12 +805,9 @@ impl<'g, K: Hash + Eq, V, S: BuildHasher> BucketArray<K, V, S> {
                    if should_increment_len {
                        // replaced a tombstone
                        self.len.fetch_add(1, Ordering::SeqCst);
-
-                        return BucketAndParentPtr::null();
-                    } else {
-                        // swapped with something
-                        return BucketAndParentPtr::without_parent(this_bucket_ptr);
                    }
+
+                    return (this_bucket_ptr, false);
                }
                Err(e) => this_bucket_ptr = e.current,
            }
@@ -931,39 +986,6 @@ impl<'g, K: Hash + Eq, V, S: BuildHasher> BucketArray<K, V, S> {
     }
 }
 
-#[derive(Copy, Clone)]
-struct BucketAndParentPtr<'g, K: Hash + Eq, V, S: BuildHasher> {
-    bucket_ptr: Shared<'g, Bucket<K, V>>,
-    parent_ptr: Shared<'g, BucketArray<K, V, S>>,
-}
-
-impl<'g, K: Hash + Eq, V, S: BuildHasher> BucketAndParentPtr<'g, K, V, S> {
-    fn without_parent(bucket_ptr: Shared<'g, Bucket<K, V>>) -> BucketAndParentPtr<'g, K, V, S> {
-        BucketAndParentPtr {
-            bucket_ptr,
-            parent_ptr: Shared::null(),
-        }
-    }
-
-    fn null() -> BucketAndParentPtr<'g, K, V, S> {
-        BucketAndParentPtr {
-            bucket_ptr: Shared::null(),
-            parent_ptr: Shared::null(),
-        }
-    }
-
-    fn parent_ptr_or(
-        mut self,
-        parent_ptr: Shared<'g, BucketArray<K, V, S>>,
-    ) -> BucketAndParentPtr<'g, K, V, S> {
-        if self.parent_ptr.is_null() {
-            self.parent_ptr = parent_ptr;
-        }
-
-        self
-    }
-}
-
 #[repr(align(2))]
 struct Bucket<K, V> {
     key: K,
diff --git a/src/set.rs b/src/set.rs
deleted file mode 100644
index fbffe4b..0000000
--- a/src/set.rs
+++ /dev/null
@@ -1,102 +0,0 @@
-// MIT License
-//
-// Copyright (c) 2019 Gregory Meyer
-//
-// Permission is hereby granted, free of charge, to any person
-// obtaining a copy of this software and associated documentation files
-// (the "Software"), to deal in the Software without restriction,
-// including without limitation the rights to use, copy, modify, merge,
-// publish, distribute, sublicense, and/or sell copies of the Software,
-// and to permit persons to whom the Software is furnished to do so,
-// subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be
-// included in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-// BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
-// ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-// SOFTWARE.
-
-use crate::map::HashMap;
-
-use std::{
-    borrow::Borrow,
-    hash::{BuildHasher, Hash},
-};
-
-use fxhash::FxBuildHasher;
-
-pub struct HashSet<T: Hash + Eq, S: BuildHasher = FxBuildHasher> {
-    map: HashMap<T, (), S>,
-}
-
-impl<T: Hash + Eq> HashSet<T, FxBuildHasher> {
-    pub fn new() -> HashSet<T, FxBuildHasher> {
-        HashSet::with_capacity_and_hasher(0, FxBuildHasher::default())
-    }
-
-    pub fn with_capacity(capacity: usize) -> HashSet<T, FxBuildHasher> {
-        HashSet::with_capacity_and_hasher(capacity, FxBuildHasher::default())
-    }
-}
-
-impl<T: Hash + Eq, S: BuildHasher> HashSet<T, S> {
-    pub fn with_hasher(hash_builder: S) -> HashSet<T, S> {
-        HashSet::with_capacity_and_hasher(0, hash_builder)
-    }
-
-    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashSet<T, S> {
-        HashSet {
-            map: HashMap::with_capacity_and_hasher(capacity, hash_builder),
-        }
-    }
-
-    pub fn capacity(&self) -> usize {
-        self.map.capacity()
-    }
-
-    pub fn len(&self) -> usize {
-        self.map.len()
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.map.is_empty()
-    }
-
-    pub fn contains<Q: Hash + Eq + ?Sized>(&self, value: &Q) -> bool
-    where
-        T: Borrow<Q>,
-    {
-        self.map.get_and(value, |_| ()).is_some()
-    }
-
-    pub fn get<Q: Hash + Eq + ?Sized>(&self, value: &Q) -> Option<T>
-    where
-        T: Borrow<Q> + Clone,
-    {
-        self.map.get_key_value(value).map(|(k, _)| k)
-    }
-
-    pub fn insert(&self, value: T) -> bool {
-        self.map.insert_and(value, (), |_| ()).is_none()
-    }
-
-    pub fn replace(&self, value: T) -> Option<T>
-    where
-        T: Clone,
-    {
-        self.map.insert_entry(value, ()).map(|(k, _)| k)
-    }
-
-    pub fn remove<Q: Hash + Eq + ?Sized>(&self, value: &Q) -> bool
-    where
-        T: Borrow<Q> + Clone,
-    {
-        self.map.remove_and(value, |_| ()).is_some()
-    }
-}