This repository has been archived by the owner on Jan 4, 2024. It is now read-only.
Make HamtMap::new() not allocate and make atomic orderings conservative until review.
michaelwoerister committed May 31, 2018
1 parent df36d57 commit 0bde539
Showing 2 changed files with 38 additions and 16 deletions.
18 changes: 9 additions & 9 deletions benches/benches.rs
@@ -76,7 +76,7 @@ fn bench_hamt_find<IS: ItemStore<u64, u64>>(empty: HamtMap<u64, u64, IS>, count:
             let val = val + (i as u64 & 1);
 
             unsafe {
-                match map.find(&val) {
+                match test::black_box(map.find(&val)) {
                     Some(&x) => RESULTS[i] = Some(x),
                     None => RESULTS[i] = None,
                 }
@@ -93,7 +93,7 @@ fn bench_hamt_insert<IS: ItemStore<u64, u64>>(empty: HamtMap<u64, u64, IS>, coun
 
         for i in (0usize .. BENCH_INSERT_COUNT) {
             let val = keys[count + i];
-            map1 = map1.plus(val, val);
+            map1 = test::black_box(map1.plus(val, val));
         }
     })
 }
@@ -105,7 +105,7 @@ fn bench_hamt_remove<IS: ItemStore<u64, u64>>(empty: HamtMap<u64, u64, IS>, coun
         let mut map = map.clone();
 
         for x in (0 .. count).filter(|x| x % 2 == 0) {
-            map = map.minus(&keys[x]);
+            map = test::black_box(map.minus(&keys[x]));
         }
     })
 }
@@ -120,13 +120,13 @@ fn bench_std_hashmap_find(count: usize, bh: &mut Bencher) {
 
     bh.iter(|| {
         for i in (0usize .. BENCH_FIND_COUNT) {
-            let val = values[i % count];
+            let val = test::black_box(values[i % count]);
 
             // lets make about half of the lookups fail
             let val = val + (i as u64 & 1);
 
             unsafe {
-                match map.get(&val) {
+                match test::black_box(map.get(&val)) {
                     Some(&x) => RESULTS[i] = Some(x),
                     None => RESULTS[i] = None,
                 }
@@ -147,8 +147,8 @@ fn bench_std_hashmap_insert(count: usize, bh: &mut Bencher) {
         let mut map1 = map.clone();
 
         for i in (0usize .. BENCH_INSERT_COUNT) {
-            let val = values[count + i];
-            map1.insert(val, val);
+            let val = test::black_box(values[count + i]);
+            test::black_box(map1.insert(val, val));
         }
     })
 }
@@ -162,7 +162,7 @@ fn bench_std_hashmap_clone(count: usize, bh: &mut Bencher) {
     }
 
     bh.iter(|| {
-        map.clone();
+        test::black_box(map.clone());
    })
 }
 
@@ -178,7 +178,7 @@ fn bench_std_hashmap_remove(count: usize, bh: &mut Bencher) {
         let mut map1 = map.clone();
 
         for x in (0..count).filter(|x| x % 2 == 0) {
-            map1.remove(&values[x]);
+            test::black_box(map1.remove(&values[x]));
         }
     })
 }
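All of these benchmark changes funnel inputs and results through `test::black_box`, an opaque identity function that keeps the optimizer from proving the measured work unused and deleting it. A minimal sketch of the pattern, assuming the nightly `test` crate these benchmarks already use (the map contents here are illustrative):

```rust
#![feature(test)]
extern crate test;

use std::collections::HashMap;
use test::{black_box, Bencher};

#[bench]
fn bench_find(bh: &mut Bencher) {
    let map: HashMap<u64, u64> = (0u64..1000).map(|i| (i, i)).collect();
    bh.iter(|| {
        for i in 0u64..1000 {
            // Opaque input *and* opaque output: the compiler can neither
            // constant-fold the key nor discard the unused result.
            black_box(map.get(&black_box(i)));
        }
    });
}
```

Hiding only the input is not enough; an unused `Option<&u64>` result is still dead code the optimizer may remove, which is why the diff also wraps the return values of `find`, `get`, `insert`, `remove`, and `clone`.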
36 changes: 29 additions & 7 deletions src/hamt.rs
@@ -73,7 +73,7 @@ impl<K, V, IS, H> NodeRef<K, V, IS, H>
 
     fn borrow_mut<'a>(&'a mut self) -> &'a mut UnsafeNode<K, V, IS, H> {
         unsafe {
-            debug_assert!((*self.ptr).ref_count.load(Ordering::Acquire) == 1);
+            debug_assert!((*self.ptr).ref_count.load(Ordering::SeqCst) == 1);
             mem::transmute(self.ptr)
         }
     }
@@ -82,7 +82,7 @@ impl<K, V, IS, H> NodeRef<K, V, IS, H>
     // in-place modifications instead of unnecessarily copying data.
     fn try_borrow_owned<'a>(&'a mut self) -> BorrowedNodeRef<'a, K, V, IS, H> {
         unsafe {
-            if (*self.ptr).ref_count.load(Ordering::Acquire) == 1 {
+            if (*self.ptr).ref_count.load(Ordering::SeqCst) == 1 {
                 BorrowedNodeRef::Exclusive(mem::transmute(self.ptr))
             } else {
                 BorrowedNodeRef::Shared(mem::transmute(self.ptr))
@@ -95,7 +95,7 @@ impl<K, V, IS, H> Drop for NodeRef<K, V, IS, H> {
     fn drop(&mut self) {
         unsafe {
             let node: &mut UnsafeNode<K, V, IS, H> = mem::transmute(self.ptr);
-            let old_count = node.ref_count.fetch_sub(1, Ordering::Acquire);
+            let old_count = node.ref_count.fetch_sub(1, Ordering::SeqCst);
             debug_assert!(old_count >= 1);
             if old_count == 1 {
                 node.destroy();
@@ -108,7 +108,7 @@ impl<K, V, IS, H> Clone for NodeRef<K, V, IS, H> {
     fn clone(&self) -> NodeRef<K, V, IS, H> {
         unsafe {
             let node: &mut UnsafeNode<K, V, IS, H> = mem::transmute(self.ptr);
-            let old_count = node.ref_count.fetch_add(1, Ordering::Release);
+            let old_count = node.ref_count.fetch_add(1, Ordering::SeqCst);
             debug_assert!(old_count >= 1);
         }
 
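The four ordering changes above are the "conservative until review" half of the commit: every `Acquire`/`Release` on the node reference count becomes `SeqCst`, the strongest ordering, postponing the question of which weaker orderings actually suffice. Note that the replaced code paired `Acquire` with the decrement and `Release` with the increment, the inverse of the usual reference-counting discipline. For comparison, the scheme `std::sync::Arc` uses, shown as a standalone sketch rather than this repository's code:

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

struct RefCounted {
    ref_count: AtomicUsize,
    // payload fields elided
}

fn clone_ref(node: &RefCounted) {
    // A new reference is always created from an existing live one, so
    // atomicity alone suffices; the increment can be Relaxed.
    node.ref_count.fetch_add(1, Ordering::Relaxed);
}

// Assumes `node` was originally allocated via Box::into_raw.
unsafe fn drop_ref(node: *mut RefCounted) {
    // Release publishes every write made through this reference...
    if (*node).ref_count.fetch_sub(1, Ordering::Release) == 1 {
        // ...and the Acquire fence makes all of those writes visible to
        // the one thread that tears the node down.
        fence(Ordering::Acquire);
        drop(Box::from_raw(node));
    }
}
```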
@@ -1165,9 +1165,29 @@ impl<K, V, IS, H> HamtMap<K, V, IS, H>
       H: Hasher+Default
 {
     pub fn new() -> HamtMap<K, V, IS, H> {
-        HamtMap {
-            root: UnsafeNode::alloc(0, 0),
-            element_count: 0
-        }
+
+        static mut EMPTY: UnsafeNode<u128,
+                                     u128,
+                                     ::item_store::CopyStore<u128, u128>,
+                                     StdHasher> = UnsafeNode {
+            ref_count: AtomicUsize::new(0xFF),
+            entry_types: 0,
+            mask: 0,
+            capacity: 0,
+            __entries: [],
+        };
+
+        unsafe {
+            // Yes, that's right, we are directly modifying the ref-count of
+            // the static mut UnsafeNode. Because we can.
+            EMPTY.ref_count.fetch_add(1, Ordering::SeqCst);
+
+            HamtMap {
+                root: NodeRef {
+                    ptr: mem::transmute(&EMPTY)
+                },
+                element_count: 0
+            }
+        }
     }
 
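The new `new()` removes the per-map allocation by handing every empty `HamtMap` a pointer to a single `static mut` sentinel node. The reference count starts at `0xFF` so it can never drain to zero and trigger `destroy()`, the `fetch_add` keeps `Drop`'s bookkeeping balanced, and the `transmute` erases the placeholder `u128`/`CopyStore`/`StdHasher` type parameters, which is sound only because an empty node never touches its entries. The shared-sentinel trick in isolation, using hypothetical types rather than the repository's:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

struct Node {
    ref_count: AtomicUsize,
}

// The count starts above zero and is bumped for every handle, so it can
// never reach zero: the sentinel is immortal and lives in static memory.
static EMPTY: Node = Node { ref_count: AtomicUsize::new(1) };

struct Handle {
    ptr: *const Node,
}

impl Handle {
    fn new() -> Handle {
        // No allocation: every empty handle points at the same sentinel.
        EMPTY.ref_count.fetch_add(1, Ordering::SeqCst);
        Handle { ptr: &EMPTY }
    }
}

impl Drop for Handle {
    fn drop(&mut self) {
        // A heap-allocated node would be freed when the count hit zero;
        // the sentinel's count is pinned above zero, so it never is.
        unsafe { (*self.ptr).ref_count.fetch_sub(1, Ordering::SeqCst); }
    }
}

fn main() {
    let a = Handle::new();
    let b = Handle::new();
    assert_eq!(a.ptr, b.ptr); // both empties share the one static node
}
```

A plain `static` works in this sketch because `AtomicUsize` already provides interior mutability; the commit needs `static mut` plus a `transmute` only because `UnsafeNode` is generic and a static must name concrete type parameters.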
@@ -1541,6 +1561,7 @@ Iterator for HamtMapIterator<'a, K, V, IS, H>
         }
     }
 
+    #[inline]
     fn size_hint(&self) -> (usize, Option<usize>) {
         (self.len, Some(self.len))
     }
@@ -1549,6 +1570,7 @@
 //=-------------------------------------------------------------------------------------------------
 // Utility functions
 //=-------------------------------------------------------------------------------------------------
+#[inline]
 fn get_index(mask: u32, index: usize) -> usize {
     debug_assert!((mask & (1 << index)) != 0);
 
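`get_index`, now marked `#[inline]`, is the core HAMT address computation: a node stores only its occupied slots, and a slot's position in that dense array is the population count of the mask bits below it. The function body is elided by the hunk above; the standard formulation, consistent with the `debug_assert!` shown, looks like this (a sketch, not necessarily the repository's exact expression):

```rust
fn get_index(mask: u32, index: usize) -> usize {
    // The requested slot must actually be occupied.
    debug_assert!((mask & (1 << index)) != 0);
    // Count the occupied slots below `index`; that popcount is the slot's
    // position in the node's densely packed entry array.
    (mask & ((1 << index) - 1)).count_ones() as usize
}

fn main() {
    // Slots 1, 4, and 9 occupied: slot 4 is the second occupied slot,
    // so its entry lives at dense index 1.
    let mask = (1 << 1) | (1 << 4) | (1 << 9);
    assert_eq!(get_index(mask, 4), 1);
    assert_eq!(get_index(mask, 9), 2);
}
```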
