diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 26d84524..286f9db1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,6 +17,8 @@ on: - 'LICENSE-*' - '**.md' - '**.txt' + workflow_dispatch: + schedule: [cron: "0 1 */7 * *"] env: CARGO_TERM_COLOR: always @@ -85,6 +87,8 @@ jobs: - wasm32-unknown-unknown - wasm32-unknown-emscripten - wasm32-wasip1 + - wasm32-wasip1-threads + - wasm32-wasip2 runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -103,13 +107,7 @@ jobs: - name: cargo build --target ${{ matrix.target }} run: | rustup target add ${{ matrix.target }} - cargo build --target ${{ matrix.target }} - if: matrix.target != 'wasm32-unknown-unknown' && matrix.target != 'wasm32-unknown-emscripten' && matrix.target != 'wasm32-wasip1' - - name: cargo build --target ${{ matrix.target }} - run: | - rustup target add ${{ matrix.target }} - cargo build --target ${{ matrix.target }} --no-default-features --features alloc - if: matrix.target == 'wasm32-unknown-unknown' || matrix.target == 'wasm32-unknown-emscripten' || matrix.target == 'wasm32-wasip1' + cargo build --target ${{ matrix.target }} --no-default-features --features bounded,unbounded,std build: name: build @@ -176,13 +174,13 @@ jobs: path: ~/.cargo key: ${{ runner.os }}-coverage-dotcargo - name: Run test (Unix) - run: RUSTFLAGS="--cfg all_orderwal_tests" cargo test --no-default-features --features alloc + run: RUSTFLAGS="--cfg all_orderwal_tests" cargo test --no-default-features --features bounded --lib if: matrix.os != 'windows-latest' - name: Run test (Windows) shell: pwsh run: | $env:RUSTFLAGS="--cfg all_orderwal_tests" - cargo test --no-default-features --features alloc + cargo test --no-default-features --features bounded --lib if: matrix.os == 'windows-latest' test: @@ -258,8 +256,8 @@ jobs: run: ci/sanitizer_generic.sh if: matrix.os != 'ubuntu-latest' - miri-tb: - name: miri-tb-${{ matrix.target }}-${{ matrix.cfg }} + miri-tb-bounded: + name: miri-tb-bounded-${{ matrix.target }}-${{ matrix.cfg }} strategy: matrix: os: @@ -272,14 +270,14 @@ jobs: - x86_64-apple-darwin - aarch64-apple-darwin cfg: - - swmr_insert - - swmr_iters - - swmr_get - - swmr_constructor - - swmr_multiple_version_insert - - swmr_multiple_version_iters - - swmr_multiple_version_get - - swmr_multiple_version_constructor + - generic_insert + - generic_iters + - generic_get + - generic_constructor + - dynamic_insert + - dynamic_iters + - dynamic_get + - dynamic_constructor # Exclude invalid combinations exclude: - os: ubuntu-latest @@ -309,15 +307,15 @@ jobs: run: cargo install cargo-hack - name: Miri run: | - bash ci/miri_tb.sh ${{ matrix.target }} ${{ matrix.cfg }} + bash ci/miri_tb_bounded.sh ${{ matrix.target }} ${{ matrix.cfg }} - # miri-sb: - # name: miri-sb-${{ matrix.target }}-${{ matrix.cfg }} + # miri-tb-unbounded: + # name: miri-tb-unbounded-${{ matrix.target }}-${{ matrix.cfg }} # strategy: # matrix: # os: # - ubuntu-latest - # - macos-latest + # # - macos-latest # target: # - x86_64-unknown-linux-gnu # - i686-unknown-linux-gnu @@ -325,30 +323,26 @@ jobs: # - x86_64-apple-darwin # - aarch64-apple-darwin # cfg: - # - unsync_insert - # - unsync_iters - # - unsync_get - # - unsync_constructor - # - swmr_insert - # - swmr_iters - # - swmr_get - # - swmr_constructor - # - swmr_insert - # - swmr_iters - # - swmr_get - # - swmr_constructor + # - generic_insert + # - generic_iters + # - generic_get + # - generic_constructor + # - dynamic_insert + # - dynamic_iters + # - dynamic_get + # - dynamic_constructor 
# # Exclude invalid combinations # exclude: # - os: ubuntu-latest # target: x86_64-apple-darwin # - os: ubuntu-latest # target: aarch64-apple-darwin - # - os: macos-latest - # target: x86_64-unknown-linux-gnu - # - os: macos-latest - # target: i686-unknown-linux-gnu - # - os: macos-latest - # target: powerpc64-unknown-linux-gnu + # # - os: macos-latest + # # target: x86_64-unknown-linux-gnu + # # - os: macos-latest + # # target: i686-unknown-linux-gnu + # # - os: macos-latest + # # target: powerpc64-unknown-linux-gnu # runs-on: ${{ matrix.os }} # steps: # - uses: actions/checkout@v4 @@ -366,7 +360,7 @@ jobs: # run: cargo install cargo-hack # - name: Miri # run: | - # bash ci/miri_sb.sh ${{ matrix.target }} ${{ matrix.cfg }} + # bash ci/miri_tb_unbounded.sh ${{ matrix.target }} ${{ matrix.cfg }} # valgrind valgrind: diff --git a/CHANGELOG.md b/CHANGELOG.md index a1a36292..3fffce2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,11 @@ -# Rleases +# Releases + +## 0.6.0 (Dec 28th, 2024) + +- Refactor the project to support both dynamic key-value types and generic key-value types +- Allow users to implement their own memtable +- Support `range_remove`, `range_set` and `range_unset` operations +- Make the project comfortable with `-Z miri-strict-provenance` ## 0.5.0 (Oct 27th, 2024) @@ -9,8 +16,6 @@ ## 0.4.0 (Sep 30th, 2024) -FEATURES - - Support `K: ?Sized` and `V: ?Sized` for `OrderWal`. - Use `flush_header_and_range` instead of `flush_range` when insertion. diff --git a/Cargo.toml b/Cargo.toml index f8e83664..c5a99183 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "orderwal" -version = "0.5.1" +version = "0.6.0" edition = "2021" repository = "https://github.com/al8n/orderwal" homepage = "https://github.com/al8n/orderwal" @@ -17,10 +17,13 @@ name = "foo" harness = false [features] -default = ["memmap"] -alloc = ["rarena-allocator/alloc", "skl/alloc", "dbutils/alloc"] -std = ["rarena-allocator/default", "crossbeam-skiplist/default", "crossbeam-skiplist-mvcc/default", "bitflags/std", "dbutils/default", "among/default", "skl/std"] -memmap = ["std", "rarena-allocator/memmap", "skl/memmap"] +default = ["bounded", "unbounded", "memmap"] +alloc = ["rarena-allocator/alloc", "skl?/alloc", "dbutils/alloc"] +std = ["rarena-allocator/default", "crossbeam-skiplist-mvcc?/default", "either/default", "bitflags/std", "dbutils/default", "among/default", "skl?/std"] +memmap = ["std", "rarena-allocator/memmap", "skl?/memmap"] + +bounded = ["skl", "alloc"] +unbounded = ["crossbeam-skiplist-mvcc", "std"] xxhash3 = ["dbutils/xxhash3", "std"] xxhash64 = ["dbutils/xxhash64", "std"] @@ -30,16 +33,20 @@ tracing = ["dep:tracing", "dbutils/tracing"] [dependencies] among = { version = "0.1", default-features = false, features = ["either"] } bitflags = { version = "2", default-features = false } -dbutils = { version = "0.9", default-features = false, features = ["crc32fast"] } +dbutils = { version = "0.12", default-features = false, features = ["crc32fast", "triomphe01"] } +# dbutils = { version = "0.12", path = "../layer0/dbutils", default-features = false, features = ["crc32fast", "triomphe01"] } derive-where = "1" +either = { version = "1", default-features = false } ref-cast = "1" rarena-allocator = { version = "0.4", default-features = false } -crossbeam-skiplist = { version = "0.1", default-features = false, package = "crossbeam-skiplist-pr1132", optional = true } -crossbeam-skiplist-mvcc = { version = "0.2", optional = true } -skl = { version = "0.19", default-features = false, 
features = ["alloc"] } +crossbeam-skiplist-mvcc = { version = "0.4", optional = true } +skl = { version = "0.22", default-features = false, optional = true } +# skl = { version = "0.22", optional = true, default-features = false, path = "../skl-rs" } paste = "1" - +zerocopy = { version = "0.8", features = ["derive"] } tracing = { version = "0.1", default-features = false, optional = true } +triomphe = "0.1" + [dev-dependencies] criterion = "0.5" @@ -66,14 +73,14 @@ rust_2018_idioms = "warn" single_use_lifetimes = "warn" unexpected_cfgs = { level = "warn", check-cfg = [ 'cfg(all_orderwal_tests)', - 'cfg(test_swmr_constructor)', - 'cfg(test_swmr_insert)', - 'cfg(test_swmr_iters)', - 'cfg(test_swmr_get)', - 'cfg(test_swmr_multiple_version_constructor)', - 'cfg(test_swmr_multiple_version_insert)', - 'cfg(test_swmr_multiple_version_iters)', - 'cfg(test_swmr_multiple_version_get)', + 'cfg(test_generic_constructor)', + 'cfg(test_generic_insert)', + 'cfg(test_generic_iters)', + 'cfg(test_generic_get)', + 'cfg(test_dynamic_constructor)', + 'cfg(test_dynamic_insert)', + 'cfg(test_dynamic_iters)', + 'cfg(test_dynamic_get)', ] } [[example]] @@ -82,11 +89,16 @@ path = "examples/zero_cost.rs" required-features = ["memmap"] [[example]] -name = "multiple_version" -path = "examples/multiple_version.rs" +name = "basic" +path = "examples/basic.rs" required-features = ["memmap"] [[example]] name = "generic_not_sized" path = "examples/generic_not_sized.rs" required-features = ["memmap"] + +[[example]] +name = "descend" +path = "examples/descend.rs" +required-features = ["memmap"] diff --git a/README.md b/README.md index 01e25a37..a594977e 100644 --- a/README.md +++ b/README.md @@ -27,25 +27,31 @@ English | [简体中文][zh-cn-url] ## Installation +> [!TIP] +> +> - `bounded` feature will enable a memory table implementation based on ARENA-style [`SkipMap`](https://github.com/al8n/skl) +> +> - `unbouned` feature (cannot compile without `std`) will enable a memory table implementation based on linked-style [`SkipMap`](https://github.com/al8n/crossbeam-skiplist-mvcc) + - Default (with on-disk support) ```toml [dependencies] - orderwal = "0.5" + orderwal = "0.6" ``` -- `std` only (without on-disk support) +- `std` (without on-disk support) ```toml [dependencies] - orderwal = { version = "0.5", default-features = false, features = ["std"] } + orderwal = { version = "0.6", default-features = false, features = ["std", "bounded", "unbounded"] } ``` - `no-std` (`alloc` required) ```toml [dependencies] - orderwal = { version = "0.5", default-features = false, features = ["alloc"] } + orderwal = { version = "0.6", default-features = false, features = ["alloc", "bounded"] } ``` ## Example diff --git a/ci/miri_sb.sh b/ci/miri_sb.sh deleted file mode 100755 index baded7d0..00000000 --- a/ci/miri_sb.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -set -e - -# Check if TARGET and CONFIG_FLAGS are provided, otherwise panic -if [ -z "$1" ]; then - echo "Error: TARGET is not provided" - exit 1 -fi - -if [ -z "$2" ]; then - echo "Error: CONFIG_FLAGS are not provided" - exit 1 -fi - -TARGET=$1 -CONFIG_FLAGS=$2 - -rustup toolchain install nightly --component miri -rustup override set nightly -cargo miri setup - -export MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-symbolic-alignment-check" -export RUSTFLAGS="--cfg test_$CONFIG_FLAGS" - -cargo miri test --tests --target $TARGET --lib diff --git a/ci/miri_tb.sh b/ci/miri_tb_bounded.sh similarity index 70% rename from ci/miri_tb.sh rename to ci/miri_tb_bounded.sh index fb34df06..385adb6b 100755 
--- a/ci/miri_tb.sh +++ b/ci/miri_tb_bounded.sh @@ -25,8 +25,6 @@ rustup override set nightly cargo miri setup # Zmiri-ignore-leaks needed because of https://github.com/crossbeam-rs/crossbeam/issues/579 -export MIRIFLAGS="-Zmiri-symbolic-alignment-check -Zmiri-disable-isolation -Zmiri-tree-borrows -Zmiri-ignore-leaks" +export MIRIFLAGS="-Zmiri-strict-provenance -Zmiri-disable-isolation -Zmiri-symbolic-alignment-check -Zmiri-tree-borrows -Zmiri-ignore-leaks" export RUSTFLAGS="--cfg test_$CONFIG_FLAGS" - -cargo miri test --tests --target $TARGET --lib 2>&1 | ts -i '%.s ' - +cargo miri test --tests --target $TARGET --lib --no-default-features --features bounded,std 2>&1 | ts -i '%.s ' diff --git a/ci/miri_tb_unbounded.sh b/ci/miri_tb_unbounded.sh new file mode 100755 index 00000000..2e6670ea --- /dev/null +++ b/ci/miri_tb_unbounded.sh @@ -0,0 +1,32 @@ +#!/bin/bash +set -euxo pipefail +IFS=$'\n\t' + +# We need 'ts' for the per-line timing +sudo apt-get -y install moreutils +echo + +# Check if TARGET and CONFIG_FLAGS are provided, otherwise panic +if [ -z "$1" ]; then + echo "Error: TARGET is not provided" + exit 1 +fi + +if [ -z "$2" ]; then + echo "Error: CONFIG_FLAGS are not provided" + exit 1 +fi + +TARGET=$1 +CONFIG_FLAGS=$2 + +rustup toolchain install nightly --component miri +rustup override set nightly +cargo miri setup + +# Zmiri-ignore-leaks needed because of https://github.com/crossbeam-rs/crossbeam/issues/579 +# -Zmiri-strict-provenance (crossbeam-epoch is not compatible with this flag) +RUSTFLAGS="--cfg test_$CONFIG_FLAGS" \ +MIRIFLAGS="-Zmiri-disable-isolation -Zmiri-symbolic-alignment-check -Zmiri-tree-borrows -Zmiri-ignore-leaks" \ +cargo miri test --tests --target $TARGET --lib --no-default-features --features unbounded,std 2>&1 | ts -i '%.s ' + diff --git a/examples/basic.rs b/examples/basic.rs new file mode 100644 index 00000000..9a3f3221 --- /dev/null +++ b/examples/basic.rs @@ -0,0 +1,38 @@ +use orderwal::{ + dynamic::{BoundedTable, OrderWal, Reader, Writer}, + memtable::Entry, + Builder, +}; + +fn main() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("not_sized.wal"); + + let mut wal = unsafe { + Builder::new() + .with_capacity(1024 * 1024) + .with_create_new(true) + .with_read(true) + .with_write(true) + .map_mut::, _>(&path) + .unwrap() + }; + + wal.insert(1, b"a", b"a1".as_slice()).unwrap(); + wal.insert(3, b"a", b"a3".as_slice()).unwrap(); + wal.insert(3, b"a", b"a5".as_slice()).unwrap(); + wal.insert(1, b"c", b"c1".as_slice()).unwrap(); + wal.insert(3, b"c", b"c3".as_slice()).unwrap(); + + let a = wal.get(2, b"a").unwrap(); + let c = wal.get(2, b"c").unwrap(); + + assert_eq!(a.value(), b"a1"); + assert_eq!(c.value(), b"c1"); + + let a = wal.get(3, b"a").unwrap(); + let c = wal.get(3, b"c").unwrap(); + + assert_eq!(a.value(), b"a5"); + assert_eq!(c.value(), b"c3"); +} diff --git a/examples/descend.rs b/examples/descend.rs new file mode 100644 index 00000000..daf58521 --- /dev/null +++ b/examples/descend.rs @@ -0,0 +1,27 @@ +use orderwal::{ + generic::{BoundedTable, Descend, OrderWal, Reader, Writer}, + memtable::Entry, + Builder, +}; + +fn main() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("descend.wal"); + + let mut wal = unsafe { + Builder::>::new() + .with_capacity(1024 * 1024) + .with_create_new(true) + .with_read(true) + .with_write(true) + .map_mut::>, _>(&path) + .unwrap() + }; + + for i in 0..10u64 { + wal.insert(0, &i, &i).unwrap(); + } + + let x = wal.iter(0).map(|ent| ent.value()).collect::>(); + 
assert_eq!(x, (0..10).rev().collect::>()); +} diff --git a/examples/generic_not_sized.rs b/examples/generic_not_sized.rs index 25cd2930..6d6a94f2 100644 --- a/examples/generic_not_sized.rs +++ b/examples/generic_not_sized.rs @@ -1,5 +1,6 @@ use orderwal::{ - base::{OrderWal, Reader, Writer}, + generic::{BoundedTable, OrderWal, Reader, Writer}, + memtable::Entry, Builder, }; @@ -13,16 +14,29 @@ fn main() { .with_create_new(true) .with_read(true) .with_write(true) - .map_mut::, _>(&path) + .map_mut::>, _>(&path) .unwrap() }; - wal.insert("a", b"a1".as_slice()).unwrap(); - wal.insert("c", b"c1".as_slice()).unwrap(); + wal.insert(1, "a", b"a1".as_slice()).unwrap(); + wal.insert(3, "a", b"a3".as_slice()).unwrap(); + wal.insert(1, "c", b"c1".as_slice()).unwrap(); + wal.insert(3, "c", b"c3".as_slice()).unwrap(); - let a = wal.get("a").unwrap(); - let c = wal.get("c").unwrap(); + let a = wal.get(2, "a").unwrap(); + let c = wal.get(2, "c").unwrap(); assert_eq!(a.value(), b"a1"); assert_eq!(c.value(), b"c1"); + + let a = wal.get(3, "a").unwrap(); + let c = wal.get(3, "c").unwrap(); + + assert_eq!(a.value(), b"a3"); + assert_eq!(c.value(), b"c3"); + + wal.insert(3, "a", b"a5".as_slice()).unwrap(); + + let a = wal.get(3, "a").unwrap(); + assert_eq!(a.value(), b"a5"); } diff --git a/examples/multiple_version.rs b/examples/multiple_version.rs deleted file mode 100644 index d57adc41..00000000 --- a/examples/multiple_version.rs +++ /dev/null @@ -1,36 +0,0 @@ -use orderwal::{ - multiple_version::{OrderWal, Reader, Writer}, - Builder, -}; - -fn main() { - let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("not_sized.wal"); - - let mut wal = unsafe { - Builder::new() - .with_capacity(1024 * 1024) - .with_create_new(true) - .with_read(true) - .with_write(true) - .map_mut::, _>(&path) - .unwrap() - }; - - wal.insert(1, "a", b"a1".as_slice()).unwrap(); - wal.insert(3, "a", b"a3".as_slice()).unwrap(); - wal.insert(1, "c", b"c1".as_slice()).unwrap(); - wal.insert(3, "c", b"c3".as_slice()).unwrap(); - - let a = wal.get(2, "a").unwrap(); - let c = wal.get(2, "c").unwrap(); - - assert_eq!(a.value(), b"a1"); - assert_eq!(c.value(), b"c1"); - - let a = wal.get(3, "a").unwrap(); - let c = wal.get(3, "c").unwrap(); - - assert_eq!(a.value(), b"a3"); - assert_eq!(c.value(), b"c3"); -} diff --git a/examples/zero_cost.rs b/examples/zero_cost.rs index 0ae38b89..024d5721 100644 --- a/examples/zero_cost.rs +++ b/examples/zero_cost.rs @@ -1,10 +1,12 @@ -use std::{cmp, sync::Arc, thread::spawn}; +use std::{sync::Arc, thread::spawn}; use dbutils::leb128::{decode_u64_varint, encode_u64_varint, encoded_u64_varint_len}; use orderwal::{ - base::{OrderWal, Reader, Writer}, - types::{KeyRef, Type, TypeRef}, - Builder, Comparable, Equivalent, + equivalent::{Comparable, Equivalent}, + generic::{BoundedTable, OrderWal, Reader, Writer}, + memtable::Entry, + types::{Type, TypeRef, VacantBuffer}, + Builder, }; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] @@ -20,37 +22,21 @@ impl Person { name: names::Generator::default().next().unwrap(), } } + + fn as_person_ref(&self) -> PersonRef<'_> { + PersonRef { + id: self.id, + name: &self.name, + } + } } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] struct PersonRef<'a> { id: u64, name: &'a str, } -impl PartialEq for PersonRef<'_> { - fn eq(&self, other: &Self) -> bool { - self.id == other.id && self.name == other.name - } -} - -impl Eq for PersonRef<'_> {} - -impl PartialOrd for PersonRef<'_> { - fn partial_cmp(&self, 
other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for PersonRef<'_> { - fn cmp(&self, other: &Self) -> cmp::Ordering { - self - .id - .cmp(&other.id) - .then_with(|| self.name.cmp(other.name)) - } -} - impl Equivalent for PersonRef<'_> { fn equivalent(&self, key: &Person) -> bool { self.id == key.id && self.name == key.name @@ -78,29 +64,6 @@ impl Comparable> for Person { } } -impl<'a> KeyRef<'a, Person> for PersonRef<'a> { - fn compare(&self, a: &Q) -> cmp::Ordering - where - Q: ?Sized + Comparable, - { - Comparable::compare(a, self).reverse() - } - - unsafe fn compare_binary(this: &[u8], other: &[u8]) -> cmp::Ordering { - let (this_id_size, this_id) = decode_u64_varint(this).unwrap(); - let (other_id_size, other_id) = decode_u64_varint(other).unwrap(); - - PersonRef { - id: this_id, - name: std::str::from_utf8(&this[this_id_size..]).unwrap(), - } - .cmp(&PersonRef { - id: other_id, - name: std::str::from_utf8(&other[other_id_size..]).unwrap(), - }) - } -} - impl Type for Person { type Ref<'a> = PersonRef<'a>; type Error = dbutils::error::InsufficientBuffer; @@ -117,10 +80,7 @@ impl Type for Person { } #[inline] - fn encode_to_buffer( - &self, - buf: &mut orderwal::types::VacantBuffer<'_>, - ) -> Result { + fn encode_to_buffer(&self, buf: &mut VacantBuffer<'_>) -> Result { let id_size = buf.put_u64_varint(self.id)?; buf.put_slice_unchecked(self.name.as_bytes()); Ok(id_size + self.name.len()) @@ -153,7 +113,7 @@ fn main() { .with_create_new(true) .with_read(true) .with_write(true) - .map_mut::, _>(&path) + .map_mut::>, _>(&path) .unwrap() }; @@ -166,30 +126,27 @@ fn main() { let handles = readers.into_iter().enumerate().map(|(i, reader)| { let people = people.clone(); spawn(move || loop { - let (person, hello) = &people[i]; - let person_ref = PersonRef { - id: person.id, - name: &person.name, - }; - if let Some(p) = reader.get(person) { + let (person, hello) = people[i].clone(); + let person_ref = person.as_person_ref(); + if let Some(p) = reader.get(1, &person) { assert_eq!(p.key().id, person.id); assert_eq!(p.key().name, person.name); assert_eq!(p.value(), hello); break; } - if let Some(p) = reader.get(&person_ref) { + if let Some(p) = reader.get(1, &person_ref) { assert_eq!(p.key().id, person.id); assert_eq!(p.key().name, person.name); assert_eq!(p.value(), hello); break; - }; + } }) }); // Insert 100 people into the wal for (p, h) in people.iter() { - wal.insert(p, h).unwrap(); + wal.insert(1, p, h).unwrap(); } // Wait for all threads to finish diff --git a/rustfmt.toml b/rustfmt.toml index f54d5e6e..f38dc746 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -7,7 +7,7 @@ reorder_imports = true reorder_modules = true remove_nested_parens = true merge_derives = true +imports_granularity="Crate" use_try_shorthand = true use_field_init_shorthand = true -force_explicit_abi = true - +force_explicit_abi = true \ No newline at end of file diff --git a/src/batch.rs b/src/batch.rs index ce55883b..93c55275 100644 --- a/src/batch.rs +++ b/src/batch.rs @@ -1,181 +1,250 @@ +use core::{marker::PhantomData, ops::Bound}; + +use either::Either; + use crate::{ - memtable::BaseTable, - wal::{KeyPointer, ValuePointer}, + memtable::Memtable, + types::{EncodedEntryMeta, EncodedRangeEntryMeta, EntryFlags, RecordPointer}, }; -use super::{ - sealed::{WithVersion, WithoutVersion}, - types::{BufWriter, EncodedEntryMeta, EntryFlags}, -}; +pub(crate) enum Data { + InsertPoint { + key: K, + value: V, + meta: EncodedEntryMeta, + }, + RemovePoint { + key: K, + meta: EncodedEntryMeta, + }, + 
RangeRemove { + start_bound: Bound, + end_bound: Bound, + meta: EncodedRangeEntryMeta, + }, + RangeUnset { + start_bound: Bound, + end_bound: Bound, + meta: EncodedRangeEntryMeta, + }, + RangeSet { + start_bound: Bound, + end_bound: Bound, + value: V, + meta: EncodedRangeEntryMeta, + }, +} /// An entry can be inserted into the WALs through [`Batch`]. -pub struct BatchEntry { - pub(crate) key: K, - pub(crate) value: Option, +pub struct BatchEntry { + pub(crate) data: Data, pub(crate) flag: EntryFlags, - pub(crate) meta: EncodedEntryMeta, - pointers: Option<(KeyPointer, Option>)>, - pub(crate) version: Option, + pointers: Option, + pub(crate) version: u64, + _m: PhantomData, } impl BatchEntry where - M: BaseTable, - for<'a> M::Item<'a>: WithoutVersion, + M: Memtable, { - /// Creates a new entry. + /// Creates a new entry with version. #[inline] - pub const fn new(key: K, value: V) -> Self { + pub fn insert(version: u64, key: K, value: V) -> Self { Self { - key, - value: Some(value), + data: Data::InsertPoint { + key, + value, + meta: EncodedEntryMeta::placeholder(), + }, flag: EntryFlags::empty(), - meta: EncodedEntryMeta::batch_zero(false), pointers: None, - version: None, + version, + _m: PhantomData, } } - /// Creates a tombstone entry. + /// Creates a tombstone entry with version. #[inline] - pub const fn tombstone(key: K) -> Self { + pub fn remove(version: u64, key: K) -> Self { Self { - key, - value: None, + data: Data::RemovePoint { + key, + meta: EncodedEntryMeta::placeholder(), + }, flag: EntryFlags::REMOVED, - meta: EncodedEntryMeta::batch_zero(false), pointers: None, - version: None, + version, + _m: PhantomData, } } -} -impl BatchEntry -where - M: BaseTable, - for<'a> M::Item<'a>: WithVersion, -{ - /// Creates a new entry with version. + /// Creates a range remove entry with version. #[inline] - pub fn with_version(version: u64, key: K, value: V) -> Self { + pub fn range_remove(version: u64, start_bound: Bound, end_bound: Bound) -> Self { Self { - key, - value: Some(value), - flag: EntryFlags::empty() | EntryFlags::VERSIONED, - meta: EncodedEntryMeta::batch_zero(true), + data: Data::RangeRemove { + start_bound, + end_bound, + meta: EncodedRangeEntryMeta::placeholder(), + }, + flag: EntryFlags::RANGE_DELETION, pointers: None, - version: Some(version), + version, + _m: PhantomData, } } - /// Creates a tombstone entry with version. + /// Creates a range unset entry with version. #[inline] - pub fn tombstone_with_version(version: u64, key: K) -> Self { + pub fn range_unset(version: u64, start_bound: Bound, end_bound: Bound) -> Self { Self { - key, - value: None, - flag: EntryFlags::REMOVED | EntryFlags::VERSIONED, - meta: EncodedEntryMeta::batch_zero(true), + data: Data::RangeUnset { + start_bound, + end_bound, + meta: EncodedRangeEntryMeta::placeholder(), + }, + flag: EntryFlags::RANGE_UNSET, pointers: None, - version: Some(version), + version, + _m: PhantomData, + } + } + + /// Creates a range set entry with version. + #[inline] + pub fn range_set(version: u64, start_bound: Bound, end_bound: Bound, value: V) -> Self { + Self { + data: Data::RangeSet { + start_bound, + end_bound, + value, + meta: EncodedRangeEntryMeta::placeholder(), + }, + flag: EntryFlags::RANGE_SET, + pointers: None, + version, + _m: PhantomData, } } /// Returns the version of the entry. #[inline] pub const fn version(&self) -> u64 { - match self.version { - Some(version) => version, - None => unreachable!(), - } + self.version } /// Set the version of the entry. 
#[inline] pub fn set_version(&mut self, version: u64) { - self.version = Some(version); + self.version = version; } } impl BatchEntry where - M: BaseTable, + M: Memtable, { - /// Returns the length of the key. - #[inline] - pub fn key_len(&self) -> usize - where - K: BufWriter, - { - self.key.encoded_len() - } - - /// Returns the length of the value. + /// Returns the key. #[inline] - pub fn value_len(&self) -> usize - where - V: BufWriter, - { - self.value.as_ref().map_or(0, |v| v.encoded_len()) + pub const fn key(&self) -> &K { + match &self.data { + Data::InsertPoint { key, .. } | Data::RemovePoint { key, .. } => key, + Data::RangeRemove { .. } | Data::RangeUnset { .. } | Data::RangeSet { .. } => { + panic!("try to get key from range entry") + } + } } - /// Returns the key. + /// Returns the range key. #[inline] - pub const fn key(&self) -> &K { - &self.key + pub fn bounds(&self) -> (Bound<&K>, Bound<&K>) { + match &self.data { + Data::InsertPoint { .. } | Data::RemovePoint { .. } => { + panic!("try to get range key from point entry") + } + Data::RangeRemove { + start_bound, + end_bound, + .. + } + | Data::RangeUnset { + start_bound, + end_bound, + .. + } + | Data::RangeSet { + start_bound, + end_bound, + .. + } => (start_bound.as_ref(), end_bound.as_ref()), + } } /// Returns the value. #[inline] pub const fn value(&self) -> Option<&V> { - self.value.as_ref() + match &self.data { + Data::InsertPoint { value, .. } => Some(value), + Data::RemovePoint { .. } => None, + Data::RangeRemove { .. } | Data::RangeUnset { .. } => None, + Data::RangeSet { value, .. } => Some(value), + } } /// Consumes the entry and returns the key and value. #[inline] - pub fn into_components(self) -> (K, Option) { - (self.key, self.value) - } - - #[inline] - pub(crate) fn encoded_key_len(&self) -> usize - where - K: BufWriter, - V: BufWriter, - { - self.key.encoded_len() + pub fn into_components(self) -> (Either, Bound)>, Option) { + match self.data { + Data::InsertPoint { key, value, .. } => (Either::Left(key), Some(value)), + Data::RemovePoint { key, .. } => (Either::Left(key), None), + Data::RangeRemove { + start_bound, + end_bound, + .. + } + | Data::RangeUnset { + start_bound, + end_bound, + .. + } => (Either::Right((start_bound, end_bound)), None), + Data::RangeSet { + start_bound, + end_bound, + value, + .. + } => (Either::Right((start_bound, end_bound)), Some(value)), + } } #[inline] - pub(crate) const fn internal_version(&self) -> Option { + pub(crate) const fn internal_version(&self) -> u64 { self.version } #[inline] - pub(crate) fn take_pointer( - &mut self, - ) -> Option<(KeyPointer, Option>)> { + pub(crate) fn take_pointer(&mut self) -> Option { self.pointers.take() } #[inline] - pub(crate) fn set_pointer(&mut self, kp: KeyPointer, vp: Option>) { - self.pointers = Some((kp, vp)); + pub(crate) fn set_pointer(&mut self, p: RecordPointer) { + self.pointers = Some(p); } #[inline] - pub(crate) fn set_encoded_meta(&mut self, meta: EncodedEntryMeta) { - self.meta = meta; - } - - #[inline] - pub(crate) fn encoded_meta(&self) -> &EncodedEntryMeta { - &self.meta + pub(crate) fn encoded_meta(&self) -> Either<&EncodedEntryMeta, &EncodedRangeEntryMeta> { + match &self.data { + Data::InsertPoint { meta, .. } | Data::RemovePoint { meta, .. } => Either::Left(meta), + Data::RangeRemove { meta, .. } + | Data::RangeUnset { meta, .. } + | Data::RangeSet { meta, .. } => Either::Right(meta), + } } } /// A trait for batch insertions. -pub trait Batch { +pub trait Batch { /// Any type that can be converted into a key. 
type Key; /// Any type that can be converted into a value. @@ -200,7 +269,7 @@ pub trait Batch { impl Batch for T where - M: BaseTable, + M: Memtable, for<'a> &'a mut T: IntoIterator>, { type Key = K; diff --git a/src/builder.rs b/src/builder.rs index c8661333..762ef831 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1,12 +1,10 @@ -use dbutils::checksum::Crc32; -use skl::KeySize; - -use super::{ +use crate::{ error::Error, - memtable::BaseTable, + log::Log, + memtable::Memtable, options::{arena_options, Options}, - sealed::Constructable, }; +use dbutils::checksum::Crc32; #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] @@ -15,7 +13,7 @@ mod memmap; /// A write-ahead log builder. pub struct Builder where - M: BaseTable, + M: Memtable, { pub(super) opts: Options, pub(super) cks: S, @@ -24,7 +22,7 @@ where impl Default for Builder where - M: BaseTable, + M: Memtable, M::Options: Default, { #[inline] @@ -35,7 +33,7 @@ where impl Builder where - M: BaseTable, + M: Memtable, M::Options: Default, { /// Returns a new write-ahead log builder with the given options. @@ -51,16 +49,16 @@ where impl Builder where - M: BaseTable, + M: Memtable, { /// Returns a new write-ahead log builder with the new checksumer /// /// ## Example /// /// ```rust - /// use orderwal::{Builder, Crc32, multiple_version::DefaultTable}; + /// use orderwal::{Builder, checksum::Crc32, generic::BoundedTable}; /// - /// let opts = Builder::>::new().with_checksumer(Crc32::new()); + /// let opts = Builder::>::new().with_checksumer(Crc32::new()); /// ``` #[inline] pub fn with_checksumer(self, cks: NS) -> Builder { @@ -76,9 +74,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, Options, multiple_version::DefaultTable}; + /// use orderwal::{Builder, Options, generic::BoundedTable}; /// - /// let opts = Builder::>::new().with_options(Options::default()); + /// let opts = Builder::>::new().with_options(Options::default()); /// ``` #[inline] pub fn with_options(self, opts: Options) -> Self { @@ -94,9 +92,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::{ArenaTable, ArenaTableOptions}}; + /// use orderwal::{Builder, generic::{BoundedTable, BoundedTableOptions}}; /// - /// let opts = Builder::>::new().with_memtable_options(ArenaTableOptions::default()); + /// let opts = Builder::>::new().with_memtable_options(BoundedTableOptions::default()); /// ``` #[inline] pub fn with_memtable_options(self, opts: M::Options) -> Self { @@ -112,14 +110,14 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::{DefaultTable, ArenaTable}}; + /// use orderwal::{Builder, generic::{UnboundedTable, BoundedTable}}; /// - /// let opts = Builder::>::new().change_memtable::>(); + /// let opts = Builder::>::new().change_memtable::>(); /// ``` #[inline] pub fn change_memtable(self) -> Builder where - NM: BaseTable, + NM: Memtable, NM::Options: Default, { Builder { @@ -134,14 +132,14 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::{DefaultTable, ArenaTable, ArenaTableOptions}}; + /// use orderwal::{Builder, generic::{UnboundedTable, BoundedTable, BoundedTableOptions}}; /// - /// let opts = Builder::>::new().change_memtable_with_options::>(ArenaTableOptions::default().with_capacity(1000)); + /// let opts = Builder::>::new().change_memtable_with_options::>(BoundedTableOptions::default().with_capacity(1000)); /// ``` #[inline] pub fn 
change_memtable_with_options(self, opts: NM::Options) -> Builder where - NM: BaseTable, + NM: Memtable, { Builder { opts: self.opts, @@ -160,9 +158,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let opts = Builder::>::new().with_reserved(8); + /// let opts = Builder::>::new().with_reserved(8); /// ``` #[inline] pub const fn with_reserved(mut self, reserved: u32) -> Self { @@ -180,9 +178,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let opts = Builder::>::new().with_reserved(8); + /// let opts = Builder::>::new().with_reserved(8); /// /// assert_eq!(opts.reserved(), 8); /// ``` @@ -198,9 +196,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_magic_version(1); + /// let options = Builder::>::new().with_magic_version(1); /// assert_eq!(options.magic_version(), 1); /// ``` #[inline] @@ -215,9 +213,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_capacity(1000); + /// let options = Builder::>::new().with_capacity(1000); /// assert_eq!(options.capacity(), 1000); /// ``` #[inline] @@ -232,13 +230,13 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, KeySize, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_maximum_key_size(KeySize::with(1024)); - /// assert_eq!(options.maximum_key_size(), KeySize::with(1024)); + /// let options = Builder::>::new().with_maximum_key_size(1024); + /// assert_eq!(options.maximum_key_size(), 1024); /// ``` #[inline] - pub const fn maximum_key_size(&self) -> KeySize { + pub const fn maximum_key_size(&self) -> u32 { self.opts.maximum_key_size() } @@ -249,9 +247,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_maximum_value_size(1024); + /// let options = Builder::>::new().with_maximum_value_size(1024); /// assert_eq!(options.maximum_value_size(), 1024); /// ``` #[inline] @@ -266,9 +264,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new(); + /// let options = Builder::>::new(); /// assert_eq!(options.sync(), true); /// ``` #[inline] @@ -285,9 +283,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_capacity(100); + /// let options = Builder::>::new().with_capacity(100); /// assert_eq!(options.capacity(), 100); /// ``` #[inline] @@ -301,13 +299,13 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, KeySize, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_maximum_key_size(KeySize::with(1024)); - /// assert_eq!(options.maximum_key_size(), KeySize::with(1024)); + /// let 
options = Builder::>::new().with_maximum_key_size(1024); + /// assert_eq!(options.maximum_key_size(), 1024); /// ``` #[inline] - pub const fn with_maximum_key_size(mut self, size: KeySize) -> Self { + pub const fn with_maximum_key_size(mut self, size: u32) -> Self { self.opts = self.opts.with_maximum_key_size(size); self } @@ -317,9 +315,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_maximum_value_size(1024); + /// let options = Builder::>::new().with_maximum_value_size(1024); /// assert_eq!(options.maximum_value_size(), 1024); /// ``` #[inline] @@ -335,9 +333,9 @@ where /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_sync(false); + /// let options = Builder::>::new().with_sync(false); /// assert_eq!(options.sync(), false); /// ``` #[inline] @@ -354,9 +352,9 @@ where /// /// ```rust /// - /// use orderwal::{Builder, multiple_version::DefaultTable}; + /// use orderwal::{Builder, generic::BoundedTable}; /// - /// let options = Builder::>::new().with_magic_version(1); + /// let options = Builder::>::new().with_magic_version(1); /// assert_eq!(options.magic_version(), 1); /// ``` #[inline] @@ -368,24 +366,36 @@ where impl Builder where - M: BaseTable, + M: Memtable, { /// Creates a new in-memory write-ahead log backed by an aligned vec. /// - /// ## Example + /// ## Examples + /// + /// ### Generic order WAL example /// /// ```rust + /// use orderwal::{generic::{OrderWal, BoundedTable}, Builder}; + /// + /// let wal = Builder::new() + /// .with_capacity(1024) + /// .alloc::>>() + /// .unwrap(); + /// ``` + /// + /// ### Dynamic order WAL example /// - /// use orderwal::{base::OrderWal, Builder}; + /// ```rust + /// use orderwal::{dynamic::{OrderWal, BoundedTable}, Builder}; /// /// let wal = Builder::new() /// .with_capacity(1024) - /// .alloc::>() + /// .alloc::>() /// .unwrap(); /// ``` - pub fn alloc(self) -> Result> + pub fn alloc(self) -> Result> where - W: Constructable, + L: Log, { let Self { opts, @@ -396,6 +406,6 @@ where .with_capacity(opts.capacity()) .alloc() .map_err(Error::from_insufficient_space) - .and_then(|arena| W::new_in(arena, opts, memtable_opts, cks).map(W::from_core)) + .and_then(|arena| L::new(arena, opts, memtable_opts, cks)) } } diff --git a/src/builder/memmap.rs b/src/builder/memmap.rs index 5895d6d4..d5c6a01c 100644 --- a/src/builder/memmap.rs +++ b/src/builder/memmap.rs @@ -1,33 +1,29 @@ use super::*; -use crate::{options::ArenaOptionsExt, sealed::Immutable}; - -use dbutils::{ - checksum::BuildChecksumer, - types::{KeyRef, Type}, -}; -use skl::either::Either; +use crate::{memtable::MutableMemtable, options::ArenaOptionsExt, Immutable}; +use dbutils::checksum::BuildChecksumer; +use either::Either; impl Builder where - M: BaseTable, + M: Memtable, { - /// Set if lock the meta of the WAL in the memory to prevent OS from swapping out the header of WAL. - /// When using memory map backed WAL, the meta of the WAL + /// Set if lock the meta of the LAL in the memory to prevent OS from swapping out the header of LAL. 
+ /// Lhen using memory map backed LAL, the meta of the LAL /// is in the header, meta is frequently accessed, /// lock (`mlock` on the header) the meta can reduce the page fault, - /// but yes, this means that one WAL will have one page are locked in memory, + /// but yes, this means that one LAL will have one page are locked in memory, /// and will not be swapped out. So, this is a trade-off between performance and memory usage. /// /// Default is `true`. /// - /// This configuration has no effect on windows and vec backed WAL. + /// This configuration has no effect on windows and vec backed LAL. /// /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_lock_meta(false); + /// let opts = Builder::>::new().with_lock_meta(false); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -45,9 +41,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_read(true); + /// let opts = Builder::>::new().with_read(true); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -68,9 +64,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_write(true); + /// let opts = Builder::>::new().with_write(true); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -115,9 +111,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_append(true); + /// let opts = Builder::>::new().with_append(true); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -138,9 +134,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_write(true).with_truncate(true); + /// let opts = Builder::>::new().with_write(true).with_truncate(true); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -163,9 +159,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_write(true).with_create(true); + /// let opts = Builder::>::new().with_write(true).with_create(true); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -196,9 +192,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new() + /// let opts = Builder::>::new() /// .with_write(true) /// .with_create_new(true); /// ``` @@ -212,16 +208,16 @@ where /// Configures the anonymous memory map to be suitable for a process or thread stack. /// - /// This option corresponds to the `MAP_STACK` flag on Linux. It has no effect on Windows. + /// This option corresponds to the `MAP_STACK` flag on Linux. It has no effect on Lindows. /// /// This option has no effect on file-backed memory maps and vec backed `Wal`. 
/// /// ## Example /// /// ``` - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let stack = Builder::>::new().with_stack(true); + /// let stack = Builder::>::new().with_stack(true); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -233,7 +229,7 @@ where /// Configures the anonymous memory map to be allocated using huge pages. /// - /// This option corresponds to the `MAP_HUGETLB` flag on Linux. It has no effect on Windows. + /// This option corresponds to the `MAP_HUGETLB` flag on Linux. It has no effect on Lindows. /// /// The size of the requested page can be specified in page bits. If not provided, the system /// default is requested. The requested length should be a multiple of this, or the mapping @@ -244,9 +240,9 @@ where /// ## Example /// /// ``` - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_huge(Some(8)); + /// let opts = Builder::>::new().with_huge(Some(8)); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -260,16 +256,16 @@ where /// /// For a file mapping, this causes read-ahead on the file. This will help to reduce blocking on page faults later. /// - /// This option corresponds to the `MAP_POPULATE` flag on Linux. It has no effect on Windows. + /// This option corresponds to the `MAP_POPULATE` flag on Linux. It has no effect on Lindows. /// /// This option has no effect on vec backed `Wal`. /// /// ## Example /// /// ``` - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_populate(true); + /// let opts = Builder::>::new().with_populate(true); /// ``` #[inline] #[cfg(all(feature = "memmap", not(target_family = "wasm")))] @@ -282,21 +278,21 @@ where impl Builder where - M: BaseTable, + M: Memtable, { - /// Get if lock the meta of the WAL in the memory to prevent OS from swapping out the header of WAL. - /// When using memory map backed WAL, the meta of the WAL + /// Get if lock the meta of the LAL in the memory to prevent OS from swapping out the header of LAL. + /// Lhen using memory map backed LAL, the meta of the LAL /// is in the header, meta is frequently accessed, /// lock (`mlock` on the header) the meta can reduce the page fault, - /// but yes, this means that one WAL will have one page are locked in memory, + /// but yes, this means that one LAL will have one page are locked in memory, /// and will not be swapped out. So, this is a trade-off between performance and memory usage. 
/// /// ## Example /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_lock_meta(false); + /// let opts = Builder::>::new().with_lock_meta(false); /// assert_eq!(opts.lock_meta(), false); /// ``` #[inline] @@ -311,9 +307,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_read(true); + /// let opts = Builder::>::new().with_read(true); /// assert_eq!(opts.read(), true); /// ``` #[inline] @@ -328,9 +324,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_write(true); + /// let opts = Builder::>::new().with_write(true); /// assert_eq!(opts.write(), true); /// ``` #[inline] @@ -345,9 +341,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_append(true); + /// let opts = Builder::>::new().with_append(true); /// assert_eq!(opts.append(), true); /// ``` #[inline] @@ -362,9 +358,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_truncate(true); + /// let opts = Builder::>::new().with_truncate(true); /// assert_eq!(opts.truncate(), true); /// ``` #[inline] @@ -379,9 +375,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_create(true); + /// let opts = Builder::>::new().with_create(true); /// assert_eq!(opts.create(), true); /// ``` #[inline] @@ -396,9 +392,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_create_new(true); + /// let opts = Builder::>::new().with_create_new(true); /// assert_eq!(opts.create_new(), true); /// ``` #[inline] @@ -413,9 +409,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_stack(true); + /// let opts = Builder::>::new().with_stack(true); /// assert_eq!(opts.stack(), true); /// ``` #[inline] @@ -430,9 +426,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_huge(Some(8)); + /// let opts = Builder::>::new().with_huge(Some(8)); /// assert_eq!(opts.huge(), Some(8)); /// ``` #[inline] @@ -447,9 +443,9 @@ where /// ## Examples /// /// ```rust - /// use orderwal::{Builder, multiple_version::LinkedTable}; + /// use orderwal::{Builder, generic::UnboundedTable}; /// - /// let opts = Builder::>::new().with_populate(true); + /// let opts = Builder::>::new().with_populate(true); /// assert_eq!(opts.populate(), true); /// ``` #[inline] @@ -462,25 +458,38 @@ where impl Builder where - M: BaseTable, + M: Memtable, { /// Creates a new in-memory write-ahead 
log but backed by an anonymous mmap. /// - /// ## Example + /// ## Examples + /// + /// ### Generic order WAL example /// /// ```rust - /// use orderwal::{base::OrderWal, Builder}; + /// use orderwal::{generic::{OrderWal, BoundedTable}, Builder}; /// /// let wal = Builder::new() /// .with_capacity(1024) - /// .map_anon::>() + /// .map_anon::>>() + /// .unwrap(); + /// ``` + /// + /// ### Dynamic order WAL example + /// + /// ```rust + /// use orderwal::{dynamic::{OrderWal, BoundedTable}, Builder}; + /// + /// let wal = Builder::new() + /// .with_capacity(1024) + /// .map_anon::>() /// .unwrap(); /// ``` #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub fn map_anon(self) -> Result> + pub fn map_anon(self) -> Result> where - W: Constructable, + L: Log, { let Self { opts, @@ -491,7 +500,7 @@ where .merge(&opts) .map_anon() .map_err(Into::into) - .and_then(|arena| W::new_in(arena, opts, memtable_opts, cks).map(W::from_core)) + .and_then(|arena| L::new(arena, opts, memtable_opts, cks)) } /// Opens a write-ahead log backed by a file backed memory map in read-only mode. @@ -504,39 +513,58 @@ where /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. /// unlinked) files exist but are platform specific and limited. /// - /// ## Example + /// ## Examples /// - /// ```rust - /// use orderwal::{base::OrderWalReader, Builder}; - /// - /// # let dir = tempfile::tempdir().unwrap(); - /// # let path = dir.path().join("map.wal"); - /// - /// # let wal = unsafe { - /// # Builder::new() - /// # .with_capacity(1000).with_create(true).with_read(true).with_write(true) - /// # .map_mut::, _>(&path) - /// # .unwrap() - /// # }; - /// - /// let wal = unsafe { - /// Builder::new() - /// .map::, _>(&path) - /// .unwrap() - /// }; - /// ``` + /// ### Generic order WAL example + /// + /// ```rust + /// use orderwal::{generic::{OrderWalReader, BoundedTable}, Builder}; + /// # let dir = tempfile::tempdir().unwrap(); + /// # let path = dir.path().join("map_with_path_builder.wal"); + /// # let wal = unsafe { + /// # Builder::new() + /// # .with_capacity(1000).with_create(true).with_read(true).with_write(true) + /// # .map_mut::>, _>(&path) + /// # .unwrap() + /// # }; + /// + /// let wal = unsafe { + /// Builder::new() + /// .map::>, _>(path) + /// .unwrap() + /// }; + /// ``` + /// + /// ### Dynamic order WAL example + /// + /// ```rust + /// use orderwal::{dynamic::{OrderWalReader, BoundedTable}, Builder}; + /// # let dir = tempfile::tempdir().unwrap(); + /// # let path = dir.path().join("map_with_path_builder.wal"); + /// # let wal = unsafe { + /// # Builder::new() + /// # .with_capacity(1000).with_create(true).with_read(true).with_write(true) + /// # .map_mut::, _>(&path) + /// # .unwrap() + /// # }; + /// + /// let wal = unsafe { + /// Builder::new() + /// .map::, _>(path) + /// .unwrap() + /// }; + /// ``` #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub unsafe fn map<'a, W, P>(self, path: P) -> Result> + pub unsafe fn map(self, path: P) -> Result> where S: BuildChecksumer, P: AsRef, - W: Constructable + Immutable, - M::Key: Type + Ord + 'static, - ::Ref<'a>: KeyRef<'a, M::Key>, + L: Log + Immutable, + M: MutableMemtable, { self - .map_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) + .map_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) 
.map_err(Either::unwrap_right) } @@ -550,38 +578,58 @@ where /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. /// unlinked) files exist but are platform specific and limited. /// - /// ## Example + /// ## Examples /// - /// ```rust - /// use orderwal::{base::OrderWalReader, Builder}; - /// - /// # let dir = tempfile::tempdir().unwrap(); - /// # let path = dir.path().join("map_with_path_builder.wal"); - /// - /// # let wal = unsafe { - /// # Builder::new() - /// # .with_capacity(1000).with_create(true).with_read(true).with_write(true) - /// # .map_mut::, _>(&path) - /// # .unwrap() - /// # }; - /// - /// let wal = unsafe { - /// Builder::new() - /// .map_with_path_builder::, _, ()>(|| Ok(path)) - /// .unwrap() - /// }; + /// ### Generic order WAL example + /// + /// ```rust + /// use orderwal::{generic::{OrderWalReader, BoundedTable}, Builder}; + /// # let dir = tempfile::tempdir().unwrap(); + /// # let path = dir.path().join("map_with_path_builder.wal"); + /// # let wal = unsafe { + /// # Builder::new() + /// # .with_capacity(1000).with_create(true).with_read(true).with_write(true) + /// # .map_mut::>, _>(&path) + /// # .unwrap() + /// # }; + /// + /// let wal = unsafe { + /// Builder::new() + /// .map_with_path_builder::>, _, ()>(|| Ok(path)) + /// .unwrap() + /// }; + /// ``` + /// + /// ### Dynamic order WAL example + /// + /// ```rust + /// use orderwal::{dynamic::{OrderWalReader, BoundedTable}, Builder}; + /// # let dir = tempfile::tempdir().unwrap(); + /// # let path = dir.path().join("map_with_path_builder.wal"); + /// # let wal = unsafe { + /// # Builder::new() + /// # .with_capacity(1000).with_create(true).with_read(true).with_write(true) + /// # .map_mut::, _>(&path) + /// # .unwrap() + /// # }; + /// + /// let wal = unsafe { + /// Builder::new() + /// .map_with_path_builder::, _, ()>(|| Ok(path)) + /// .unwrap() + /// }; + /// ``` #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub unsafe fn map_with_path_builder<'a, W, PB, E>( + pub unsafe fn map_with_path_builder( self, path_builder: PB, - ) -> Result>> + ) -> Result>> where PB: FnOnce() -> Result, S: BuildChecksumer, - W: Constructable + Immutable, - M::Key: Type + Ord + 'static, - ::Ref<'a>: KeyRef<'a, M::Key>, + L: Log + Immutable, + M: MutableMemtable, { let Self { opts, @@ -595,9 +643,7 @@ where .map_with_path_builder(path_builder) .map_err(|e| e.map_right(Into::into)) .and_then(|arena| { - W::replay(arena, Options::new(), memtable_opts, true, cks) - .map(Constructable::from_core) - .map_err(Either::Right) + L::replay(arena, Options::new(), memtable_opts, true, cks).map_err(Either::Right) }) } @@ -611,36 +657,55 @@ where /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. /// unlinked) files exist but are platform specific and limited. 
/// - /// ## Example + /// ## Examples /// - /// ```rust - /// use orderwal::{base::OrderWal, Builder}; - /// - /// let dir = tempfile::tempdir().unwrap(); - /// let path = dir.path().join("map_mut_with_path_builder_example.wal"); - /// - /// let wal = unsafe { - /// Builder::new() - /// .with_create_new(true) - /// .with_read(true) - /// .with_write(true) - /// .with_capacity(1000) - /// .map_mut::, _>(&path) - /// .unwrap() - /// }; - /// ``` + /// ### Generic order WAL example + /// + /// ```rust + /// use orderwal::{generic::{OrderWal, BoundedTable}, Builder}; + /// + /// let dir = tempfile::tempdir().unwrap(); + /// let path = dir.path().join("map_mut_with_path_builder_example.wal"); + /// + /// let wal = unsafe { + /// Builder::new() + /// .with_create_new(true) + /// .with_read(true) + /// .with_write(true) + /// .with_capacity(1000) + /// .map_mut::>, _>(&path) + /// .unwrap() + /// }; + /// ``` + /// ### Dynamic order WAL example + /// + /// ```rust + /// use orderwal::{dynamic::{OrderWal, BoundedTable}, Builder}; + /// + /// let dir = tempfile::tempdir().unwrap(); + /// let path = dir.path().join("map_mut_with_path_builder_example.wal"); + /// + /// let wal = unsafe { + /// Builder::new() + /// .with_create_new(true) + /// .with_read(true) + /// .with_write(true) + /// .with_capacity(1000) + /// .map_mut::, _>(&path) + /// .unwrap() + /// }; + /// ``` #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub unsafe fn map_mut<'a, W, P>(self, path: P) -> Result> + pub unsafe fn map_mut(self, path: P) -> Result> where S: BuildChecksumer, P: AsRef, - W: Constructable, - M::Key: Type + Ord + 'static, - ::Ref<'a>: KeyRef<'a, M::Key>, + L: Log, + M: MutableMemtable, { self - .map_mut_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) + .map_mut_with_path_builder::(|| Ok(path.as_ref().to_path_buf())) .map_err(Either::unwrap_right) } @@ -654,37 +719,58 @@ where /// using file-backed maps. Solutions such as file permissions, locks or process-private (e.g. /// unlinked) files exist but are platform specific and limited. 
/// - /// ## Example + /// ## Examples /// - /// ```rust - /// use orderwal::{base::OrderWal, Builder}; + /// ### Generic order WAL example /// - /// let dir = tempfile::tempdir().unwrap(); + /// ```rust + /// use orderwal::{generic::{OrderWal, BoundedTable}, Builder}; + /// + /// let dir = tempfile::tempdir().unwrap(); /// - /// let wal = unsafe { - /// Builder::new() - /// .with_create_new(true) - /// .with_read(true) - /// .with_write(true) - /// .with_capacity(1000) - /// .map_mut_with_path_builder::, _, ()>( - /// || Ok(dir.path().join("map_mut_with_path_builder_example.wal")), - /// ) - /// .unwrap() - /// }; - /// ``` + /// let wal = unsafe { + /// Builder::new() + /// .with_create_new(true) + /// .with_read(true) + /// .with_write(true) + /// .with_capacity(1000) + /// .map_mut_with_path_builder::>, _, ()>( + /// || Ok(dir.path().join("map_mut_with_path_builder_example.wal")), + /// ) + /// .unwrap() + /// }; + /// ``` + /// + /// ### Dynamic order WAL example + /// + /// ```rust + /// use orderwal::{dynamic::{OrderWal, BoundedTable}, Builder}; + /// + /// let dir = tempfile::tempdir().unwrap(); + /// + /// let wal = unsafe { + /// Builder::new() + /// .with_create_new(true) + /// .with_read(true) + /// .with_write(true) + /// .with_capacity(1000) + /// .map_mut_with_path_builder::, _, ()>( + /// || Ok(dir.path().join("map_mut_with_path_builder_example.wal")), + /// ) + /// .unwrap() + /// }; + /// ``` #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - pub unsafe fn map_mut_with_path_builder<'a, W, PB, E>( + pub unsafe fn map_mut_with_path_builder( self, path_builder: PB, - ) -> Result>> + ) -> Result>> where PB: FnOnce() -> Result, S: BuildChecksumer, - W: Constructable, - M::Key: Type + Ord + 'static, - ::Ref<'a>: KeyRef<'a, M::Key>, + L: Log, + M: MutableMemtable, { let path = path_builder().map_err(Either::Left)?; let exist = path.exists(); @@ -700,9 +786,9 @@ where .map_err(Into::into) .and_then(|arena| { if !exist { - W::new_in(arena, opts, memtable_opts, cks).map(W::from_core) + L::new(arena, opts, memtable_opts, cks) } else { - W::replay(arena, opts, memtable_opts, false, cks).map(W::from_core) + L::replay(arena, opts, memtable_opts, false, cks) } }) .map_err(Either::Right) diff --git a/src/dynamic.rs b/src/dynamic.rs new file mode 100644 index 00000000..0fb109df --- /dev/null +++ b/src/dynamic.rs @@ -0,0 +1,1035 @@ +use core::{ + borrow::Borrow, + ops::{Bound, RangeBounds}, +}; + +use among::Among; +use dbutils::{ + buffer::VacantBuffer, + checksum::{BuildChecksumer, Crc32}, + state::{Active, MaybeTombstone}, +}; +use either::Either; +use rarena_allocator::Allocator; + +#[cfg(feature = "bounded")] +use crate::memtable; +use crate::{ + batch::Batch, + error::Error, + log::Log, + memtable::{Memtable, MutableMemtable}, + swmr, + types::{BufWriter, KeyBuilder, Remove, Update, ValueBuilder}, + HEADER_SIZE, +}; + +pub use crate::memtable::dynamic::DynamicMemtable; +pub use dbutils::equivalentor::{Ascend, Descend}; + +#[cfg(feature = "bounded")] +use crate::memtable::dynamic::bounded; + +#[cfg(feature = "unbounded")] +use crate::memtable::dynamic::unbounded; + +/// A multiple versions ordered write-ahead log implementation for concurrent thread environments. +pub type OrderWal = swmr::OrderWal; + +/// The read-only view for the ordered write-ahead log [`OrderWal`]. 
+pub type OrderWalReader = swmr::OrderWalReader; + +/// The memory table based on bounded ARENA-style `SkipMap` for the ordered write-ahead log [`OrderWal`]. +#[cfg(feature = "skl")] +#[cfg_attr(docsrs, doc(cfg(feature = "bounded")))] +pub type BoundedTable = bounded::Table; + +/// The options for the [`BoundedTable`]. +#[cfg(feature = "skl")] +#[cfg_attr(docsrs, doc(cfg(feature = "bounded")))] +pub type BoundedTableOptions = memtable::bounded::TableOptions; + +/// The memory table based on unbounded linked-style `SkipMap` for the ordered write-ahead log [`OrderWal`]. +#[cfg(feature = "crossbeam-skiplist-mvcc")] +#[cfg_attr(docsrs, doc(cfg(feature = "unbounded")))] +pub type UnboundedTable = unbounded::Table; + +/// An abstract layer for the immutable write-ahead log. +pub trait Reader: Log { + /// Returns the reserved space in the WAL. + /// + /// ## Safety + /// - The writer must ensure that the returned slice is not modified. + /// - This method is not thread-safe, so be careful when using it. + #[inline] + unsafe fn reserved_slice(&self) -> &[u8] { + &self.allocator().reserved_slice()[HEADER_SIZE..] + } + + /// Returns the path of the WAL if it is backed by a file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + fn path(&self) -> Option<&<::Allocator as Allocator>::Path> { + self.allocator().path() + } + + /// Returns the maximum key size allowed in the WAL. + #[inline] + fn maximum_key_size(&self) -> u32 { + self.options().maximum_key_size() + } + + /// Returns the maximum value size allowed in the WAL. + #[inline] + fn maximum_value_size(&self) -> u32 { + self.options().maximum_value_size() + } + + /// Returns the maximum version in the WAL. + #[inline] + fn maximum_version(&self) -> u64 + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().maximum_version() + } + + /// Returns the minimum version in the WAL. + #[inline] + fn minimum_version(&self) -> u64 + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().minimum_version() + } + + /// Returns `true` if the WAL may contain an entry whose version is less or equal to the given version. + #[inline] + fn may_contain_version(&self, version: u64) -> bool + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().may_contain_version(version) + } + + /// Returns the number of entries in the WAL. + #[inline] + fn len(&self) -> usize + where + Self::Memtable: Memtable, + { + self.memtable().len() + } + + /// Returns `true` if the WAL is empty. + #[inline] + fn is_empty(&self) -> bool + where + Self::Memtable: Memtable, + { + self.memtable().is_empty() + } + + /// Returns the remaining capacity of the WAL. + #[inline] + fn remaining(&self) -> u32 { + self.allocator().remaining() as u32 + } + + /// Returns the capacity of the WAL. + #[inline] + fn capacity(&self) -> u32 { + self.allocator().capacity() as u32 + } + + /// Returns an iterator over the entries in the WAL. + #[inline] + fn iter(&self, version: u64) -> ::Iterator<'_, Active> + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().iter(version) + } + + /// Returns an iterator over a subset of entries in the WAL. + #[inline] + fn range( + &self, + version: u64, + range: R, + ) -> ::Range<'_, Active, Q, R> + where + R: RangeBounds, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range(version, range) + } + + /// Returns an iterator over the entries in the WAL. 
+ #[inline] + fn iter_all( + &self, + version: u64, + ) -> ::Iterator<'_, MaybeTombstone> + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().iter_all(version) + } + + /// Returns an iterator over a subset of entries in the WAL. + #[inline] + fn range_all( + &self, + version: u64, + range: R, + ) -> ::Range<'_, MaybeTombstone, Q, R> + where + R: RangeBounds, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range_all(version, range) + } + + /// Returns an iterator over point entries in the memtable. + #[inline] + fn iter_points( + &self, + version: u64, + ) -> ::PointsIterator<'_, Active> + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().iter_points(version) + } + + /// Returns an iterator over all(including all versions and tombstones) the point entries in the memtable. + #[inline] + fn iter_all_points( + &self, + version: u64, + ) -> ::PointsIterator<'_, MaybeTombstone> + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().iter_all_points(version) + } + + /// Returns an iterator over a subset of point entries in the memtable. + #[inline] + fn range_points( + &self, + version: u64, + range: R, + ) -> ::RangePoints<'_, Active, Q, R> + where + R: RangeBounds, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range_points(version, range) + } + + /// Returns an iterator over all(including all versions and tombstones) the point entries in a subset of the memtable. + #[inline] + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> ::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range_all_points(version, range) + } + + /// Returns an iterator over range deletions entries in the memtable. + #[inline] + fn iter_bulk_removes( + &self, + version: u64, + ) -> ::BulkOperationsIterator<'_, Active, Remove> + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().iter_bulk_removes(version) + } + + /// Returns an iterator over all(including all versions and tombstones) the range deletions entries in the memtable. + #[inline] + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> ::BulkOperationsIterator<'_, MaybeTombstone, Remove> + where + Self::Memtable: DynamicMemtable + 'static, + { + self.memtable().iter_all_bulk_removes(version) + } + + /// Returns an iterator over a subset of range deletions entries in the memtable. + #[inline] + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> ::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range_bulk_removes(version, range) + } + + /// Returns an iterator over all(including all versions and tombstones) the range deletions entries in a subset of the memtable. + #[inline] + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> ::BulkOperationsRange<'a, MaybeTombstone, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range_all_bulk_removes(version, range) + } + + /// Returns an iterator over range updates entries in the memtable. 
+ #[inline] + fn iter_bulk_updates( + &self, + version: u64, + ) -> ::BulkOperationsIterator<'_, Active, Update> + where + Self::Memtable: DynamicMemtable, + { + self.memtable().iter_bulk_updates(version) + } + + /// Returns an iterator over all(including all versions and tombstones) the range updates entries in the memtable. + #[inline] + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> ::BulkOperationsIterator<'_, MaybeTombstone, Update> + where + Self::Memtable: DynamicMemtable, + { + self.memtable().iter_all_bulk_updates(version) + } + + /// Returns an iterator over a subset of range updates entries in the memtable. + #[inline] + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> ::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range_bulk_updates(version, range) + } + + /// Returns an iterator over all(including all versions and tombstones) the range updates entries in a subset of the memtable. + #[inline] + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> ::BulkOperationsRange<'a, MaybeTombstone, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().range_all_bulk_updates(version, range) + } + + /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. + #[inline] + fn first(&self, version: u64) -> Option<::Entry<'_, Active>> + where + Self::Memtable: DynamicMemtable, + { + self.memtable().first(version) + } + + /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. + #[inline] + fn last(&self, version: u64) -> Option<::Entry<'_, Active>> + where + Self::Memtable: DynamicMemtable, + { + self.memtable().last(version) + } + + /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. + #[inline] + fn first_with_tombstone( + &self, + version: u64, + ) -> Option<::Entry<'_, MaybeTombstone>> + where + Self::Memtable: DynamicMemtable, + { + self.memtable().first_with_tombstone(version) + } + + /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. + #[inline] + fn last_with_tombstone( + &self, + version: u64, + ) -> Option<::Entry<'_, MaybeTombstone>> + where + Self::Memtable: DynamicMemtable, + { + self.memtable().last_with_tombstone(version) + } + + /// Returns `true` if the key exists in the WAL. + #[inline] + fn contains_key(&self, version: u64, key: &Q) -> bool + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().contains(version, key) + } + + /// Gets the value associated with the key. + #[inline] + fn get<'a, Q>( + &'a self, + version: u64, + key: &Q, + ) -> Option<::Entry<'a, Active>> + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().get(version, key) + } + + /// Returns `true` if the key exists in the WAL. + #[inline] + fn contains_key_with_tombstone(&self, version: u64, key: &Q) -> bool + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().contains_with_tombsone(version, key) + } + + /// Gets the value associated with the key. 
+ #[inline] + fn get_with_tombstone<'a, Q>( + &'a self, + version: u64, + key: &Q, + ) -> Option<::Entry<'a, MaybeTombstone>> + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().get_with_tombstone(version, key) + } + + /// Returns a value associated to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<::Entry<'a, Active>> + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().upper_bound(version, bound) + } + + /// Returns a value associated to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<::Entry<'a, Active>> + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().lower_bound(version, bound) + } + + /// Returns a value associated to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<::Entry<'a, MaybeTombstone>> + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().upper_bound_with_tombstone(version, bound) + } + + /// Returns a value associated to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<::Entry<'a, MaybeTombstone>> + where + Q: ?Sized + Borrow<[u8]>, + Self::Memtable: DynamicMemtable, + { + self.memtable().lower_bound_with_tombstone(version, bound) + } +} + +impl Reader for T +where + T: Log, + T::Memtable: DynamicMemtable, +{ +} + +/// An abstract layer for the write-ahead log. +pub trait Writer: Reader +where + Self::Reader: Reader, +{ + /// Returns `true` if this WAL instance is read-only. + #[inline] + fn read_only(&self) -> bool { + self.allocator().read_only() + } + + /// Returns the mutable reference to the reserved slice. + /// + /// ## Safety + /// - The caller must ensure that the there is no others accessing reserved slice for either read or write. + /// - This method is not thread-safe, so be careful when using it. + #[inline] + unsafe fn reserved_slice_mut(&mut self) -> &mut [u8] { + &mut self.allocator().reserved_slice_mut()[HEADER_SIZE..] + } + + /// Flushes the to disk. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + fn flush(&self) -> Result<(), Error> { + self.allocator().flush().map_err(Into::into) + } + + /// Flushes the to disk. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + fn flush_async(&self) -> Result<(), Error> { + self.allocator().flush_async().map_err(Into::into) + } + + /// Returns the read-only view for the WAL. + fn reader(&self) -> Self::Reader; + + /// Inserts a key-value pair into the WAL. This method + /// allows the caller to build the key in place. 
+ /// + /// See also [`insert_with_value_builder`](Writer::insert_with_value_builder) and [`insert_with_builders`](Writer::insert_with_builders). + #[inline] + fn insert_with_key_builder( + &mut self, + version: u64, + kb: KeyBuilder) -> Result>, + value: &[u8], + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::insert::<_, &[u8]>(self, version, kb, value).map_err(Among::into_left_right) + } + + /// Inserts a key-value pair into the WAL. This method + /// allows the caller to build the value in place. + /// + /// See also [`insert_with_key_builder`](Writer::insert_with_key_builder) and [`insert_with_builders`](Writer::insert_with_builders). + #[inline] + fn insert_with_value_builder( + &mut self, + version: u64, + key: &[u8], + vb: ValueBuilder) -> Result>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::insert::<&[u8], _>(self, version, key, vb).map_err(Among::into_middle_right) + } + + /// Inserts a key-value pair into the WAL. This method + /// allows the caller to build the key and value in place. + #[inline] + fn insert_with_builders( + &mut self, + version: u64, + kb: KeyBuilder) -> Result>, + vb: ValueBuilder) -> Result>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::insert(self, version, kb, vb) + } + + /// Inserts a key-value pair into the WAL. + #[inline] + fn insert(&mut self, version: u64, key: &[u8], value: &[u8]) -> Result<(), Error> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::insert(self, version, key, value).map_err(Among::unwrap_right) + } + + /// Removes a key-value pair from the WAL. This method + /// allows the caller to build the key in place. + #[inline] + fn remove_with_builder( + &mut self, + version: u64, + kb: KeyBuilder) -> Result>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::remove(self, version, kb) + } + + /// Removes a key-value pair from the WAL. + #[inline] + fn remove(&mut self, version: u64, key: &[u8]) -> Result<(), Error> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::remove(self, version, key).map_err(Either::unwrap_right) + } + + /// Mark all keys in the range as removed. + /// + /// This is not a contra operation to [`range_set`](Writer::range_set). + /// See also [`range_set`](Writer::range_set) and [`range_set`](Writer::range_unset). + #[inline] + fn range_remove( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound<&[u8]>, + ) -> Result<(), Error> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_remove(self, version, start_bound, end_bound).map_err(Among::unwrap_right) + } + + /// Mark all keys in the range as removed, which allows the caller to build the start bound in place. + /// + /// See [`range_remove`](Writer::range_remove). 
+ #[inline] + fn range_remove_with_start_bound_builder( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound<&[u8]>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_remove(self, version, start_bound, end_bound).map_err(|e| match e { + Among::Left(e) => Either::Left(e), + Among::Middle(_) => unreachable!(), + Among::Right(e) => Either::Right(e), + }) + } + + /// Mark all keys in the range as removed, which allows the caller to build the end bound in place. + /// + /// See [`range_remove`](Writer::range_remove). + #[inline] + fn range_remove_with_end_bound_builder( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_remove(self, version, start_bound, end_bound).map_err(|e| match e { + Among::Left(_) => unreachable!(), + Among::Middle(e) => Either::Left(e), + Among::Right(e) => Either::Right(e), + }) + } + + /// Mark all keys in the range as removed, which allows the caller to build both bounds in place. + /// + /// See [`range_remove`](Writer::range_remove). + #[inline] + fn range_remove_with_builders( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_remove(self, version, start_bound, end_bound) + } + + /// Set all keys in the range to the `value`. + #[inline] + fn range_set( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound<&[u8]>, + value: &[u8], + ) -> Result<(), Error> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value).map_err(Among::unwrap_right) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the start bound in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_start_bound_builder( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound<&[u8]>, + value: &[u8], + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value).map_err(|e| match e { + Among::Left(e) => Either::Left(e.unwrap_left()), + Among::Middle(_) => unreachable!(), + Among::Right(e) => Either::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the end bound in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_end_bound_builder( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound) -> Result>>, + value: &[u8], + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value).map_err(|e| match e { + Among::Left(e) => Either::Left(e.unwrap_right()), + Among::Middle(_) => unreachable!(), + Among::Right(e) => Either::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the value in place. + /// + /// See [`range_set`](Writer::range_set). 
+ #[inline] + fn range_set_with_value_builder( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound<&[u8]>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value).map_err(|e| match e { + Among::Left(_) => unreachable!(), + Among::Middle(e) => Either::Left(e), + Among::Right(e) => Either::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the start bound key and value in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_start_bound_builder_and_value_builder( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound<&[u8]>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value).map_err(|e| match e { + Among::Left(e) => Among::Left(e.unwrap_left()), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the end bound key and value in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_end_bound_builder_and_value_builder( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound) -> Result>>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value).map_err(|e| match e { + Among::Left(e) => Among::Left(e.unwrap_right()), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build both bounds in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_bound_builders( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + value: &[u8], + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value).map_err(|e| match e { + Among::Left(Either::Left(e)) => Among::Left(e), + Among::Left(Either::Right(e)) => Among::Middle(e), + Among::Middle(_) => unreachable!(), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build both bounds and value in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_builders( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Among, V, Error>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value) + } + + /// Unsets all keys in the range to their original value. + /// + /// This is a contra operation to [`range_set`](Writer::range_set). 
+ #[inline] + fn range_unset( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound<&[u8]>, + ) -> Result<(), Error> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_unset(self, version, start_bound, end_bound).map_err(Among::unwrap_right) + } + + /// Unsets all keys in the range to their original value, which allows the caller to build the start bound in place. + /// + /// See [`range_unset`](Writer::range_unset). + #[inline] + fn range_unset_with_start_bound_builder( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound<&[u8]>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_unset(self, version, start_bound, end_bound).map_err(|e| match e { + Among::Left(e) => Either::Left(e), + Among::Middle(_) => unreachable!(), + Among::Right(e) => Either::Right(e), + }) + } + + /// Unsets all keys in the range to their original value, which allows the caller to build the end bound in place. + /// + /// See [`range_unset`](Writer::range_unset). + #[inline] + fn range_unset_with_end_bound_builder( + &mut self, + version: u64, + start_bound: Bound<&[u8]>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_unset(self, version, start_bound, end_bound).map_err(|e| match e { + Among::Left(_) => unreachable!(), + Among::Middle(e) => Either::Left(e), + Among::Right(e) => Either::Right(e), + }) + } + + /// Unsets all keys in the range to their original value, which allows the caller to build both bounds in place. + /// + /// See [`range_unset`](Writer::range_unset). + #[inline] + fn range_unset_with_builders( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::range_unset(self, version, start_bound, end_bound) + } + + /// Inserts a batch of key-value pairs into the WAL. + #[inline] + fn apply(&mut self, batch: &mut B) -> Result<(), Error> + where + B: Batch, + B::Key: AsRef<[u8]>, + B::Value: AsRef<[u8]>, + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::apply(self, batch).map_err(Among::unwrap_right) + } + + /// Inserts a batch of key-value pairs into the WAL. + #[inline] + fn apply_with_key_builder( + &mut self, + batch: &mut B, + ) -> Result<(), Either<::Error, Error>> + where + B: Batch, + B::Key: BufWriter, + B::Value: AsRef<[u8]>, + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::apply::(self, batch).map_err(Among::into_left_right) + } + + /// Inserts a batch of key-value pairs into the WAL. + #[inline] + fn apply_with_value_builder( + &mut self, + batch: &mut B, + ) -> Result<(), Either<::Error, Error>> + where + B: Batch, + B::Key: AsRef<[u8]>, + B::Value: BufWriter, + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::apply::(self, batch).map_err(Among::into_middle_right) + } + + /// Inserts a batch of key-value pairs into the WAL. 
+ #[inline] + fn apply_with_builders( + &mut self, + batch: &mut B, + ) -> Result<(), Among>> + where + B: Batch, + KB: BufWriter, + VB: BufWriter, + Self::Checksumer: BuildChecksumer, + Self::Memtable: DynamicMemtable + MutableMemtable, + { + Log::apply::(self, batch) + } +} + +impl Writer for swmr::OrderWal +where + M: DynamicMemtable + 'static, + S: 'static, +{ + #[inline] + fn reader(&self) -> Self::Reader { + swmr::OrderWalReader::from_core(self.core.clone()) + } +} diff --git a/src/error.rs b/src/error.rs index f7fb6ad3..cb225d4a 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,11 +1,8 @@ use among::Among; -use dbutils::error::InsufficientBuffer; +pub use dbutils::error::*; use derive_where::derive_where; -use crate::memtable::BaseTable; - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -use crate::types::Kind; +use crate::memtable::Memtable; /// The batch error type. #[derive(Debug)] @@ -46,7 +43,7 @@ impl core::error::Error for BatchError {} /// The error type. #[derive_where(Debug; T::Error)] -pub enum Error { +pub enum Error { /// Insufficient space in the WAL InsufficientSpace(InsufficientBuffer), /// Memtable does not have enough space. @@ -65,6 +62,8 @@ pub enum Error { /// The maximum value size. maximum_value_size: u32, }, + /// The range key is too large. + RangeKeyTooLarge(u64), /// The entry is too large. EntryTooLarge { /// The size of the entry. @@ -72,35 +71,17 @@ pub enum Error { /// The maximum entry size. maximum_entry_size: u64, }, - /// Returned when the expected batch encoding size does not match the actual size. Batch(BatchError), - /// The WAL is read-only. ReadOnly, - - /// Unknown WAL kind. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - UnknownKind(UnknownKind), - - /// WAL kind mismatch. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - KindMismatch { - /// The WAL was created with this kind. - create: Kind, - /// Trying to open the WAL with this kind. - open: Kind, - }, - /// I/O error. 
#[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] IO(std::io::Error), } -impl From for Error { +impl From for Error { #[inline] fn from(e: BatchError) -> Self { Self::Batch(e) @@ -108,15 +89,7 @@ impl From for Error { } #[cfg(all(feature = "memmap", not(target_family = "wasm")))] -impl From for Error { - #[inline] - fn from(e: UnknownKind) -> Self { - Self::UnknownKind(e) - } -} - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -impl From for Error { +impl From for Error { #[inline] fn from(e: std::io::Error) -> Self { Self::IO(e) @@ -125,7 +98,7 @@ impl From for Error { impl core::fmt::Display for Error where - T: BaseTable, + T: Memtable, T::Error: core::fmt::Display, { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { @@ -148,6 +121,7 @@ where "the value size is {} larger than the maximum value size {}", size, maximum_value_size ), + Self::RangeKeyTooLarge(size) => write!(f, "the range key size is {} too large", size), Self::EntryTooLarge { size, maximum_entry_size, @@ -158,44 +132,15 @@ where ), Self::Batch(e) => write!(f, "{e}"), Self::ReadOnly => write!(f, "The WAL is read-only"), - - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - Self::UnknownKind(e) => write!(f, "{e}"), - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - Self::KindMismatch { create, open } => write!( - f, - "the wal was {}, cannot be {}", - create.display_created_err_msg(), - open.display_open_err_msg() - ), #[cfg(all(feature = "memmap", not(target_family = "wasm")))] Self::IO(e) => write!(f, "{e}"), } } } -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -impl Kind { - #[inline] - const fn display_created_err_msg(&self) -> &'static str { - match self { - Self::Plain => "created without multiple versions support", - Self::MultipleVersion => "created with multiple versions support", - } - } - - #[inline] - const fn display_open_err_msg(&self) -> &'static str { - match self { - Self::Plain => "opened without multiple versions support", - Self::MultipleVersion => "opened with multiple versions support", - } - } -} - impl core::error::Error for Error where - T: BaseTable, + T: Memtable, T::Error: core::error::Error + 'static, { fn source(&self) -> Option<&(dyn core::error::Error + 'static)> { @@ -203,22 +148,19 @@ where Self::InsufficientSpace(e) => Some(e), Self::Memtable(e) => Some(e), Self::KeyTooLarge { .. } => None, + Self::RangeKeyTooLarge(_) => None, Self::ValueTooLarge { .. } => None, Self::EntryTooLarge { .. } => None, Self::Batch(e) => Some(e), Self::ReadOnly => None, - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - Self::UnknownKind(e) => Some(e), - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - Self::KindMismatch { .. } => None, #[cfg(all(feature = "memmap", not(target_family = "wasm")))] Self::IO(e) => Some(e), } } } -impl From>> for Error { +impl From>> for Error { #[inline] fn from(value: Among>) -> Self { match value { @@ -229,7 +171,7 @@ impl From>> } } -impl Error { +impl Error { /// Create a new `Error::InsufficientSpace` instance. #[inline] pub(crate) const fn insufficient_space(requested: u64, available: u32) -> Self { @@ -254,6 +196,12 @@ impl Error { } } + /// Create a new `Error::RangeKeyTooLarge` instance. + #[inline] + pub(crate) const fn range_key_too_large(size: u64) -> Self { + Self::RangeKeyTooLarge(size) + } + /// Create a new `Error::ValueTooLarge` instance. 
#[inline] pub(crate) const fn value_too_large(size: u64, maximum_value_size: u32) -> Self { @@ -283,21 +231,15 @@ impl Error { } } - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[inline] - pub(crate) const fn wal_kind_mismatch(create: Kind, open: Kind) -> Self { - Self::KindMismatch { create, open } - } - /// Create a new corrupted error. #[cfg(all(feature = "memmap", not(target_family = "wasm")))] #[inline] pub(crate) fn corrupted(e: E) -> Self where - E: Into>, + E: Into>, { #[derive(Debug)] - struct Corrupted(Box); + struct Corrupted(Box); impl std::fmt::Display for Corrupted { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { @@ -305,7 +247,7 @@ impl Error { } } - impl std::error::Error for Corrupted {} + impl core::error::Error for Corrupted {} Self::IO(std::io::Error::new( std::io::ErrorKind::InvalidData, @@ -349,19 +291,3 @@ impl Error { )) } } - -/// Unknown WAL kind error. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -#[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] -pub struct UnknownKind(pub(super) u8); - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -impl core::fmt::Display for UnknownKind { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "unknown WAL kind: {}", self.0) - } -} - -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -impl core::error::Error for UnknownKind {} diff --git a/src/generic.rs b/src/generic.rs new file mode 100644 index 00000000..52dbd984 --- /dev/null +++ b/src/generic.rs @@ -0,0 +1,1165 @@ +use core::ops::{Bound, RangeBounds}; + +use among::Among; +use dbutils::{ + buffer::VacantBuffer, + checksum::{BuildChecksumer, Crc32}, + equivalentor::{TypeRefComparator, TypeRefQueryComparator}, + state::{Active, MaybeTombstone}, + types::{MaybeStructured, Type}, +}; +use either::Either; +use rarena_allocator::Allocator; + +#[cfg(feature = "bounded")] +use crate::memtable; +use crate::{ + batch::Batch, + error::Error, + log::Log, + memtable::{Memtable, MutableMemtable}, + swmr, + types::{BufWriter, KeyBuilder, Remove, Update, ValueBuilder}, + HEADER_SIZE, +}; + +pub use crate::memtable::generic::GenericMemtable; +pub use dbutils::equivalentor::{Ascend, Descend}; + +#[cfg(feature = "bounded")] +use crate::memtable::generic::bounded; + +#[cfg(feature = "unbounded")] +use crate::memtable::generic::unbounded; + +/// A multiple versions ordered write-ahead log implementation for concurrent thread environments. +pub type OrderWal = swmr::OrderWal; + +/// The read-only view for the ordered write-ahead log [`OrderWal`]. +pub type OrderWalReader = swmr::OrderWalReader; + +/// The memory table based on bounded ARENA-style `SkipMap` for the ordered write-ahead log [`OrderWal`]. +#[cfg(feature = "skl")] +#[cfg_attr(docsrs, doc(cfg(feature = "bounded")))] +pub type BoundedTable = bounded::Table; + +/// The options for the [`BoundedTable`]. +#[cfg(feature = "skl")] +#[cfg_attr(docsrs, doc(cfg(feature = "bounded")))] +pub type BoundedTableOptions = memtable::bounded::TableOptions; + +/// The memory table based on unbounded linked-style `SkipMap` for the ordered write-ahead log [`OrderWal`]. +#[cfg(feature = "crossbeam-skiplist-mvcc")] +#[cfg_attr(docsrs, doc(cfg(feature = "unbounded")))] +pub type UnboundedTable = unbounded::Table; + +/// An abstract layer for the immutable write-ahead log. 
+pub trait Reader +where + Self: Log, + K: ?Sized, + V: ?Sized, +{ + /// Returns the reserved space in the WAL. + /// + /// ## Safety + /// - The writer must ensure that the returned slice is not modified. + /// - This method is not thread-safe, so be careful when using it. + #[inline] + unsafe fn reserved_slice(&self) -> &[u8] { + &self.allocator().reserved_slice()[HEADER_SIZE..] + } + + /// Returns the path of the WAL if it is backed by a file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + fn path(&self) -> Option<&<::Allocator as Allocator>::Path> { + self.allocator().path() + } + + /// Returns the maximum key size allowed in the WAL. + #[inline] + fn maximum_key_size(&self) -> u32 { + self.options().maximum_key_size() + } + + /// Returns the maximum value size allowed in the WAL. + #[inline] + fn maximum_value_size(&self) -> u32 { + self.options().maximum_value_size() + } + + /// Returns the maximum version in the WAL. + #[inline] + fn maximum_version(&self) -> u64 + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().maximum_version() + } + + /// Returns the minimum version in the WAL. + #[inline] + fn minimum_version(&self) -> u64 + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().minimum_version() + } + + /// Returns `true` if the WAL may contain an entry whose version is less or equal to the given version. + #[inline] + fn may_contain_version(&self, version: u64) -> bool + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().may_contain_version(version) + } + + /// Returns the number of entries in the WAL. + #[inline] + fn len(&self) -> usize + where + Self::Memtable: Memtable, + { + self.memtable().len() + } + + /// Returns `true` if the WAL is empty. + #[inline] + fn is_empty(&self) -> bool + where + Self::Memtable: Memtable, + { + self.memtable().is_empty() + } + + /// Returns the remaining capacity of the WAL. + #[inline] + fn remaining(&self) -> u32 { + self.allocator().remaining() as u32 + } + + /// Returns the capacity of the WAL. + #[inline] + fn capacity(&self) -> u32 { + self.allocator().capacity() as u32 + } + + /// Returns an iterator over the entries in the WAL. + #[inline] + fn iter(&self, version: u64) -> >::Iterator<'_, Active> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter(version) + } + + /// Returns an iterator over a subset of entries in the WAL. + #[inline] + fn range<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::Range<'a, Active, Q, R> + where + R: RangeBounds, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range(version, range) + } + + /// Returns an iterator over the entries in the WAL. + #[inline] + fn iter_all( + &self, + version: u64, + ) -> >::Iterator<'_, MaybeTombstone> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter_all(version) + } + + /// Returns an iterator over a subset of entries in the WAL. 
+ #[inline] + fn range_all<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::Range<'a, MaybeTombstone, Q, R> + where + R: RangeBounds, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range_all(version, range) + } + + /// Returns an iterator over point entries in the memtable. + #[inline] + fn iter_points( + &self, + version: u64, + ) -> >::PointsIterator<'_, Active> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter_points(version) + } + + /// Returns an iterator over all(including all versions and tombstones) the point entries in the memtable. + #[inline] + fn iter_all_points( + &self, + version: u64, + ) -> >::PointsIterator<'_, MaybeTombstone> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter_all_points(version) + } + + /// Returns an iterator over a subset of point entries in the memtable. + #[inline] + fn range_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::RangePoints<'a, Active, Q, R> + where + R: RangeBounds, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range_points(version, range) + } + + /// Returns an iterator over all(including all versions and tombstones) the point entries in a subset of the memtable. + #[inline] + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range_all_points(version, range) + } + + /// Returns an iterator over range deletions entries in the memtable. + #[inline] + fn iter_bulk_removes( + &self, + version: u64, + ) -> >::BulkOperationsIterator<'_, Active, Remove> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter_bulk_removes(version) + } + + /// Returns an iterator over all(including all versions and tombstones) the range deletions entries in the memtable. + #[inline] + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> >::BulkOperationsIterator<'_, MaybeTombstone, Remove> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter_all_bulk_removes(version) + } + + /// Returns an iterator over a subset of range deletions entries in the memtable. + #[inline] + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range_bulk_removes(version, range) + } + + /// Returns an iterator over all(including all versions and tombstones) the range deletions entries in a subset of the memtable. 
+ #[inline] + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::BulkOperationsRange< + 'a, + MaybeTombstone, + Remove, + Q, + R, + > + where + R: RangeBounds + 'a, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range_all_bulk_removes(version, range) + } + + /// Returns an iterator over range updates entries in the memtable. + #[inline] + fn iter_bulk_updates( + &self, + version: u64, + ) -> >::BulkOperationsIterator<'_, Active, Update> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter_bulk_updates(version) + } + + /// Returns an iterator over all(including all versions and tombstones) the range updates entries in the memtable. + #[inline] + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> >::BulkOperationsIterator<'_, MaybeTombstone, Update> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + { + self.memtable().iter_all_bulk_updates(version) + } + + /// Returns an iterator over a subset of range updates entries in the memtable. + #[inline] + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range_bulk_updates(version, range) + } + + /// Returns an iterator over all(including all versions and tombstones) the range updates entries in a subset of the memtable. + #[inline] + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> >::BulkOperationsRange< + 'a, + MaybeTombstone, + Update, + Q, + R, + > + where + R: RangeBounds + 'a, + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().range_all_bulk_updates(version, range) + } + + /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. + #[inline] + fn first<'a>( + &'a self, + version: u64, + ) -> Option<>::Entry<'a, Active>> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefComparator<'a, K>, + { + self.memtable().first(version) + } + + /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. + #[inline] + fn last<'a>( + &'a self, + version: u64, + ) -> Option<>::Entry<'a, Active>> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefComparator<'a, K>, + { + self.memtable().last(version) + } + + /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. + #[inline] + fn first_with_tombstone<'a>( + &'a self, + version: u64, + ) -> Option<>::Entry<'a, MaybeTombstone>> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefComparator<'a, K>, + { + self.memtable().first_with_tombstone(version) + } + + /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. 
+ #[inline] + fn last_with_tombstone<'a>( + &'a self, + version: u64, + ) -> Option<>::Entry<'a, MaybeTombstone>> + where + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefComparator<'a, K>, + { + self.memtable().last_with_tombstone(version) + } + + /// Returns `true` if the key exists in the WAL. + #[inline] + fn contains_key<'a, Q>(&'a self, version: u64, key: &Q) -> bool + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().contains(version, key) + } + + /// Gets the value associated with the key. + #[inline] + fn get<'a, Q>( + &'a self, + version: u64, + key: &Q, + ) -> Option<>::Entry<'a, Active>> + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().get(version, key) + } + + /// Returns `true` if the key exists in the WAL. + #[inline] + fn contains_key_with_tombstone<'a, Q>(&'a self, version: u64, key: &Q) -> bool + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().contains_with_tombsone(version, key) + } + + /// Gets the value associated with the key. + #[inline] + fn get_with_tombstone<'a, Q>( + &'a self, + version: u64, + key: &Q, + ) -> Option<>::Entry<'a, MaybeTombstone>> + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().get_with_tombstone(version, key) + } + + /// Returns a value associated to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<>::Entry<'a, Active>> + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().upper_bound(version, bound) + } + + /// Returns a value associated to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<>::Entry<'a, Active>> + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().lower_bound(version, bound) + } + + /// Returns a value associated to the highest element whose key is below the given bound. + /// If no such element is found then `None` is returned. + #[inline] + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<>::Entry<'a, MaybeTombstone>> + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().upper_bound_with_tombstone(version, bound) + } + + /// Returns a value associated to the lowest element whose key is above the given bound. + /// If no such element is found then `None` is returned. 
+ #[inline] + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option<>::Entry<'a, MaybeTombstone>> + where + Q: ?Sized, + K: Type + 'static, + V: Type + 'static, + Self::Memtable: GenericMemtable, + >::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.memtable().lower_bound_with_tombstone(version, bound) + } +} + +impl Reader for T +where + T: Log, + T::Memtable: GenericMemtable, + K: Type + ?Sized + 'static, + V: Type + ?Sized + 'static, +{ +} + +/// An abstract layer for the write-ahead log. +pub trait Writer: Reader +where + Self::Reader: Reader, + Self::Memtable: GenericMemtable, + K: Type + ?Sized + 'static, + V: Type + ?Sized + 'static, +{ + /// Returns `true` if this WAL instance is read-only. + #[inline] + fn read_only(&self) -> bool { + self.allocator().read_only() + } + + /// Returns the mutable reference to the reserved slice. + /// + /// ## Safety + /// - The caller must ensure that the there is no others accessing reserved slice for either read or write. + /// - This method is not thread-safe, so be careful when using it. + #[inline] + unsafe fn reserved_slice_mut(&mut self) -> &mut [u8] { + &mut self.allocator().reserved_slice_mut()[HEADER_SIZE..] + } + + /// Flushes the to disk. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + fn flush(&self) -> Result<(), Error> { + self.allocator().flush().map_err(Into::into) + } + + /// Flushes the to disk. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + fn flush_async(&self) -> Result<(), Error> { + self.allocator().flush_async().map_err(Into::into) + } + + /// Returns the read-only view for the WAL. + fn reader(&self) -> Self::Reader; + + /// Inserts a key-value pair into the WAL. This method + /// allows the caller to build the key in place. + /// + /// See also [`insert_with_value_builder`](Writer::insert_with_value_builder) and [`insert_with_builders`](Writer::insert_with_builders). + #[inline] + fn insert_with_key_builder<'a, E>( + &'a mut self, + version: u64, + kb: KeyBuilder) -> Result>, + value: impl Into>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::insert(self, version, kb, value.into()).map_err(Among::into_left_right) + } + + /// Inserts a key-value pair into the WAL. This method + /// allows the caller to build the value in place. + /// + /// See also [`insert_with_key_builder`](Writer::insert_with_key_builder) and [`insert_with_builders`](Writer::insert_with_builders). + #[inline] + fn insert_with_value_builder<'a, E>( + &'a mut self, + version: u64, + key: impl Into>, + vb: ValueBuilder) -> Result>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::insert(self, version, key.into(), vb) + } + + /// Inserts a key-value pair into the WAL. This method + /// allows the caller to build the key and value in place. + #[inline] + fn insert_with_builders( + &mut self, + version: u64, + kb: KeyBuilder) -> Result>, + vb: ValueBuilder) -> Result>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::insert(self, version, kb, vb) + } + + /// Inserts a key-value pair into the WAL. 
+ #[inline] + fn insert<'a>( + &'a mut self, + version: u64, + key: impl Into>, + value: impl Into>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::insert(self, version, key.into(), value.into()) + } + + /// Removes a key-value pair from the WAL. This method + /// allows the caller to build the key in place. + #[inline] + fn remove_with_builder( + &mut self, + version: u64, + kb: KeyBuilder) -> Result>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::remove(self, version, kb) + } + + /// Removes a key-value pair from the WAL. + #[inline] + fn remove<'a>( + &'a mut self, + version: u64, + key: impl Into>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::remove(self, version, key.into()) + } + + /// Mark all keys in the range as removed. + /// + /// This is not a contra operation to [`range_set`](Writer::range_set). + /// See also [`range_set`](Writer::range_set) and [`range_set`](Writer::range_unset). + #[inline] + fn range_remove<'a>( + &mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound>>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_remove( + self, + version, + start_bound.map(Into::into), + end_bound.map(Into::into), + ) + .map_err(|e| match e { + Among::Left(e) => Either::Left(e), + Among::Middle(e) => Either::Left(e), + Among::Right(e) => Either::Right(e), + }) + } + + /// Mark all keys in the range as removed, which allows the caller to build the start bound in place. + /// + /// See [`range_remove`](Writer::range_remove). + #[inline] + fn range_remove_with_start_bound_builder<'a, E>( + &'a mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_remove(self, version, start_bound, end_bound.map(Into::into)).map_err(|e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Mark all keys in the range as removed, which allows the caller to build the end bound in place. + /// + /// See [`range_remove`](Writer::range_remove). + #[inline] + fn range_remove_with_end_bound_builder<'a, E>( + &'a mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_remove(self, version, start_bound.map(Into::into), end_bound).map_err(|e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Mark all keys in the range as removed, which allows the caller to build both bounds in place. + /// + /// See [`range_remove`](Writer::range_remove). + #[inline] + fn range_remove_with_builders( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_remove(self, version, start_bound, end_bound) + } + + /// Set all keys in the range to the `value`. 
+ #[inline] + fn range_set<'a>( + &'a mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound>>, + value: impl Into>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set( + self, + version, + start_bound.map(Into::into), + end_bound.map(Into::into), + value.into(), + ) + .map_err(|e| match e { + Among::Left(e) => Among::Left(e.into_inner()), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the start bound in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_start_bound_builder<'a, E>( + &'a mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound>>, + value: impl Into>, + ) -> Result<(), Among, V::Error, Error>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set( + self, + version, + start_bound, + end_bound.map(Into::into), + value.into(), + ) + .map_err(|e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the end bound in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_end_bound_builder<'a, E>( + &'a mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound) -> Result>>, + value: impl Into>, + ) -> Result<(), Among, V::Error, Error>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set( + self, + version, + start_bound.map(Into::into), + end_bound, + value.into(), + ) + .map_err(|e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the value in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_value_builder<'a, E>( + &'a mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound>>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set( + self, + version, + start_bound.map(Into::into), + end_bound.map(Into::into), + value, + ) + .map_err(|e| match e { + Among::Left(e) => Among::Left(e.into_inner()), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the start bound key and value in place. + /// + /// See [`range_set`](Writer::range_set). 
+ #[inline] + fn range_set_with_start_bound_builder_and_value_builder<'a, S, VE>( + &'a mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound>>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Among, VE, Error>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound.map(Into::into), value).map_err( + |e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }, + ) + } + + /// Set all keys in the range to the `value`, which allows the caller to build the end bound key and value in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_end_bound_builder_and_value_builder<'a, E, VE>( + &'a mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound) -> Result>>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Among, VE, Error>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound.map(Into::into), end_bound, value).map_err( + |e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }, + ) + } + + /// Set all keys in the range to the `value`, which allows the caller to build both bounds in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_bound_builders<'a, S, E>( + &'a mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + value: impl Into>, + ) -> Result<(), Among, V::Error, Error>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value.into()).map_err(|e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Set all keys in the range to the `value`, which allows the caller to build both bounds and value in place. + /// + /// See [`range_set`](Writer::range_set). + #[inline] + fn range_set_with_builders( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + value: ValueBuilder) -> Result>, + ) -> Result<(), Among, VE, Error>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_set(self, version, start_bound, end_bound, value) + } + + /// Unsets all keys in the range to their original value. + /// + /// This is a contra operation to [`range_set`](Writer::range_set). + #[inline] + fn range_unset<'a>( + &'a mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound>>, + ) -> Result<(), Either>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_unset( + self, + version, + start_bound.map(Into::into), + end_bound.map(Into::into), + ) + .map_err(|e| match e { + Among::Left(e) => Either::Left(e), + Among::Middle(e) => Either::Left(e), + Among::Right(e) => Either::Right(e), + }) + } + + /// Unsets all keys in the range to their original value, which allows the caller to build the start bound in place. + /// + /// See [`range_unset`](Writer::range_unset). 
+ #[inline] + fn range_unset_with_start_bound_builder<'a, E>( + &'a mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_unset(self, version, start_bound, end_bound.map(Into::into)).map_err(|e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Unsets all keys in the range to their original value, which allows the caller to build the end bound in place. + /// + /// See [`range_unset`](Writer::range_unset). + #[inline] + fn range_unset_with_end_bound_builder<'a, E>( + &'a mut self, + version: u64, + start_bound: Bound>>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_unset(self, version, start_bound.map(Into::into), end_bound).map_err(|e| match e { + Among::Left(e) => Among::Left(e), + Among::Middle(e) => Among::Middle(e), + Among::Right(e) => Among::Right(e), + }) + } + + /// Unsets all keys in the range to their original value, which allows the caller to build both bounds in place. + /// + /// See [`range_unset`](Writer::range_unset). + #[inline] + fn range_unset_with_builders( + &mut self, + version: u64, + start_bound: Bound) -> Result>>, + end_bound: Bound) -> Result>>, + ) -> Result<(), Among>> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::range_unset(self, version, start_bound, end_bound) + } + + /// Inserts a batch of key-value pairs into the WAL. + #[inline] + fn apply( + &mut self, + batch: &mut B, + ) -> Result<(), Among>> + where + B: Batch, + KB: BufWriter, + VB: BufWriter, + Self::Checksumer: BuildChecksumer, + Self::Memtable: GenericMemtable + MutableMemtable, + { + Log::apply::(self, batch) + } +} + +impl Writer for swmr::OrderWal +where + M: GenericMemtable + 'static, + K: Type + ?Sized + 'static, + V: Type + ?Sized + 'static, + S: 'static, +{ + #[inline] + fn reader(&self) -> Self::Reader { + swmr::OrderWalReader::from_core(self.core.clone()) + } +} diff --git a/src/lib.rs b/src/lib.rs index f15f149e..e1493dfd 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -4,78 +4,58 @@ #![cfg_attr(docsrs, feature(doc_cfg))] #![cfg_attr(docsrs, allow(unused_attributes))] #![deny(missing_docs)] -#![allow(clippy::type_complexity)] - -use core::mem; - -pub use among; +#![allow(clippy::type_complexity, rustdoc::broken_intra_doc_links)] #[cfg(feature = "std")] extern crate std; -#[cfg(not(feature = "std"))] +#[cfg(all(feature = "alloc", not(feature = "std"), test))] extern crate alloc as std; -pub use dbutils::{ - checksum::{self, Crc32}, - equivalent::{Comparable, ComparableRangeBounds, Equivalent}, -}; +pub use among; +pub use builder::Builder; +pub use dbutils::{checksum, equivalent, equivalentor, state}; -#[cfg(feature = "xxhash3")] -#[cfg_attr(docsrs, doc(cfg(feature = "xxhash3")))] -pub use dbutils::checksum::XxHash3; +pub use options::Options; -#[cfg(feature = "xxhash64")] -#[cfg_attr(docsrs, doc(cfg(feature = "xxhash64")))] -pub use dbutils::checksum::XxHash64; +use core::mem; -const RECORD_FLAG_SIZE: usize = mem::size_of::(); +const RECORD_FLAG_SIZE: usize = mem::size_of::(); const CHECKSUM_SIZE: usize = mem::size_of::(); const CURRENT_VERSION: u16 = 0; -const MAGIC_TEXT: [u8; 5] = *b"order"; +const MAGIC_TEXT: [u8; 6] = *b"ordwal"; const 
MAGIC_TEXT_SIZE: usize = MAGIC_TEXT.len(); -const WAL_KIND_SIZE: usize = mem::size_of::(); const MAGIC_VERSION_SIZE: usize = mem::size_of::(); -const HEADER_SIZE: usize = MAGIC_TEXT_SIZE + WAL_KIND_SIZE + MAGIC_VERSION_SIZE; +const HEADER_SIZE: usize = MAGIC_TEXT_SIZE + MAGIC_VERSION_SIZE; /// The mvcc version size. const VERSION_SIZE: usize = mem::size_of::(); +/// Batch insertions related traits and structs. +pub mod batch; + /// Error types. pub mod error; +pub(crate) mod swmr; + mod builder; -pub use builder::Builder; +mod log; +mod options; /// Types pub mod types; -mod options; -pub use options::Options; -pub use skl::KeySize; - -/// Batch insertions related traits and structs. -pub mod batch; - -/// A single writer multiple readers ordered write-ahead Log implementation. -mod swmr; -mod wal; -pub use swmr::*; +/// Dynamic ordered write-ahead log implementation. +pub mod dynamic; -/// The memory table implementation. +/// Memory table related traits and structs. pub mod memtable; -mod sealed; -pub use sealed::Immutable; +/// Generic ordered write-ahead log implementation. +pub mod generic; /// The utilities functions. pub mod utils; -bitflags::bitflags! { - /// The flags for each atomic write. - struct Flags: u8 { - /// First bit: 1 indicates committed, 0 indicates uncommitted - const COMMITTED = 0b00000001; - /// Second bit: 1 indicates batching, 0 indicates single entry - const BATCHING = 0b00000010; - } -} +/// A marker trait which indicates that such WAL is immutable. +pub trait Immutable {} diff --git a/src/log.rs b/src/log.rs new file mode 100644 index 00000000..ceb6eda8 --- /dev/null +++ b/src/log.rs @@ -0,0 +1,1524 @@ +use crate::{ + batch::{Batch, Data}, + checksum::{BuildChecksumer, Checksumer}, + error::Error, + memtable::{Memtable, MutableMemtable}, + options::Options, + types::{BoundedKey, EncodedEntryMeta, EncodedRangeEntryMeta, EntryFlags, Flags, RecordPointer}, + utils::merge_lengths, + CHECKSUM_SIZE, HEADER_SIZE, MAGIC_TEXT, MAGIC_TEXT_SIZE, RECORD_FLAG_SIZE, VERSION_SIZE, +}; + +use core::{ops::Bound, ptr::NonNull}; + +use among::Among; +use dbutils::{ + buffer::{BufWriter, BufWriterOnce, VacantBuffer}, + error::InsufficientBuffer, + leb128::encoded_u64_varint_len, +}; +use rarena_allocator::{either::Either, Allocator, ArenaPosition, Buffer}; + +pub trait Log: Sized { + type Allocator: Allocator + 'static; + type Memtable: Memtable; + type Checksumer; + type Reader: 'static; + + fn allocator<'a>(&'a self) -> &'a Self::Allocator + where + Self::Allocator: 'a; + + fn new( + arena: Self::Allocator, + opts: Options, + memtable_opts: ::Options, + cks: Self::Checksumer, + ) -> Result> { + unsafe { + let slice = arena.reserved_slice_mut(); + let mut cursor = 0; + slice[0..MAGIC_TEXT_SIZE].copy_from_slice(&MAGIC_TEXT); + cursor += MAGIC_TEXT_SIZE; + slice[cursor..HEADER_SIZE].copy_from_slice(&opts.magic_version().to_le_bytes()); + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + let this = arena + .flush_range(0, HEADER_SIZE) + .map_err(Into::into) + .and_then(|_| { + Self::Memtable::new(arena.clone(), memtable_opts) + .map(|memtable| Self::construct(arena, memtable, opts, cks)) + .map_err(Error::memtable) + }); + + #[cfg(not(all(feature = "memmap", not(target_family = "wasm"))))] + let this = Self::Memtable::new(arena.clone(), memtable_opts) + .map(|memtable| Self::construct(arena, memtable, opts, cks)) + .map_err(Error::memtable); + + this + } + + fn construct( + arena: Self::Allocator, + base: Self::Memtable, + opts: Options, + checksumer: 
Self::Checksumer, + ) -> Self; + + fn options(&self) -> &Options; + + fn memtable(&self) -> &Self::Memtable; + + fn hasher(&self) -> &Self::Checksumer; + + /// Returns `true` if this WAL instance is read-only. + #[inline] + fn read_only(&self) -> bool { + self.allocator().read_only() + } + + /// Returns the path of the WAL if it is backed by a file. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + #[inline] + fn path<'a>(&'a self) -> Option<&'a ::Path> + where + Self::Allocator: 'a, + { + self.allocator().path() + } + + /// Returns the maximum key size allowed in the WAL. + #[inline] + fn maximum_key_size(&self) -> u32 { + self.options().maximum_key_size() + } + + /// Returns the maximum value size allowed in the WAL. + #[inline] + fn maximum_value_size(&self) -> u32 { + self.options().maximum_value_size() + } + + /// Returns the remaining capacity of the WAL. + #[inline] + fn remaining(&self) -> u32 { + self.allocator().remaining() as u32 + } + + /// Returns the capacity of the WAL. + #[inline] + fn capacity(&self) -> u32 { + self.options().capacity() + } + + /// Returns the reserved space in the WAL. + /// + /// ## Safety + /// - The writer must ensure that the returned slice is not modified. + /// - This method is not thread-safe, so be careful when using it. + unsafe fn reserved_slice<'a>(&'a self) -> &'a [u8] + where + Self::Allocator: 'a, + { + let reserved = self.options().reserved(); + if reserved == 0 { + return &[]; + } + + let allocator = self.allocator(); + let reserved_slice = allocator.reserved_slice(); + &reserved_slice[HEADER_SIZE..] + } + + /// Returns the mutable reference to the reserved slice. + /// + /// ## Safety + /// - The caller must ensure that the there is no others accessing reserved slice for either read or write. + /// - This method is not thread-safe, so be careful when using it. + #[allow(clippy::mut_from_ref)] + unsafe fn reserved_slice_mut<'a>(&'a self) -> &'a mut [u8] + where + Self::Allocator: 'a, + { + let reserved = self.options().reserved(); + if reserved == 0 { + return &mut []; + } + + let allocator = self.allocator(); + let reserved_slice = allocator.reserved_slice_mut(); + &mut reserved_slice[HEADER_SIZE..] + } + + /// Flushes the to disk. + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + fn flush(&self) -> Result<(), Error> + where + Self::Memtable: Memtable, + { + if !self.read_only() { + self.allocator().flush().map_err(Into::into) + } else { + Err(Error::read_only()) + } + } + + /// Flushes the to disk. 
+ #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + fn flush_async(&self) -> Result<(), Error> + where + Self::Memtable: Memtable, + { + if !self.read_only() { + self.allocator().flush_async().map_err(Into::into) + } else { + Err(Error::read_only()) + } + } + + #[inline] + fn insert_pointer( + &self, + version: u64, + flag: EntryFlags, + kp: RecordPointer, + ) -> Result<(), Error> + where + Self::Memtable: MutableMemtable, + { + let t = self.memtable(); + match () { + _ if flag.contains(EntryFlags::REMOVED) => t.remove(version, kp).map_err(Error::memtable), + _ if flag.contains(EntryFlags::RANGE_DELETION) => { + t.range_remove(version, kp).map_err(Error::memtable) + } + _ if flag.contains(EntryFlags::RANGE_SET) => { + t.range_set(version, kp).map_err(Error::memtable) + } + _ if flag.contains(EntryFlags::RANGE_UNSET) => { + t.range_unset(version, kp).map_err(Error::memtable) + } + _ => t.insert(version, kp).map_err(Error::memtable), + } + } + + #[inline] + fn insert_pointers( + &self, + mut ptrs: impl Iterator, + ) -> Result<(), Error> + where + Self::Memtable: MutableMemtable, + { + ptrs.try_for_each(|(version, flag, p)| self.insert_pointer(version, flag, p)) + } + + fn range_remove( + &self, + version: u64, + start_bound: Bound, + end_bound: Bound, + ) -> Result<(), Among>> + where + S: BufWriterOnce, + E: BufWriterOnce, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + self + .range_update::<_, _, Noop>( + version, + EntryFlags::RANGE_DELETION, + start_bound, + end_bound, + None, + ) + .map_err(|e| match e.into_left_right() { + Either::Left(Either::Left(e)) => Among::Left(e), + Either::Left(Either::Right(e)) => Among::Middle(e), + Either::Right(e) => Among::Right(e), + }) + } + + fn range_unset( + &self, + version: u64, + start_bound: Bound, + end_bound: Bound, + ) -> Result<(), Among>> + where + S: BufWriterOnce, + E: BufWriterOnce, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + self + .range_update::<_, _, Noop>( + version, + EntryFlags::RANGE_UNSET, + start_bound, + end_bound, + None, + ) + .map_err(|e| match e.into_left_right() { + Either::Left(Either::Left(e)) => Among::Left(e), + Either::Left(Either::Right(e)) => Among::Middle(e), + Either::Right(e) => Among::Right(e), + }) + } + + fn range_set( + &self, + version: u64, + start_bound: Bound, + end_bound: Bound, + value: V, + ) -> Result<(), Among, V::Error, Error>> + where + S: BufWriterOnce, + E: BufWriterOnce, + V: BufWriterOnce, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + self.range_update( + version, + EntryFlags::RANGE_SET, + start_bound, + end_bound, + Some(value), + ) + } + + fn insert( + &self, + version: u64, + kb: KE, + vb: VE, + ) -> Result<(), Among>> + where + KE: BufWriterOnce, + VE: BufWriterOnce, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + self.update(version, EntryFlags::empty(), kb, Some(vb)) + } + + fn remove(&self, version: u64, kb: KE) -> Result<(), Either>> + where + KE: BufWriterOnce, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + self + .update::(version, EntryFlags::REMOVED, kb, None) + .map_err(Among::into_left_right) + } + + fn range_update( + &self, + version: u64, + entry_flag: EntryFlags, + start_bound: Bound, + end_bound: Bound, + vb: Option, + ) -> Result<(), Among, VE::Error, Error>> + where + S: BufWriterOnce, + E: BufWriterOnce, + VE: 
BufWriterOnce, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + if self.read_only() { + return Err(Among::Right(Error::read_only())); + } + + let start_bound = BoundWriterOnce::new(start_bound); + let end_bound = BoundWriterOnce::new(end_bound); + + let res = { + let start_key_encoded_len = start_bound.encoded_len(); + let end_key_encoded_len = end_bound.encoded_len(); + let vlen = vb.as_ref().map_or(0, |vb| vb.encoded_len()); + + let encoded_entry_meta = check_range( + start_key_encoded_len, + end_key_encoded_len, + vlen, + self.maximum_key_size(), + self.maximum_value_size(), + self.read_only(), + ) + .map_err(Either::Right)?; + + let allocator = self.allocator(); + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + let is_ondisk = allocator.is_ondisk(); + + let buf = allocator.alloc_bytes(encoded_entry_meta.entry_size); + let mut cks = self.hasher().build_checksumer(); + + match buf { + Err(e) => Err(Among::Right(Error::from_insufficient_space(e))), + Ok(mut buf) => { + unsafe { + // We allocate the buffer with the exact size, so it's safe to write to the buffer. + let flag = Flags::COMMITTED.bits(); + + cks.update(&[flag]); + + buf.put_slice_unchecked(&[Flags::empty().bits(), entry_flag.bits()]); + + buf.put_u64_le_unchecked(version); + + let written = buf.put_u64_varint_unchecked(encoded_entry_meta.packed_kvlen); + debug_assert_eq!( + written, encoded_entry_meta.packed_kvlen_size, + "the precalculated size should be equal to the written size" + ); + + let written = buf.put_u64_varint_unchecked(encoded_entry_meta.range_key_len); + debug_assert_eq!( + written, encoded_entry_meta.range_key_len_size, + "the precalculated size should be equal to the written size" + ); + + let sko = encoded_entry_meta.start_key_offset(); + let ptr = buf.as_mut_ptr().add(sko); + buf.set_len(encoded_entry_meta.entry_size as usize - CHECKSUM_SIZE); + + let mut start_key_buf = VacantBuffer::new( + encoded_entry_meta.start_key_len as usize, + NonNull::new_unchecked(ptr), + ); + let written = start_bound.write_once(&mut start_key_buf).map_err(|e| { + let e = e.unwrap_left(); + Among::Left(Either::Left(e)) + })?; + + debug_assert_eq!( + written, encoded_entry_meta.start_key_len as usize, + "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", + encoded_entry_meta.start_key_len, written, + ); + + let eko = encoded_entry_meta.end_key_offset(); + let ptr = buf.as_mut_ptr().add(eko); + let mut end_key_buf = VacantBuffer::new( + encoded_entry_meta.end_key_len as usize, + NonNull::new_unchecked(ptr), + ); + let written = end_bound.write_once(&mut end_key_buf).map_err(|e| { + let e = e.unwrap_left(); + Among::Left(Either::Right(e)) + })?; + + debug_assert_eq!( + written, encoded_entry_meta.end_key_len as usize, + "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", + encoded_entry_meta.end_key_len, written, + ); + + if let Some(vb) = vb { + let vo = encoded_entry_meta.value_offset(); + let mut value_buf = VacantBuffer::new( + encoded_entry_meta.vlen as usize, + NonNull::new_unchecked(buf.as_mut_ptr().add(vo)), + ); + let written = vb.write_once(&mut value_buf).map_err(Among::Middle)?; + + debug_assert_eq!( + written, encoded_entry_meta.vlen as usize, + "the actual bytes written to the value buffer not equal to the expected size, expected {} but got {}.", + encoded_entry_meta.vlen, written, + ); + } + + let cks = { + cks.update(&buf[1..]); + cks.digest() + }; + 
buf.put_u64_le_unchecked(cks); + + // commit the entry + buf[0] |= Flags::COMMITTED.bits(); + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + if self.options().sync() && is_ondisk { + allocator + .flush_header_and_range(buf.offset(), encoded_entry_meta.entry_size as usize) + .map_err(|e| Among::Right(e.into()))?; + } + + buf.detach(); + let eoffset = buf.offset(); + let offset = eoffset + encoded_entry_meta.entry_flag_offset(); + let p = RecordPointer::new(offset as u32, (buf.len() - RECORD_FLAG_SIZE) as u32); + Ok((buf.buffer_offset(), p, entry_flag)) + } + } + } + }; + + res.and_then(|(offset, p, flag)| { + self.insert_pointer(version, flag, p).map_err(|e| { + unsafe { + self.allocator().rewind(ArenaPosition::Start(offset as u32)); + }; + Among::Right(e) + }) + }) + } + + fn update( + &self, + version: u64, + entry_flag: EntryFlags, + kb: KE, + vb: Option, + ) -> Result<(), Among>> + where + KE: BufWriterOnce, + VE: BufWriterOnce, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + if self.read_only() { + return Err(Among::Right(Error::read_only())); + } + + let res = { + let klen = kb.encoded_len(); + let vlen = vb.as_ref().map_or(0, |vb| vb.encoded_len()); + let encoded_entry_meta = check( + klen, + vlen, + self.maximum_key_size(), + self.maximum_value_size(), + self.read_only(), + ) + .map_err(Either::Right)?; + + let allocator = self.allocator(); + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + let is_ondisk = allocator.is_ondisk(); + + let buf = allocator.alloc_bytes(encoded_entry_meta.entry_size); + let mut cks = self.hasher().build_checksumer(); + + match buf { + Err(e) => Err(Among::Right(Error::from_insufficient_space(e))), + Ok(mut buf) => { + unsafe { + // We allocate the buffer with the exact size, so it's safe to write to the buffer. 
+ let flag = Flags::COMMITTED.bits(); + + cks.update(&[flag]); + + buf.put_slice_unchecked(&[Flags::empty().bits(), entry_flag.bits()]); + + buf.put_u64_le_unchecked(version); + + let written = buf.put_u64_varint_unchecked(encoded_entry_meta.packed_kvlen); + debug_assert_eq!( + written, encoded_entry_meta.packed_kvlen_size, + "the precalculated size should be equal to the written size" + ); + + let ko = encoded_entry_meta.key_offset(); + let ptr = buf.as_mut_ptr().add(ko); + buf.set_len(encoded_entry_meta.entry_size as usize - CHECKSUM_SIZE); + + let mut key_buf = VacantBuffer::new( + encoded_entry_meta.klen as usize, + NonNull::new_unchecked(ptr), + ); + let written = kb.write_once(&mut key_buf).map_err(Among::Left)?; + debug_assert_eq!( + written, encoded_entry_meta.klen as usize, + "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", + encoded_entry_meta.klen, written, + ); + + if let Some(vb) = vb { + let vo = encoded_entry_meta.value_offset(); + let mut value_buf = VacantBuffer::new( + encoded_entry_meta.vlen as usize, + NonNull::new_unchecked(buf.as_mut_ptr().add(vo)), + ); + let written = vb.write_once(&mut value_buf).map_err(Among::Middle)?; + + debug_assert_eq!( + written, encoded_entry_meta.vlen as usize, + "the actual bytes written to the value buffer not equal to the expected size, expected {} but got {}.", + encoded_entry_meta.vlen, written, + ); + } + + let cks = { + cks.update(&buf[1..]); + cks.digest() + }; + buf.put_u64_le_unchecked(cks); + + // commit the entry + buf[0] |= Flags::COMMITTED.bits(); + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + if self.options().sync() && is_ondisk { + allocator + .flush_header_and_range(buf.offset(), encoded_entry_meta.entry_size as usize) + .map_err(|e| Among::Right(e.into()))?; + } + + buf.detach(); + let eoffset = buf.offset(); + let offset = eoffset + encoded_entry_meta.entry_flag_offset(); + let p = RecordPointer::new(offset as u32, (buf.len() - RECORD_FLAG_SIZE) as u32); + Ok((buf.buffer_offset(), p, entry_flag)) + } + } + } + }; + + res.and_then(|(offset, p, flag)| { + self.insert_pointer(version, flag, p).map_err(|e| { + unsafe { + self.allocator().rewind(ArenaPosition::Start(offset as u32)); + }; + Among::Right(e) + }) + }) + } + + fn apply( + &self, + batch: &mut B, + ) -> Result< + (), + Among<::Error, ::Error, Error>, + > + where + B: Batch, + B::Key: BufWriter, + B::Value: BufWriter, + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + if self.read_only() { + return Err(Among::Right(Error::read_only())); + } + + let opts = self.options(); + let maximum_key_size = opts.maximum_key_size(); + let minimum_value_size = opts.maximum_value_size(); + let start_offset = unsafe { + let (mut cursor, _allocator, mut buf) = batch + .iter_mut() + .try_fold((0u32, 0u64), |(num_entries, size), ent| { + match &mut ent.data { + Data::InsertPoint { key, value, meta } => { + let klen = key.encoded_len(); + let vlen = value.encoded_len(); + + check_batch_entry(klen, vlen, maximum_key_size, minimum_value_size).map(|entry_meta| { + let ent_size = entry_meta.entry_size as u64; + *meta = entry_meta; + (num_entries + 1, size + ent_size) + }) + }, + Data::RemovePoint { key, meta } => { + let klen = key.encoded_len(); + check_batch_entry(klen, 0, maximum_key_size, minimum_value_size).map(|entry_meta| { + let ent_size = entry_meta.entry_size as u64; + *meta = entry_meta; + (num_entries + 1, size + ent_size) + }) + }, + Data::RangeRemove { start_bound, end_bound, 
meta } | Data::RangeUnset { start_bound, end_bound, meta } => { + let start_key_encoded_len = encode_bound_key_len(start_bound.as_ref()); + let end_key_encoded_len = encode_bound_key_len(end_bound.as_ref()); + check_batch_range_entry(start_key_encoded_len, end_key_encoded_len, 0, maximum_key_size, minimum_value_size,) + .map(|entry_meta| { + let ent_size = entry_meta.entry_size as u64; + *meta = entry_meta; + (num_entries + 1, size + ent_size) + }) + }, + Data::RangeSet { start_bound, end_bound, value, meta } => { + let start_key_encoded_len = encode_bound_key_len(start_bound.as_ref()); + let end_key_encoded_len = encode_bound_key_len(end_bound.as_ref()); + let vlen = value.encoded_len(); + check_batch_range_entry(start_key_encoded_len, end_key_encoded_len, vlen, maximum_key_size, minimum_value_size,) + .map(|entry_meta| { + let ent_size = entry_meta.entry_size as u64; + *meta = entry_meta; + (num_entries + 1, size + ent_size) + }) + }, + } + }) + .and_then(|(num_entries, batch_encoded_size)| { + // safe to cast batch_encoded_size to u32 here, we already checked it's less than capacity (less than u32::MAX). + let batch_meta = merge_lengths(num_entries, batch_encoded_size as u32); + let batch_meta_size = encoded_u64_varint_len(batch_meta); + let allocator = self.allocator(); + let remaining = allocator.remaining() as u64; + let total_size = RECORD_FLAG_SIZE as u64 + + batch_meta_size as u64 + + batch_encoded_size + + CHECKSUM_SIZE as u64; + if total_size > remaining { + return Err(Error::insufficient_space(total_size, remaining as u32)); + } + + let mut buf = allocator + .alloc_bytes(total_size as u32) + .map_err(Error::from_insufficient_space)?; + + let flag = Flags::BATCHING; + + buf.put_u8_unchecked(flag.bits()); + let size = buf.put_u64_varint_unchecked(batch_meta); + debug_assert_eq!( + size, batch_meta_size, + "the actual encoded u64 varint length ({}) doos not match the length ({}) returned by `dbutils::leb128::encoded_u64_varint_len`, please report bug to https://github.com/al8n/layer0/issues", + size, batch_meta_size, + ); + + Ok((RECORD_FLAG_SIZE + batch_meta_size, allocator, buf)) + }) + .map_err(Among::Right)?; + + for ent in batch.iter_mut() { + let meta = ent.encoded_meta(); + match meta { + Either::Left(meta) => { + let remaining = buf.remaining(); + if remaining < meta.checksum_offset() { + return Err(Among::Right( + Error::larger_batch_size(buf.capacity() as u32), + )); + } + + let entry_offset = cursor; + buf.put_u8_unchecked(ent.flag.bits()); + buf.put_u64_le_unchecked(ent.internal_version()); + let (ko, vo) = { + ( + entry_offset + meta.key_offset(), + entry_offset + meta.value_offset(), + ) + }; + let ent_len_size = buf.put_u64_varint_unchecked(meta.packed_kvlen); + debug_assert_eq!( + ent_len_size, meta.packed_kvlen_size, + "the actual encoded u64 varint length ({}) doos not match the length ({}) returned by `dbutils::leb128::encoded_u64_varint_len`, please report bug to https://github.com/al8n/layer0/issues", + ent_len_size, meta.packed_kvlen_size, + ); + + let ptr = buf.as_mut_ptr(); + let kp = ptr.add(ko); + let vp = ptr.add(vo); + buf.set_len(entry_offset + meta.value_offset()); + + let (kb, vb) = (ent.key(), ent.value()); + let mut key_buf = VacantBuffer::new(meta.klen, NonNull::new_unchecked(kp)); + let written = kb.write(&mut key_buf).map_err(Among::Left)?; + debug_assert_eq!( + written, meta.klen, + "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", + meta.klen, written, + ); + + buf.set_len(entry_offset + 
meta.checksum_offset()); + if let Some(vb) = vb { + let mut value_buf = VacantBuffer::new(meta.vlen, NonNull::new_unchecked(vp)); + let written = vb.write(&mut value_buf).map_err(Among::Middle)?; + + debug_assert_eq!( + written, meta.vlen, + "the actual bytes written to the value buffer not equal to the expected size, expected {} but got {}.", + meta.vlen, written, + ); + } + let entry_size = meta.entry_size as usize; + ent.set_pointer(RecordPointer::new( + entry_offset as u32 + buf.offset() as u32, + meta.entry_size, + )); + cursor += entry_size; + } + Either::Right(meta) => { + let remaining = buf.remaining(); + if remaining < meta.checksum_offset() { + return Err(Among::Right( + Error::larger_batch_size(buf.capacity() as u32), + )); + } + + let entry_offset = cursor; + buf.put_u8_unchecked(ent.flag.bits()); + buf.put_u64_le_unchecked(ent.internal_version()); + let ent_len_size = buf.put_u64_varint_unchecked(meta.packed_kvlen); + debug_assert_eq!( + ent_len_size, meta.packed_kvlen_size, + "the actual encoded u64 varint length ({}) doos not match the length ({}) returned by `dbutils::leb128::encoded_u64_varint_len`, please report bug to https://github.com/al8n/layer0/issues", + ent_len_size, meta.packed_kvlen_size, + ); + + let range_key_len_size = buf.put_u64_varint_unchecked(meta.range_key_len); + debug_assert_eq!( + range_key_len_size, meta.range_key_len_size, + "the actual encoded u64 varint length ({}) doos not match the length ({}) returned by `dbutils::leb128::encoded_u64_varint_len`, please report bug to https://github.com/al8n/layer0/issues", + range_key_len_size, meta.range_key_len_size, + ); + + let ptr = buf.as_mut_ptr(); + let start_key_ptr = ptr.add(cursor + meta.start_key_offset()); + let end_key_ptr = ptr.add(cursor + meta.end_key_offset()); + let value_ptr = ptr.add(cursor + meta.value_offset()); + buf.set_len(cursor + meta.checksum_offset()); + + let (start_bound, end_bound) = ent.bounds(); + let vb = ent.value(); + let mut start_bound_buf = + VacantBuffer::new(meta.start_key_len, NonNull::new_unchecked(start_key_ptr)); + let mut end_bound_buf = + VacantBuffer::new(meta.end_key_len, NonNull::new_unchecked(end_key_ptr)); + + let written = BoundWriter::new(start_bound) + .write(&mut start_bound_buf) + .map_err(Among::Left)?; + debug_assert_eq!( + written, meta.start_key_len, + "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", + meta.start_key_len, written, + ); + + let written = BoundWriter::new(end_bound) + .write(&mut end_bound_buf) + .map_err(Among::Left)?; + debug_assert_eq!( + written, meta.end_key_len, + "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", + meta.end_key_len, written, + ); + + if let Some(vb) = vb { + let mut value_buf = VacantBuffer::new(meta.vlen, NonNull::new_unchecked(value_ptr)); + let written = vb.write(&mut value_buf).map_err(Among::Middle)?; + + debug_assert_eq!( + written, meta.vlen, + "the actual bytes written to the value buffer not equal to the expected size, expected {} but got {}.", + meta.vlen, written, + ); + } + + let entry_size = meta.entry_size as usize; + ent.set_pointer(RecordPointer::new(entry_offset as u32, meta.entry_size)); + cursor += entry_size; + } + } + } + + let total_size = buf.capacity(); + if cursor + CHECKSUM_SIZE != total_size { + return Err(Among::Right(Error::batch_size_mismatch( + total_size as u32 - CHECKSUM_SIZE as u32, + cursor as u32, + ))); + } + + let mut cks = self.hasher().build_checksumer(); + let 
committed_flag = Flags::BATCHING | Flags::COMMITTED; + cks.update(&[committed_flag.bits()]); + cks.update(&buf[1..]); + let checksum = cks.digest(); + buf.put_u64_le_unchecked(checksum); + + // commit the entry + buf[0] = committed_flag.bits(); + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + if self.options().sync() && _allocator.is_ondisk() { + _allocator + .flush_header_and_range(Buffer::offset(&buf), buf.capacity()) + .map_err(|e| Among::Right(e.into()))?; + } + buf.detach(); + Buffer::buffer_offset(&buf) + }; + + self + .insert_pointers(batch.iter_mut().map(|e| { + let p = e.take_pointer().unwrap(); + (e.internal_version(), e.flag, p) + })) + .map_err(|e| { + // Safety: the writer is single threaded, the memory chunk in buf cannot be accessed by other threads, + // so it's safe to rewind the arena. + unsafe { + self + .allocator() + .rewind(ArenaPosition::Start(start_offset as u32)); + } + Among::Right(e) + }) + } + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] + fn replay( + arena: Self::Allocator, + opts: Options, + memtable_opts: ::Options, + ro: bool, + checksumer: Self::Checksumer, + ) -> Result> + where + Self::Checksumer: BuildChecksumer, + Self::Memtable: MutableMemtable, + { + use crate::utils::split_lengths; + use dbutils::leb128::decode_u64_varint; + use rarena_allocator::IncompleteBuffer; + + let slice = arena.reserved_slice(); + let mut cursor = 0; + let magic_text = &slice[0..MAGIC_TEXT_SIZE]; + if magic_text != MAGIC_TEXT { + return Err(Error::magic_text_mismatch()); + } + cursor += MAGIC_TEXT_SIZE; + + let magic_version = u16::from_le_bytes(slice[cursor..HEADER_SIZE].try_into().unwrap()); + if magic_version != opts.magic_version() { + return Err(Error::magic_version_mismatch()); + } + + let set = Self::Memtable::new(arena.clone(), memtable_opts).map_err(Error::memtable)?; + + let mut cursor = arena.data_offset(); + let allocated = arena.allocated(); + let mut minimum_version = u64::MAX; + let mut maximum_version = 0; + + loop { + unsafe { + // we reached the end of the arena, if we have any remaining, then if means two possibilities: + // 1. the remaining is a partial entry, but it does not be persisted to the disk, so following the write-ahead log principle, we should discard it. + // 2. our file may be corrupted, so we discard the remaining. 
+ if cursor + RECORD_FLAG_SIZE > allocated { + if !ro && cursor < allocated { + arena.rewind(ArenaPosition::Start(cursor as u32)); + arena.flush()?; + } + break; + } + + let header = arena.get_u8_unchecked(cursor); + let flag = Flags::from_bits_retain(header); + + if !flag.contains(Flags::BATCHING) { + let mut sub_cursor = cursor + RECORD_FLAG_SIZE; + let entry_offset = sub_cursor; + let entry_flag = arena.get_u8(sub_cursor).map_err(|e| { + #[cfg(feature = "tracing")] + tracing::error!(err=%e); + + Error::corrupted(e) + })?; + sub_cursor += RECORD_FLAG_SIZE; + + let entry_flag = EntryFlags::from_bits_retain(entry_flag); + let version = { + let version = arena.get_u64_le(sub_cursor).map_err(|e| { + #[cfg(feature = "tracing")] + tracing::error!(err=%e); + + Error::corrupted(e) + })?; + sub_cursor += VERSION_SIZE; + minimum_version = minimum_version.min(version); + maximum_version = maximum_version.max(version); + version + }; + + let (readed, encoded_len) = arena.get_u64_varint(sub_cursor).map_err(|e| { + #[cfg(feature = "tracing")] + tracing::error!(err=%e); + + Error::corrupted(e) + })?; + let (key_len, value_len) = split_lengths(encoded_len); + let key_len = key_len as usize; + let value_len = value_len as usize; + sub_cursor += readed + key_len + value_len; + + // Same as above, if we reached the end of the arena, we should discard the remaining. + let cks_offset = sub_cursor - cursor; + if cks_offset + CHECKSUM_SIZE > allocated { + // If the entry is committed, then it means our file is truncated, so we should report corrupted. + if flag.contains(Flags::COMMITTED) { + return Err(Error::corrupted("file is truncated")); + } + + if !ro { + arena.rewind(ArenaPosition::Start(cursor as u32)); + arena.flush()?; + } + + break; + } + + let cks = arena.get_u64_le_unchecked(cursor + cks_offset); + if cks != checksumer.checksum_one(arena.get_bytes(cursor, cks_offset)) { + return Err(Error::corrupted("checksum mismatch")); + } + + // If the entry is not committed, we should rewind + if !flag.contains(Flags::COMMITTED) { + if !ro { + arena.rewind(ArenaPosition::Start(cursor as u32)); + arena.flush()?; + } + + break; + } + + let pointer = RecordPointer::new(entry_offset as u32, sub_cursor as u32); + + match () { + _ if entry_flag.contains(EntryFlags::REMOVED) => { + set.remove(version, pointer).map_err(Error::memtable)? + } + _ if entry_flag.contains(EntryFlags::RANGE_DELETION) => set + .range_remove(version, pointer) + .map_err(Error::memtable)?, + _ if entry_flag.contains(EntryFlags::RANGE_SET) => { + set.range_set(version, pointer).map_err(Error::memtable)? + } + _ if entry_flag.contains(EntryFlags::RANGE_UNSET) => { + set.range_unset(version, pointer).map_err(Error::memtable)? + } + _ => set.insert(version, pointer).map_err(Error::memtable)?, + } + + cursor = sub_cursor + CHECKSUM_SIZE; + } else { + let (readed, encoded_len) = + arena + .get_u64_varint(cursor + RECORD_FLAG_SIZE) + .map_err(|e| { + #[cfg(feature = "tracing")] + tracing::error!(err=%e); + + Error::corrupted(e) + })?; + + let (num_entries, encoded_data_len) = split_lengths(encoded_len); + // Same as above, if we reached the end of the arena, we should discard the remaining. + let cks_offset = RECORD_FLAG_SIZE + readed + encoded_data_len as usize; + let total_size = cks_offset + CHECKSUM_SIZE; + + if total_size > allocated { + // If the entry is committed, then it means our file is truncated, so we should report corrupted. 
+ if flag.contains(Flags::COMMITTED) { + return Err(Error::corrupted("file is truncated")); + } + + if !ro { + arena.rewind(ArenaPosition::Start(cursor as u32)); + arena.flush()?; + } + + break; + } + let cks = arena.get_u64_le(cursor + cks_offset).unwrap(); + let mut batch_data_buf = arena.get_bytes(cursor, cks_offset); + if cks != checksumer.checksum_one(batch_data_buf) { + return Err(Error::corrupted("checksum mismatch")); + } + + let mut sub_cursor = 0; + batch_data_buf = &batch_data_buf[RECORD_FLAG_SIZE + readed..]; + for _ in 0..num_entries { + if batch_data_buf.len() < EntryFlags::SIZE { + return Err(Error::corrupted(IncompleteBuffer::new())); + } + + let mut entry_cursor = 0; + let entry_flag = EntryFlags::from_bits_retain(batch_data_buf[0]); + entry_cursor += EntryFlags::SIZE; + + let version = { + if batch_data_buf.len() < entry_cursor + VERSION_SIZE { + return Err(Error::corrupted(IncompleteBuffer::new())); + } + + let version = u64::from_le_bytes( + batch_data_buf[entry_cursor..entry_cursor + VERSION_SIZE] + .try_into() + .unwrap(), + ); + entry_cursor += VERSION_SIZE; + minimum_version = minimum_version.min(version); + maximum_version = maximum_version.max(version); + version + }; + + let (kvlen, ent_len) = + decode_u64_varint(&batch_data_buf[entry_cursor..]).map_err(|e| { + #[cfg(feature = "tracing")] + tracing::error!(err=%e); + + Error::corrupted(e) + })?; + + let (klen, vlen) = split_lengths(ent_len); + let klen = klen as usize; + let vlen = vlen as usize; + + let entry_offset = cursor + RECORD_FLAG_SIZE + readed + sub_cursor; + entry_cursor += kvlen + klen + vlen; + sub_cursor += entry_cursor; + let pointer = RecordPointer::new(entry_offset as u32, entry_cursor as u32); + + match () { + _ if entry_flag.contains(EntryFlags::REMOVED) => { + set.remove(version, pointer).map_err(Error::memtable)? + } + _ if entry_flag.contains(EntryFlags::RANGE_DELETION) => set + .range_remove(version, pointer) + .map_err(Error::memtable)?, + _ if entry_flag.contains(EntryFlags::RANGE_SET) => { + set.range_set(version, pointer).map_err(Error::memtable)? + } + _ if entry_flag.contains(EntryFlags::RANGE_UNSET) => { + set.range_unset(version, pointer).map_err(Error::memtable)? 
+ } + _ => set.insert(version, pointer).map_err(Error::memtable)?, + } + + batch_data_buf = &batch_data_buf[entry_cursor..]; + } + + debug_assert_eq!( + encoded_data_len as usize, sub_cursor, + "expected encoded batch data size ({}) is not equal to the actual size ({})", + encoded_data_len, sub_cursor, + ); + + cursor += total_size; + } + } + } + + Ok(Self::construct(arena, set, opts, checksumer)) + } +} + +#[inline] +const fn min_u64(a: u64, b: u64) -> u64 { + if a < b { + a + } else { + b + } +} + +#[inline] +const fn check_range( + start_key_len: usize, + end_key_len: usize, + vlen: usize, + max_key_size: u32, + max_value_size: u32, + ro: bool, +) -> Result> { + if ro { + return Err(Error::read_only()); + } + + let max_ksize = min_u64(max_key_size as u64, u32::MAX as u64); + let max_vsize = min_u64(max_value_size as u64, u32::MAX as u64); + + if max_ksize < start_key_len as u64 { + return Err(Error::key_too_large(start_key_len as u64, max_key_size)); + } + + if max_ksize < end_key_len as u64 { + return Err(Error::key_too_large(end_key_len as u64, max_key_size)); + } + + let range_key_len = merge_lengths(start_key_len as u32, end_key_len as u32); + let range_key_len_size = encoded_u64_varint_len(range_key_len); + let total_range_key_size = range_key_len_size + start_key_len + end_key_len; + + if total_range_key_size as u64 > u32::MAX as u64 { + return Err(Error::range_key_too_large(total_range_key_size as u64)); + } + + if max_vsize < vlen as u64 { + return Err(Error::value_too_large(vlen as u64, max_value_size)); + } + + let len = merge_lengths(total_range_key_size as u32, vlen as u32); + let len_size = encoded_u64_varint_len(len); + let elen = RECORD_FLAG_SIZE as u64 + + EntryFlags::SIZE as u64 + + VERSION_SIZE as u64 + + len_size as u64 + + total_range_key_size as u64 + + vlen as u64 + + CHECKSUM_SIZE as u64; + + if elen > u32::MAX as u64 { + return Err(Error::entry_too_large( + elen, + min_u64( + RECORD_FLAG_SIZE as u64 + + 10 + + EntryFlags::SIZE as u64 + + VERSION_SIZE as u64 + + max_key_size as u64 + + max_value_size as u64, + u32::MAX as u64, + ), + )); + } + + Ok(EncodedRangeEntryMeta { + packed_kvlen_size: len_size, + packed_kvlen: len, + entry_size: elen as u32, + range_key_len, + range_key_len_size, + total_range_key_size, + start_key_len, + end_key_len, + vlen, + batch: false, + }) +} + +#[inline] +const fn check( + klen: usize, + vlen: usize, + max_key_size: u32, + max_value_size: u32, + ro: bool, +) -> Result> { + if ro { + return Err(Error::read_only()); + } + + let max_ksize = min_u64(max_key_size as u64, u32::MAX as u64); + let max_vsize = min_u64(max_value_size as u64, u32::MAX as u64); + + if max_ksize < klen as u64 { + return Err(Error::key_too_large(klen as u64, max_key_size)); + } + + if max_vsize < vlen as u64 { + return Err(Error::value_too_large(vlen as u64, max_value_size)); + } + + let len = merge_lengths(klen as u32, vlen as u32); + let len_size = encoded_u64_varint_len(len); + let elen = RECORD_FLAG_SIZE as u64 + + EntryFlags::SIZE as u64 + + VERSION_SIZE as u64 + + len_size as u64 + + klen as u64 + + vlen as u64 + + CHECKSUM_SIZE as u64; + + if elen > u32::MAX as u64 { + return Err(Error::entry_too_large( + elen, + min_u64( + RECORD_FLAG_SIZE as u64 + + 10 + + EntryFlags::SIZE as u64 + + VERSION_SIZE as u64 + + max_key_size as u64 + + max_value_size as u64, + u32::MAX as u64, + ), + )); + } + + Ok(EncodedEntryMeta { + packed_kvlen_size: len_size, + batch: false, + packed_kvlen: len, + entry_size: elen as u32, + klen, + vlen, + }) +} + +#[inline] +fn 
check_batch_entry( + klen: usize, + vlen: usize, + max_key_size: u32, + max_value_size: u32, +) -> Result> { + let max_ksize = min_u64(max_key_size as u64, u32::MAX as u64); + let max_vsize = min_u64(max_value_size as u64, u32::MAX as u64); + + if max_ksize < klen as u64 { + return Err(Error::key_too_large(klen as u64, max_key_size)); + } + + if max_vsize < vlen as u64 { + return Err(Error::value_too_large(vlen as u64, max_value_size)); + } + + let len = merge_lengths(klen as u32, vlen as u32); + let len_size = encoded_u64_varint_len(len); + let elen = + EntryFlags::SIZE as u64 + VERSION_SIZE as u64 + len_size as u64 + klen as u64 + vlen as u64; + + if elen > u32::MAX as u64 { + return Err(Error::entry_too_large( + elen, + min_u64( + 10 + EntryFlags::SIZE as u64 + + VERSION_SIZE as u64 + + max_key_size as u64 + + max_value_size as u64, + u32::MAX as u64, + ), + )); + } + + Ok(EncodedEntryMeta { + packed_kvlen_size: len_size, + packed_kvlen: len, + entry_size: elen as u32, + klen, + vlen, + batch: true, + }) +} + +#[inline] +fn check_batch_range_entry( + start_key_len: usize, + end_key_len: usize, + vlen: usize, + max_key_size: u32, + max_value_size: u32, +) -> Result> { + let max_ksize = min_u64(max_key_size as u64, u32::MAX as u64); + let max_vsize = min_u64(max_value_size as u64, u32::MAX as u64); + + if max_ksize < start_key_len as u64 { + return Err(Error::key_too_large(start_key_len as u64, max_key_size)); + } + + if max_ksize < end_key_len as u64 { + return Err(Error::key_too_large(end_key_len as u64, max_key_size)); + } + + let range_key_len = merge_lengths(start_key_len as u32, end_key_len as u32); + let range_key_len_size = encoded_u64_varint_len(range_key_len); + let total_range_key_size = range_key_len_size + start_key_len + end_key_len; + + if total_range_key_size as u64 > u32::MAX as u64 { + return Err(Error::range_key_too_large(total_range_key_size as u64)); + } + + if max_vsize < vlen as u64 { + return Err(Error::value_too_large(vlen as u64, max_value_size)); + } + + let len = merge_lengths(total_range_key_size as u32, vlen as u32); + let len_size = encoded_u64_varint_len(len); + let elen = EntryFlags::SIZE as u64 + + VERSION_SIZE as u64 + + len_size as u64 + + total_range_key_size as u64 + + vlen as u64; + + if elen > u32::MAX as u64 { + return Err(Error::entry_too_large( + elen, + min_u64( + 10 + EntryFlags::SIZE as u64 + + VERSION_SIZE as u64 + + max_key_size as u64 + + max_value_size as u64, + u32::MAX as u64, + ), + )); + } + + Ok(EncodedRangeEntryMeta { + packed_kvlen_size: len_size, + packed_kvlen: len, + entry_size: elen as u32, + range_key_len, + range_key_len_size, + total_range_key_size, + start_key_len, + end_key_len, + vlen, + batch: true, + }) +} + +struct Noop; + +impl BufWriterOnce for Noop { + type Error = (); + + #[inline(never)] + #[cold] + fn encoded_len(&self) -> usize { + 0 + } + + #[inline(never)] + #[cold] + fn write_once(self, _: &mut VacantBuffer<'_>) -> Result { + Ok(0) + } +} + +#[inline] +fn encode_bound_key_len(writer: Bound<&W>) -> usize { + BoundedKey::encoded_size() + + match writer { + Bound::Included(k) => k.encoded_len(), + Bound::Excluded(k) => k.encoded_len(), + Bound::Unbounded => 0, + } +} + +struct BoundWriterOnce { + writer: Bound, + pointer: bool, +} + +impl BoundWriterOnce { + #[inline] + fn new(writer: Bound) -> Self { + Self { + writer, + pointer: false, + } + } +} + +impl BufWriterOnce for BoundWriterOnce +where + W: BufWriterOnce, +{ + type Error = Either; + + #[inline] + fn encoded_len(&self) -> usize { + 
BoundedKey::encoded_size() + + match self.writer.as_ref() { + Bound::Included(k) => k.encoded_len(), + Bound::Excluded(k) => k.encoded_len(), + Bound::Unbounded => 0, + } + } + + #[inline] + fn write_once(self, buf: &mut VacantBuffer<'_>) -> Result { + match self.writer { + Bound::Included(k) => { + buf + .put_u8(BoundedKey::new(Bound::Included(()), self.pointer).encode()) + .map_err(Either::Right)?; + let mut kbuf = buf.split_off(1); + k.write_once(&mut kbuf).map(|n| n + 1).map_err(Either::Left) + } + Bound::Excluded(k) => { + buf + .put_u8(BoundedKey::new(Bound::Included(()), self.pointer).encode()) + .map_err(Either::Right)?; + let mut kbuf = buf.split_off(1); + k.write_once(&mut kbuf).map(|n| n + 1).map_err(Either::Left) + } + Bound::Unbounded => { + buf + .put_u8(BoundedKey::new(Bound::Unbounded, false).encode()) + .map_err(Either::Right)?; + Ok(1) + } + } + } +} + +struct BoundWriter<'a, W> { + writer: Bound<&'a W>, + pointer: bool, +} + +impl<'a, W> BoundWriter<'a, W> { + #[inline] + fn new(writer: Bound<&'a W>) -> Self { + Self { + writer, + pointer: false, + } + } +} + +impl BufWriter for BoundWriter<'_, W> +where + W: BufWriter, +{ + type Error = W::Error; + + #[inline] + fn encoded_len(&self) -> usize { + BoundedKey::encoded_size() + + match self.writer.as_ref() { + Bound::Included(k) => k.encoded_len(), + Bound::Excluded(k) => k.encoded_len(), + Bound::Unbounded => 0, + } + } + + #[inline] + fn write(&self, buf: &mut VacantBuffer<'_>) -> Result { + // use put_u8_unchecked as this method is used internally, and should be have enough space before calling this method. + match self.writer { + Bound::Included(k) => { + buf.put_u8_unchecked(BoundedKey::new(Bound::Included(()), self.pointer).encode()); + let mut kbuf = buf.split_off(1); + k.write(&mut kbuf).map(|n| n + 1) + } + Bound::Excluded(k) => { + buf.put_u8_unchecked(BoundedKey::new(Bound::Included(()), self.pointer).encode()); + let mut kbuf = buf.split_off(1); + k.write(&mut kbuf).map(|n| n + 1) + } + Bound::Unbounded => { + buf.put_u8_unchecked(BoundedKey::new(Bound::Unbounded, false).encode()); + Ok(1) + } + } + } +} diff --git a/src/memtable.rs b/src/memtable.rs index 88072757..a892a143 100644 --- a/src/memtable.rs +++ b/src/memtable.rs @@ -1,280 +1,472 @@ -use core::ops::{Bound, RangeBounds}; -use dbutils::equivalent::Comparable; +use core::ops::Bound; +#[cfg(any(feature = "bounded", feature = "unbounded"))] +use core::ops::RangeBounds; -use crate::{ - sealed::{WithVersion, WithoutVersion}, - types::Kind, - wal::{KeyPointer, ValuePointer}, -}; +use crate::types::{RecordPointer, WithValue}; -/// Memtable implementation based on linked based [`SkipMap`][`crossbeam_skiplist`]. -#[cfg(feature = "std")] -#[cfg_attr(docsrs, doc(cfg(feature = "std")))] -pub mod linked; +#[cfg(any(feature = "bounded", feature = "unbounded"))] +use crate::types::Query; -/// Memtable implementation based on ARNEA based [`SkipMap`](skl). -pub mod arena; +#[cfg(feature = "skl")] +pub(crate) mod bounded; +#[cfg(feature = "crossbeam-skiplist-mvcc")] +pub(crate) mod unbounded; -/// Sum type for different memtable implementations. -pub mod alternative; +/// Memtables for dynamic(bytes) key-value order WALs. +pub mod dynamic; + +/// Memtables for generic(structured) key-value order WALs. +pub mod generic; /// An entry which is stored in the memory table. -pub trait BaseEntry<'a>: Sized { +pub trait Entry<'a> +where + Self: Sized, +{ /// The key type. - type Key: ?Sized; + type Key: 'a; + /// The value type. 
- type Value: ?Sized; + type Value: 'a; /// Returns the key in the entry. - fn key(&self) -> KeyPointer; + fn key(&self) -> Self::Key; + + /// Returns the value in the entry. + fn value(&self) -> Self::Value; /// Returns the next entry in the memory table. - fn next(&mut self) -> Option; + fn next(&self) -> Option; /// Returns the previous entry in the memory table. - fn prev(&mut self) -> Option; + fn prev(&self) -> Option; + + /// Returns the version of the entry. + fn version(&self) -> u64; } -/// An entry which is stored in the memory table. -pub trait MemtableEntry<'a>: BaseEntry<'a> + WithoutVersion { - /// Returns the value in the entry. - fn value(&self) -> ValuePointer; +/// An entry which means that the entry can return key and value in bytes format. +pub trait RawEntry<'a> +where + Self: Sized, +{ + /// The raw value type. + type RawValue: 'a; + + /// Returns the raw key in the entry. + fn raw_key(&self) -> &'a [u8]; + + /// Returns the raw value in the entry. + fn raw_value(&self) -> Self::RawValue; } -/// An entry which is stored in the multiple versioned memory table. -pub trait VersionedMemtableEntry<'a>: BaseEntry<'a> + WithVersion { - /// Returns the value in the entry. - fn value(&self) -> Option>; +/// A raw range entry which means that the entry can return start bound and ent bound in bytes format. +pub trait RawRangeEntry<'a, O> +where + Self: Sized, +{ + /// The raw value type. + type RawValue: 'a + where + O: WithValue; - /// Returns the version of the entry if it is versioned. - fn version(&self) -> u64; + /// Returns the start bound of the range entry in bytes. + fn raw_start_bound(&self) -> Bound<&'a [u8]>; + + /// Returns the end bound of the range entry in bytes. + fn raw_end_bound(&self) -> Bound<&'a [u8]>; + + /// Returns the raw value in the entry. + fn raw_value(&self) -> Self::RawValue + where + O: WithValue; } -/// A memory table which is used to store pointers to the underlying entries. -pub trait BaseTable { +/// An range entry which is stored in the memory table. +pub trait RangeEntry<'a, O> { /// The key type. - type Key: ?Sized; - + type Key: 'a; /// The value type. - type Value: ?Sized; + type Value: 'a + where + O: WithValue; - /// The configuration options for the memtable. - type Options; + /// Returns the start bound of the range entry. + fn start_bound(&self) -> Bound; - /// The error type may be returned when constructing the memtable. - type Error; + /// Returns the end bound of the range entry. + fn end_bound(&self) -> Bound; - /// The item returned by the iterator or query methods. - type Item<'a>: BaseEntry<'a, Key = Self::Key, Value = Self::Value> + Clone + /// Returns the value in the entry. + fn value(&self) -> Self::Value where - Self: 'a; + O: WithValue; - /// The iterator type. - type Iterator<'a>: DoubleEndedIterator> - where - Self: 'a; + /// Returns the range of the entry. + fn range(&self) -> (Bound, Bound) { + (self.start_bound(), self.end_bound()) + } - /// The range iterator type. - type Range<'a, Q, R>: DoubleEndedIterator> + /// Returns the next entry in the memory table. + fn next(&mut self) -> Option where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; + Self: Sized; - /// Creates a new memtable with the specified options. - fn new(opts: Self::Options) -> Result + /// Returns the previous entry in the memory table. + fn prev(&mut self) -> Option where Self: Sized; - /// Inserts a pointer into the memtable. 
- fn insert( - &self, - version: Option, - kp: KeyPointer, - vp: ValuePointer, - ) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static; + /// Returns the version of the entry. + fn version(&self) -> u64; +} - /// Removes the pointer associated with the key. - fn remove(&self, version: Option, key: KeyPointer) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static; +#[cfg(any(feature = "bounded", feature = "unbounded"))] +trait RangeEntryExt<'a, O>: RangeEntry<'a, O> { + /// Returns the start bound of the range entry. + fn query_start_bound(&self) -> Bound> { + match self.start_bound() { + Bound::Included(key) => Bound::Included(Query(key)), + Bound::Excluded(key) => Bound::Excluded(Query(key)), + Bound::Unbounded => Bound::Unbounded, + } + } + + /// Returns the end bound of the range entry. + fn query_end_bound(&self) -> Bound> { + match self.end_bound() { + Bound::Included(key) => Bound::Included(Query(key)), + Bound::Excluded(key) => Bound::Excluded(Query(key)), + Bound::Unbounded => Bound::Unbounded, + } + } - /// Returns the kind of the memtable. - fn kind() -> Kind; + /// Returns the range of the entry. + fn query_range(&self) -> impl RangeBounds> + 'a { + (self.query_start_bound(), self.query_end_bound()) + } } +#[cfg(any(feature = "bounded", feature = "unbounded"))] +impl<'a, O, T> RangeEntryExt<'a, O> for T where T: RangeEntry<'a, O> {} + /// A memory table which is used to store pointers to the underlying entries. -pub trait Memtable: BaseTable -where - for<'a> Self::Item<'a>: MemtableEntry<'a>, -{ - /// Returns the number of entries in the memtable. +pub trait Memtable { + /// The configuration options for the memtable. + type Options; + + /// The error type may be returned when constructing the memtable. + type Error; + + /// Creates a new memtable with the specified options. + fn new(arena: A, opts: Self::Options) -> Result + where + Self: Sized, + A: rarena_allocator::Allocator; + + /// Returns the total number of entries in the memtable. fn len(&self) -> usize; /// Returns `true` if the memtable is empty. fn is_empty(&self) -> bool { self.len() == 0 } +} - /// Returns the upper bound of the memtable. - fn upper_bound(&self, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>; +/// A memory table which is used to store pointers to the underlying entries. +pub trait MutableMemtable: Memtable { + /// Inserts a pointer into the memtable. + fn insert(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error>; - /// Returns the lower bound of the memtable. - fn lower_bound(&self, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>; + /// Removes the pointer associated with the key. + fn remove(&self, version: u64, key: RecordPointer) -> Result<(), Self::Error>; - /// Returns the first pointer in the memtable. - fn first(&self) -> Option> - where - KeyPointer: Ord; + /// Inserts a range deletion pointer into the memtable, a range deletion is a deletion of a range of keys, + /// which means that keys in the range are marked as deleted. + /// + /// This is not a contra operation to [`range_set`](MutableMemtable::range_set). + /// See also [`range_set`](MutableMemtable::range_set) and [`range_set`](MutableMemtable::range_unset). + fn range_remove(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error>; - /// Returns the last pointer in the memtable. - fn last(&self) -> Option> - where - KeyPointer: Ord; + /// Inserts an range update pointer into the memtable. 
+ fn range_set(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error>; - /// Returns the pointer associated with the key. - fn get(&self, key: &Q) -> Option> - where - Q: ?Sized + Comparable>; + /// Unset a range from the memtable, this is a contra operation to [`range_set`](MutableMemtable::range_set). + fn range_unset(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error>; +} - /// Returns `true` if the memtable contains the specified pointer. - fn contains(&self, key: &Q) -> bool - where - Q: ?Sized + Comparable>; +/// Transfer trait for converting data between different states. +pub trait Transfer<'a, D>: sealed::Sealed<'a, D> {} - /// Returns an iterator over the memtable. - fn iter(&self) -> Self::Iterator<'_>; +impl<'a, D, T> Transfer<'a, D> for T where T: sealed::Sealed<'a, D> {} - /// Returns an iterator over a subset of the memtable. - fn range<'a, Q, R>(&'a self, range: R) -> Self::Range<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; -} +mod sealed { + use dbutils::types::{LazyRef, Type}; -/// A memory table which is used to store pointers to the underlying entries. -pub trait MultipleVersionMemtable: BaseTable -where - for<'a> Self::Item<'a>: VersionedMemtableEntry<'a>, -{ - /// The item returned by the iterator or query methods. - type VersionedItem<'a>: VersionedMemtableEntry<'a, Key = Self::Key, Value = Self::Value> + Clone - where - KeyPointer: 'a, - Self: 'a; + #[cfg(all(feature = "crossbeam-skiplist-mvcc", not(feature = "skl")))] + pub trait Sealed<'a, I>: + crossbeam_skiplist_mvcc::Transfer< + 'a, + crate::types::RecordPointer, + To = ::Data<'a, &'a crate::types::RecordPointer>, + > + { + type Value; - /// The iterator type which can yields all the entries in the memtable. - type IterAll<'a>: DoubleEndedIterator> - where - KeyPointer: 'a, - Self: 'a; + fn input(data: &Self::Data<'a, I>) -> Self::Data<'a, &'a [u8]>; - /// The range iterator type which can yields all the entries in the memtable. - type RangeAll<'a, Q, R>: DoubleEndedIterator> - where - KeyPointer: 'a, - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; + fn from_input(input: Option<&'a [u8]>) -> Self::Data<'a, I> + where + Self: Sized; - /// Returns the maximum version of the memtable. - fn maximum_version(&self) -> u64; + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized; - /// Returns the minimum version of the memtable. - fn minimum_version(&self) -> u64; + fn transfer(data: &Self::Data<'a, I>) -> Self::Data<'a, Self::Value>; - /// Returns `true` if the memtable may contain an entry whose version is less than or equal to the specified version. - fn may_contain_version(&self, version: u64) -> bool; + fn leak(data: Self::Data<'a, T>) -> Option; - /// Returns the upper bound of the memtable. - fn upper_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>; - - /// Returns the upper bound of the memtable. - fn upper_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>; + fn into_state(data: Option>) -> Self::Data<'a, D>; + } - /// Returns the lower bound of the memtable. - fn lower_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>; - - /// Returns the lower bound of the memtable. 
- fn lower_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>; + #[cfg(all(feature = "skl", not(feature = "crossbeam-skiplist-mvcc")))] + pub trait Sealed<'a, I>: + skl::Transfer<'a, LazyRef<'a, crate::types::RecordPointer>, To = crate::types::RecordPointer> + { + type Value; - /// Returns the first pointer in the memtable. - fn first(&self, version: u64) -> Option> - where - KeyPointer: Ord; + fn input(data: &Self::Data<'a, I>) -> Self::Data<'a, &'a [u8]>; - /// Returns the first pointer in the memtable. - fn first_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord; + fn from_input(input: Option<&'a [u8]>) -> Self::Data<'a, I> + where + Self: Sized; - /// Returns the last pointer in the memtable. - fn last(&self, version: u64) -> Option> - where - KeyPointer: Ord; + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized; - /// Returns the last pointer in the memtable. - fn last_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord; + fn transfer(data: &Self::Data<'a, I>) -> Self::Data<'a, Self::Value>; - /// Returns the pointer associated with the key. - fn get(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>; + fn leak(data: Self::Data<'a, T>) -> Option; - /// Returns the pointer associated with the key. - fn get_versioned(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>; + fn into_state(data: Option>) -> Self::Data<'a, D>; + } - /// Returns `true` if the memtable contains the specified pointer. - fn contains(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>; + #[cfg(all(feature = "skl", feature = "crossbeam-skiplist-mvcc"))] + pub trait Sealed<'a, I>: + skl::Transfer<'a, LazyRef<'a, crate::types::RecordPointer>, To = crate::types::RecordPointer> + + crossbeam_skiplist_mvcc::Transfer< + 'a, + crate::types::RecordPointer, + To = ::Data<'a, &'a crate::types::RecordPointer>, + > + { + type Value; - /// Returns `true` if the memtable contains the specified pointer. - fn contains_versioned(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>; + fn input(data: &Self::Data<'a, I>) -> Self::Data<'a, &'a [u8]>; + + fn from_input(input: Option<&'a [u8]>) -> Self::Data<'a, I> + where + Self: Sized; + + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized; - /// Returns an iterator over the memtable. - fn iter(&self, version: u64) -> Self::Iterator<'_>; + fn transfer(data: &Self::Data<'a, I>) -> Self::Data<'a, Self::Value>; + + fn leak(data: Self::Data<'a, T>) -> Option; + + fn into_state(data: Option>) -> Self::Data<'a, D>; + } - /// Returns an iterator over all the entries in the memtable. - fn iter_all_versions(&self, version: u64) -> Self::IterAll<'_>; + #[cfg(not(any(feature = "skl", feature = "crossbeam-skiplist-mvcc")))] + pub trait Sealed<'a, I>: dbutils::state::State { + type Value; - /// Returns an iterator over a subset of the memtable. 
- fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Q, R> + fn input(data: &Self::Data<'a, I>) -> Self::Data<'a, &'a [u8]>; + + fn from_input(input: Option<&'a [u8]>) -> Self::Data<'a, I> + where + Self: Sized; + + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized; + + fn transfer(data: &Self::Data<'a, I>) -> Self::Data<'a, Self::Value>; + + fn leak(data: Self::Data<'a, T>) -> Option; + + fn into_state(data: Option>) -> Self::Data<'a, D>; + } + + impl<'a, I> Sealed<'a, LazyRef<'a, I>> for dbutils::state::Active where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; + I: Type + ?Sized, + { + type Value = I::Ref<'a>; + + #[inline] + fn input(data: &Self::Data<'a, LazyRef<'a, I>>) -> Self::Data<'a, &'a [u8]> { + data.raw().expect("entry in Active state must have value") + } + + #[inline] + fn from_input(input: Option<&'a [u8]>) -> LazyRef<'a, I> + where + Self: Sized, + { + unsafe { LazyRef::from_raw(input.expect("entry in Active state must have value")) } + } + + #[inline] + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized, + { + input.expect("entry in Active state must have value") + } + + #[inline] + fn transfer(data: &Self::Data<'a, LazyRef<'a, I>>) -> Self::Data<'a, I::Ref<'a>> { + *data.get() + } + + #[inline] + fn leak(data: Self::Data<'a, T>) -> Option { + Some(data) + } + + #[inline] + fn into_state(data: Option>) -> Self::Data<'a, D> { + data.expect("entry in Active state must have value") + } + } - /// Returns an iterator over all the entries in a subset of the memtable. - fn range_all_versions<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangeAll<'a, Q, R> + impl<'a, I> Sealed<'a, LazyRef<'a, I>> for dbutils::state::MaybeTombstone where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; + I: Type + ?Sized, + { + type Value = I::Ref<'a>; + + #[inline] + fn input(data: &Self::Data<'a, LazyRef<'a, I>>) -> Option<&'a [u8]> { + data + .as_ref() + .map(|v| v.raw().expect("entry in Active state must have value")) + } + + #[inline] + fn from_input(input: Option<&'a [u8]>) -> Option> + where + Self: Sized, + { + unsafe { input.map(|v| LazyRef::from_raw(v)) } + } + + #[inline] + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized, + { + input + } + + #[inline] + fn transfer(data: &Self::Data<'a, LazyRef<'a, I>>) -> Self::Data<'a, I::Ref<'a>> { + data.as_ref().map(|v| *v.get()) + } + + #[inline] + fn leak(data: Self::Data<'a, T>) -> Option { + data + } + + #[inline] + fn into_state(data: Option>) -> Self::Data<'a, D> { + data.flatten() + } + } + + impl<'a> Sealed<'a, &'a [u8]> for dbutils::state::Active { + type Value = &'a [u8]; + + #[inline] + fn input(data: &Self::Data<'a, &'a [u8]>) -> Self::Data<'a, &'a [u8]> { + *data + } + + #[inline] + fn from_input(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized, + { + input.expect("entry in Active state must have value") + } + + #[inline] + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized, + { + input.expect("entry in Active state must have value") + } + + #[inline] + fn transfer(data: &Self::Data<'a, &'a [u8]>) -> Self::Data<'a, Self::Value> { + *data + } + + #[inline] + fn leak(data: Self::Data<'a, T>) -> Option { + Some(data) + } + + #[inline] + fn into_state(data: Option>) -> Self::Data<'a, D> { + data.expect("entry in Active state must have value") + } + } + + impl<'a> Sealed<'a, &'a [u8]> for dbutils::state::MaybeTombstone { + type Value = &'a [u8]; + + 
#[inline] + fn input(data: &Self::Data<'a, &'a [u8]>) -> Option<&'a [u8]> { + data.as_ref().copied() + } + + #[inline] + fn from_input(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized, + { + input + } + + #[inline] + fn raw(input: Option<&'a [u8]>) -> Self::Data<'a, &'a [u8]> + where + Self: Sized, + { + input + } + + #[inline] + fn transfer(data: &Self::Data<'a, &'a [u8]>) -> Self::Data<'a, Self::Value> { + data.as_ref().copied() + } + + #[inline] + fn leak(data: Self::Data<'a, T>) -> Option { + data + } + + #[inline] + fn into_state(data: Option>) -> Self::Data<'a, D> { + data.flatten() + } + } } diff --git a/src/memtable/alternative.rs b/src/memtable/alternative.rs deleted file mode 100644 index ccde21a5..00000000 --- a/src/memtable/alternative.rs +++ /dev/null @@ -1,289 +0,0 @@ -pub use multiple_version::MultipleVersionTable; -pub use table::Table; - -macro_rules! match_op { - ($self:ident.$op:ident($($args:ident),*) $(.map($associated_ty:ident))?) => {{ - match $self { - Self::Arena(e) => e.$op($($args,)*) $(.map(Self::$associated_ty::Arena))?, - #[cfg(feature = "std")] - Self::Linked(e) => e.$op($($args,)*) $(.map(Self::$associated_ty::Linked))?, - }} - }; - (Dispatch::$associated_ty:ident($self:ident.$op:ident($($args:ident),*))) => {{ - match $self { - Self::Arena(e) => Self::$associated_ty::Arena(e.$op($($args,)*)), - #[cfg(feature = "std")] - Self::Linked(e) => Self::$associated_ty::Linked(e.$op($($args,)*)), - }} - }; - (new($opts:ident)) => {{ - match $opts { - Self::Options::Arena(opts) => ArenaTable::new(opts).map(Self::Arena).map_err(Self::Error::Arena), - #[cfg(feature = "std")] - Self::Options::Linked => LinkedTable::new(()) - .map(Self::Linked) - .map_err(|_| Self::Error::Linked), - } - }}; - (update($self:ident.$op:ident($($args:ident),*))) => {{ - match $self { - Self::Arena(t) => t.$op($($args,)*).map_err(Self::Error::Arena), - #[cfg(feature = "std")] - Self::Linked(t) => t.$op($($args,)*).map_err(|_| Self::Error::Linked), - } - }}; -} - -macro_rules! iter { - (enum $name:ident { - Arena($arena:ident), - Linked($linked:ident), - } -> $ent:ident) => { - /// A sum type of iter for different memtable implementations. - #[non_exhaustive] - pub enum $name<'a, K, V> - where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, - { - /// Arena iter - Arena($arena<'a, KeyPointer, ValuePointer>), - /// Linked iter - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - Linked($linked<'a, KeyPointer, ValuePointer>), - } - - impl<'a, K, V> Iterator for $name<'a, K, V> - where - K: ?Sized + Type + Ord + 'static, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, - { - type Item = $ent<'a, K, V>; - - #[inline] - fn next(&mut self) -> Option { - match_op!(self.next().map(Item)) - } - } - - impl<'a, K, V> DoubleEndedIterator for $name<'a, K, V> - where - K: ?Sized + Type + Ord + 'static, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, - { - #[inline] - fn next_back(&mut self) -> Option { - match_op!(self.next_back().map(Item)) - } - } - }; -} - -macro_rules! range { - (enum $name:ident { - Arena($arena:ident), - Linked($linked:ident), - } -> $ent:ident) => { - /// A sum type of range for different memtable implementations. 
- #[non_exhaustive] - pub enum $name<'a, K, V, Q, R> - where - R: RangeBounds, - Q: ?Sized + Comparable>, - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, - { - /// Arena range - Arena($arena<'a, KeyPointer, ValuePointer, Q, R>), - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - /// Linked range - Linked($linked<'a, Q, R, KeyPointer, ValuePointer>), - } - - impl<'a, K, V, Q, R> Iterator for $name<'a, K, V, Q, R> - where - R: RangeBounds, - Q: ?Sized + Comparable>, - K: ?Sized + Type + Ord + 'a, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'a, - { - type Item = $ent<'a, K, V>; - - #[inline] - fn next(&mut self) -> Option { - match_op!(self.next().map(Item)) - } - } - - impl<'a, K, V, Q, R> DoubleEndedIterator for $name<'a, K, V, Q, R> - where - R: RangeBounds, - Q: ?Sized + Comparable>, - K: ?Sized + Type + Ord + 'a, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'a, - { - fn next_back(&mut self) -> Option { - match_op!(self.next_back().map(Item)) - } - } - }; -} - -macro_rules! base_entry { - (enum $name:ident { - Arena($arena:ident), - Linked($linked:ident), - }) => { - /// A sum type of entry for different memtable implementations. - #[derive(Debug)] - #[non_exhaustive] - pub enum $name<'a, K, V> - where - K: ?Sized, - V: ?Sized, - { - /// Arena entry - Arena($arena<'a, KeyPointer, ValuePointer>), - /// Linked entry - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - Linked($linked<'a, KeyPointer, ValuePointer>), - } - - impl Clone for $name<'_, K, V> { - #[inline] - fn clone(&self) -> Self { - match self { - Self::Arena(e) => Self::Arena(e.clone()), - #[cfg(feature = "std")] - Self::Linked(e) => Self::Linked(e.clone()), - } - } - } - - impl<'a, K, V> BaseEntry<'a> for $name<'a, K, V> - where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, - { - type Key = K; - - type Value = V; - - #[inline] - fn key(&self) -> KeyPointer { - *match_op!(self.key()) - } - - fn next(&mut self) -> Option { - match self { - Self::Arena(e) => e.next().map(Self::Arena), - #[cfg(feature = "std")] - Self::Linked(e) => e.next().map(Self::Linked), - } - } - - fn prev(&mut self) -> Option { - match self { - Self::Arena(e) => e.prev().map(Self::Arena), - #[cfg(feature = "std")] - Self::Linked(e) => e.prev().map(Self::Linked), - } - } - } - }; -} - -/// The sum type for different memtable implementations options. -#[derive(Debug)] -#[non_exhaustive] -pub enum TableOptions { - /// The options for the arena memtable. - Arena(super::arena::TableOptions), - /// The options for the linked memtable. - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - Linked, -} - -#[cfg(feature = "std")] -impl Default for TableOptions { - #[inline] - fn default() -> Self { - Self::linked() - } -} - -#[cfg(not(feature = "std"))] -impl Default for TableOptions { - #[inline] - fn default() -> Self { - Self::arena() - } -} - -impl From for TableOptions { - #[inline] - fn from(opts: super::arena::TableOptions) -> Self { - Self::Arena(opts) - } -} - -impl TableOptions { - /// Create a new arena memtable options with the default values. - #[inline] - pub const fn arena() -> Self { - Self::Arena(super::arena::TableOptions::new()) - } - - /// Create a new linked memtable options with the default values. 
- #[inline] - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - pub const fn linked() -> Self { - Self::Linked - } -} - -/// The sum type of error for different memtable implementations. -#[derive(Debug)] -#[non_exhaustive] -pub enum Error { - /// The error for the arena memtable. - Arena(skl::error::Error), - /// The error for the linked memtable. - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - Linked, -} - -impl From for Error { - #[inline] - fn from(e: skl::error::Error) -> Self { - Self::Arena(e) - } -} - -impl core::fmt::Display for Error { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::Arena(e) => write!(f, "{e}"), - #[cfg(feature = "std")] - Self::Linked => Ok(()), - } - } -} - -impl core::error::Error for Error {} - -mod multiple_version; -mod table; diff --git a/src/memtable/alternative/multiple_version.rs b/src/memtable/alternative/multiple_version.rs deleted file mode 100644 index 863b096f..00000000 --- a/src/memtable/alternative/multiple_version.rs +++ /dev/null @@ -1,362 +0,0 @@ -use core::ops::{Bound, RangeBounds}; - -use crate::{ - memtable::{ - arena::{ - multiple_version::{ - Entry as ArenaEntry, Iter as ArenaIter, IterAll as ArenaIterAll, Range as ArenaRange, - RangeAll as ArenaRangeAll, VersionedEntry as ArenaVersionedEntry, - }, - MultipleVersionTable as ArenaTable, - }, - BaseEntry, BaseTable, MultipleVersionMemtable, VersionedMemtableEntry, - }, - sealed::WithVersion, - types::Kind, - wal::{KeyPointer, ValuePointer}, -}; - -#[cfg(feature = "std")] -use crate::memtable::linked::{ - multiple_version::{ - Entry as LinkedEntry, Iter as LinkedIter, IterAll as LinkedIterAll, Range as LinkedRange, - RangeAll as LinkedRangeAll, VersionedEntry as LinkedVersionedEntry, - }, - MultipleVersionTable as LinkedTable, -}; - -use dbutils::{ - equivalent::Comparable, - types::{KeyRef, Type}, -}; - -use super::TableOptions; - -base_entry!( - enum Entry { - Arena(ArenaEntry), - Linked(LinkedEntry), - } -); - -impl<'a, K, V> VersionedMemtableEntry<'a> for Entry<'a, K, V> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - #[inline] - fn value(&self) -> Option> { - Some(*match_op!(self.value())) - } - - #[inline] - fn version(&self) -> u64 { - match_op!(self.version()) - } -} - -impl WithVersion for Entry<'_, K, V> {} - -base_entry!( - enum VersionedEntry { - Arena(ArenaVersionedEntry), - Linked(LinkedVersionedEntry), - } -); - -impl<'a, K, V> VersionedMemtableEntry<'a> for VersionedEntry<'a, K, V> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - #[inline] - fn value(&self) -> Option> { - match_op!(self.value()).copied() - } - - #[inline] - fn version(&self) -> u64 { - match_op!(self.version()) - } -} - -impl WithVersion for VersionedEntry<'_, K, V> {} - -iter!( - enum Iter { - Arena(ArenaIter), - Linked(LinkedIter), - } -> Entry -); - -range!( - enum Range { - Arena(ArenaRange), - Linked(LinkedRange), - } -> Entry -); - -iter!( - enum IterAll { - Arena(ArenaIterAll), - Linked(LinkedIterAll), - } -> VersionedEntry -); - -range!( - enum RangeAll { - Arena(ArenaRangeAll), - Linked(LinkedRangeAll), - } -> VersionedEntry -); - -/// A sum type for different memtable implementations. 
-#[non_exhaustive] -pub enum MultipleVersionTable { - /// Arena memtable - Arena(ArenaTable), - /// Linked memtable - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - Linked(LinkedTable), -} - -impl BaseTable for MultipleVersionTable -where - K: ?Sized + Type + Ord + 'static, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - type Key = K; - - type Value = V; - - type Options = TableOptions; - - type Error = super::Error; - - type Item<'a> - = Entry<'a, K, V> - where - Self: 'a; - - type Iterator<'a> - = Iter<'a, K, V> - where - Self: 'a; - - type Range<'a, Q, R> - = Range<'a, K, V, Q, R> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - #[inline] - fn new(opts: Self::Options) -> Result - where - Self: Sized, - { - match_op!(new(opts)) - } - - #[inline] - fn insert( - &self, - version: Option, - kp: KeyPointer, - vp: ValuePointer, - ) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - match_op!(update(self.insert(version, kp, vp))) - } - - #[inline] - fn remove(&self, version: Option, key: KeyPointer) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - match_op!(update(self.remove(version, key))) - } - - #[inline] - fn kind() -> Kind { - Kind::MultipleVersion - } -} - -impl MultipleVersionMemtable for MultipleVersionTable -where - K: ?Sized + Type + Ord + 'static, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - type VersionedItem<'a> - = VersionedEntry<'a, K, V> - where - KeyPointer: 'a, - Self: 'a; - - type IterAll<'a> - = IterAll<'a, K, V> - where - KeyPointer: 'a, - Self: 'a; - - type RangeAll<'a, Q, R> - = RangeAll<'a, K, V, Q, R> - where - KeyPointer: 'a, - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - #[inline] - fn maximum_version(&self) -> u64 { - match_op!(self.maximum_version()) - } - - #[inline] - fn minimum_version(&self) -> u64 { - match_op!(self.minimum_version()) - } - - #[inline] - fn may_contain_version(&self, version: u64) -> bool { - match_op!(self.may_contain_version(version)) - } - - #[inline] - fn upper_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self.upper_bound(version, bound).map(Item)) - } - - fn upper_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self - .upper_bound_versioned(version, bound) - .map(VersionedItem)) - } - - #[inline] - fn lower_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self.lower_bound(version, bound).map(Item)) - } - - fn lower_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self - .lower_bound_versioned(version, bound) - .map(VersionedItem)) - } - - #[inline] - fn first(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - match_op!(self.first(version).map(Item)) - } - - fn first_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - match_op!(self.first_versioned(version).map(VersionedItem)) - } - - #[inline] - fn last(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - match_op!(self.last(version).map(Item)) - } - - fn last_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - 
match_op!(self.last_versioned(version).map(VersionedItem)) - } - - #[inline] - fn get(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self.get(version, key).map(Item)) - } - - fn get_versioned(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self.get_versioned(version, key).map(VersionedItem)) - } - - #[inline] - fn contains(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - match_op!(self.contains(version, key)) - } - - fn contains_versioned(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - match_op!(self.contains_versioned(version, key)) - } - - #[inline] - fn iter(&self, version: u64) -> Self::Iterator<'_> { - match_op!(Dispatch::Iterator(self.iter(version))) - } - - fn iter_all_versions(&self, version: u64) -> Self::IterAll<'_> { - match_op!(Dispatch::IterAll(self.iter_all_versions(version))) - } - - #[inline] - fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - match_op!(Dispatch::Range(self.range(version, range))) - } - - fn range_all_versions<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangeAll<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - match_op!(Dispatch::RangeAll(self.range_all_versions(version, range))) - } -} diff --git a/src/memtable/alternative/table.rs b/src/memtable/alternative/table.rs deleted file mode 100644 index ef75af6f..00000000 --- a/src/memtable/alternative/table.rs +++ /dev/null @@ -1,215 +0,0 @@ -use core::ops::{Bound, RangeBounds}; - -use crate::{ - memtable::{ - arena::{ - table::{Entry as ArenaEntry, Iter as ArenaIter, Range as ArenaRange}, - Table as ArenaTable, - }, - BaseEntry, BaseTable, Memtable, MemtableEntry, - }, - sealed::WithoutVersion, - types::Kind, - wal::{KeyPointer, ValuePointer}, -}; - -#[cfg(feature = "std")] -use crate::memtable::linked::{ - table::{Entry as LinkedEntry, Iter as LinkedIter, Range as LinkedRange}, - Table as LinkedTable, -}; - -use dbutils::{ - equivalent::Comparable, - types::{KeyRef, Type}, -}; - -use super::TableOptions; - -base_entry!( - enum Entry { - Arena(ArenaEntry), - Linked(LinkedEntry), - } -); - -impl<'a, K, V> MemtableEntry<'a> for Entry<'a, K, V> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - #[inline] - fn value(&self) -> ValuePointer { - *match_op!(self.value()) - } -} - -impl WithoutVersion for Entry<'_, K, V> {} - -iter!( - enum Iter { - Arena(ArenaIter), - Linked(LinkedIter), - } -> Entry -); - -range!( - enum Range { - Arena(ArenaRange), - Linked(LinkedRange), - } -> Entry -); - -/// A sum type for different memtable implementations. 
-#[non_exhaustive] -pub enum Table { - /// Arena memtable - Arena(ArenaTable), - /// Linked memtable - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - Linked(LinkedTable), -} - -impl BaseTable for Table -where - K: ?Sized + Type + Ord + 'static, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - type Key = K; - - type Value = V; - - type Options = TableOptions; - - type Error = super::Error; - - type Item<'a> - = Entry<'a, K, V> - where - Self: 'a; - - type Iterator<'a> - = Iter<'a, K, V> - where - Self: 'a; - - type Range<'a, Q, R> - = Range<'a, K, V, Q, R> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - #[inline] - fn new(opts: Self::Options) -> Result - where - Self: Sized, - { - match_op!(new(opts)) - } - - #[inline] - fn insert( - &self, - version: Option, - kp: KeyPointer, - vp: ValuePointer, - ) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - match_op!(update(self.insert(version, kp, vp))) - } - - #[inline] - fn remove(&self, version: Option, key: KeyPointer) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - match_op!(update(self.remove(version, key))) - } - - #[inline] - fn kind() -> Kind { - Kind::Plain - } -} - -impl Memtable for Table -where - K: ?Sized + Type + Ord + 'static, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - #[inline] - fn len(&self) -> usize { - match_op!(self.len()) - } - - #[inline] - fn upper_bound(&self, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self.upper_bound(bound).map(Item)) - } - - #[inline] - fn lower_bound(&self, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self.lower_bound(bound).map(Item)) - } - - #[inline] - fn first(&self) -> Option> - where - KeyPointer: Ord, - { - match_op!(self.first().map(Item)) - } - - #[inline] - fn last(&self) -> Option> - where - KeyPointer: Ord, - { - match_op!(self.last().map(Item)) - } - - #[inline] - fn get(&self, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - match_op!(self.get(key).map(Item)) - } - - #[inline] - fn contains(&self, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - match_op!(self.contains(key)) - } - - #[inline] - fn iter(&self) -> Self::Iterator<'_> { - match_op!(Dispatch::Iterator(self.iter())) - } - - #[inline] - fn range<'a, Q, R>(&'a self, range: R) -> Self::Range<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - match_op!(Dispatch::Range(self.range(range))) - } -} diff --git a/src/memtable/arena.rs b/src/memtable/arena.rs deleted file mode 100644 index d05f414a..00000000 --- a/src/memtable/arena.rs +++ /dev/null @@ -1,102 +0,0 @@ -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -macro_rules! memmap_or_not { - ($opts:ident($arena_opts:ident)) => {{ - if $opts.map_anon() { - $arena_opts - .map_anon::, ValuePointer, _>() - .map_err(skl::error::Error::IO) - } else { - $arena_opts.alloc::, ValuePointer, _>() - } - .map(|map| Self { map }) - }}; -} - -#[cfg(not(all(feature = "memmap", not(target_family = "wasm"))))] -macro_rules! memmap_or_not { - ($opts:ident($arena_opts:ident)) => {{ - $arena_opts - .alloc::, ValuePointer, _>() - .map(|map| Self { map }) - }}; -} - -pub use skl::Height; - -/// Options to configure the [`Table`] or [`MultipleVersionTable`]. 
-#[derive(Debug, Copy, Clone)] -pub struct TableOptions { - capacity: u32, - map_anon: bool, - max_height: Height, -} - -impl Default for TableOptions { - #[inline] - fn default() -> Self { - Self::new() - } -} - -impl TableOptions { - /// Creates a new instance of `TableOptions` with the default options. - #[inline] - pub const fn new() -> Self { - Self { - capacity: 8192, - map_anon: false, - max_height: Height::new(), - } - } - - /// Sets the capacity of the table. - /// - /// Default is `8KB`. - #[inline] - pub const fn with_capacity(mut self, capacity: u32) -> Self { - self.capacity = capacity; - self - } - - /// Sets the table to use anonymous memory. - #[inline] - pub const fn with_map_anon(mut self, map_anon: bool) -> Self { - self.map_anon = map_anon; - self - } - - /// Sets the maximum height of the table. - /// - /// Default is `20`. - #[inline] - pub const fn with_max_height(mut self, max_height: Height) -> Self { - self.max_height = max_height; - self - } - - /// Returns the capacity of the table. - #[inline] - pub const fn capacity(&self) -> u32 { - self.capacity - } - - /// Returns `true` if the table is using anonymous memory. - #[inline] - pub const fn map_anon(&self) -> bool { - self.map_anon - } - - /// Returns the maximum height of the table. - #[inline] - pub const fn max_height(&self) -> Height { - self.max_height - } -} - -/// The multiple version memtable implementation. -pub mod multiple_version; -/// The memtable implementation. -pub mod table; - -pub use multiple_version::MultipleVersionTable; -pub use table::Table; diff --git a/src/memtable/arena/multiple_version.rs b/src/memtable/arena/multiple_version.rs deleted file mode 100644 index 238f4616..00000000 --- a/src/memtable/arena/multiple_version.rs +++ /dev/null @@ -1,358 +0,0 @@ -use core::ops::{Bound, RangeBounds}; - -use among::Among; -use dbutils::{ - equivalent::Comparable, - types::{KeyRef, Type}, -}; -use skl::{ - either::Either, - multiple_version::{sync::SkipMap, Map as _}, - Options, -}; - -pub use skl::multiple_version::sync::{Entry, Iter, IterAll, Range, RangeAll, VersionedEntry}; - -use crate::{ - memtable::{BaseEntry, BaseTable, MultipleVersionMemtable, VersionedMemtableEntry}, - sealed::WithVersion, - types::Kind, - wal::{KeyPointer, ValuePointer}, -}; - -use super::TableOptions; - -impl<'a, K, V> BaseEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - type Key = K; - type Value = V; - - #[inline] - fn next(&mut self) -> Option { - Entry::next(self) - } - - #[inline] - fn prev(&mut self) -> Option { - Entry::prev(self) - } - - #[inline] - fn key(&self) -> KeyPointer { - *Entry::key(self) - } -} - -impl<'a, K, V> VersionedMemtableEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - #[inline] - fn value(&self) -> Option> { - Some(*Entry::value(self)) - } - - #[inline] - fn version(&self) -> u64 { - Entry::version(self) - } -} - -impl WithVersion for Entry<'_, KeyPointer, ValuePointer> -where - K: ?Sized, - V: ?Sized, -{ -} - -impl<'a, K, V> BaseEntry<'a> for VersionedEntry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - type Key = K; - type Value = V; - - #[inline] - fn next(&mut self) -> Option { - VersionedEntry::next(self) - } - - #[inline] - fn prev(&mut self) -> Option { 
- VersionedEntry::prev(self) - } - - #[inline] - fn key(&self) -> KeyPointer { - *VersionedEntry::key(self) - } -} - -impl<'a, K, V> VersionedMemtableEntry<'a> for VersionedEntry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - #[inline] - fn version(&self) -> u64 { - self.version() - } - - #[inline] - fn value(&self) -> Option> { - VersionedEntry::value(self).copied() - } -} - -impl WithVersion for VersionedEntry<'_, KeyPointer, ValuePointer> -where - K: ?Sized, - V: ?Sized, -{ -} - -/// A memory table implementation based on ARENA [`SkipMap`](skl). -pub struct MultipleVersionTable { - map: SkipMap, ValuePointer>, -} - -impl BaseTable for MultipleVersionTable -where - K: ?Sized + Type + Ord + 'static, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - type Key = K; - type Value = V; - - type Item<'a> - = Entry<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Iterator<'a> - = Iter<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Range<'a, Q, R> - = Range<'a, KeyPointer, ValuePointer, Q, R> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - type Options = TableOptions; - - type Error = skl::error::Error; - - #[inline] - fn new(opts: Self::Options) -> Result { - let arena_opts = Options::new() - .with_capacity(opts.capacity()) - .with_freelist(skl::Freelist::None) - .with_unify(false) - .with_max_height(opts.max_height()); - - memmap_or_not!(opts(arena_opts)) - } - - fn insert( - &self, - version: Option, - kp: KeyPointer, - vp: ValuePointer, - ) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - self - .map - .insert(version.unwrap_or(0), &kp, &vp) - .map(|_| ()) - .map_err(|e| match e { - Among::Right(e) => e, - _ => unreachable!(), - }) - } - - fn remove(&self, version: Option, key: KeyPointer) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - match self.map.get_or_remove(version.unwrap_or(0), &key) { - Err(Either::Right(e)) => Err(e), - Err(Either::Left(_)) => unreachable!(), - _ => Ok(()), - } - } - - #[inline] - fn kind() -> Kind { - Kind::MultipleVersion - } -} - -impl MultipleVersionMemtable for MultipleVersionTable -where - K: ?Sized + Type + Ord + 'static, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - type VersionedItem<'a> - = VersionedEntry<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type IterAll<'a> - = IterAll<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type RangeAll<'a, Q, R> - = RangeAll<'a, KeyPointer, ValuePointer, Q, R> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - #[inline] - fn maximum_version(&self) -> u64 { - self.map.maximum_version() - } - - #[inline] - fn minimum_version(&self) -> u64 { - self.map.minimum_version() - } - - #[inline] - fn may_contain_version(&self, version: u64) -> bool { - self.map.may_contain_version(version) - } - - fn upper_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.upper_bound(version, bound) - } - - fn upper_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.upper_bound_versioned(version, bound) - } - - fn lower_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.lower_bound(version, bound) - } - - fn lower_bound_versioned( - 
&self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.lower_bound_versioned(version, bound) - } - - fn first(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.map.first(version) - } - - fn first_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.map.first_versioned(version) - } - - fn last(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.map.last(version) - } - - fn last_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.map.last_versioned(version) - } - - fn get(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.get(version, key) - } - - fn get_versioned(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.get_versioned(version, key) - } - - fn contains(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - self.map.contains_key(version, key) - } - - fn contains_versioned(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - self.map.contains_key_versioned(version, key) - } - - fn iter(&self, version: u64) -> Self::Iterator<'_> { - self.map.iter(version) - } - - fn iter_all_versions(&self, version: u64) -> Self::IterAll<'_> { - self.map.iter_all_versions(version) - } - - fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - self.map.range(version, range) - } - - fn range_all_versions<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangeAll<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - self.map.range_all_versions(version, range) - } -} diff --git a/src/memtable/arena/table.rs b/src/memtable/arena/table.rs deleted file mode 100644 index b85ce717..00000000 --- a/src/memtable/arena/table.rs +++ /dev/null @@ -1,204 +0,0 @@ -use core::ops::{Bound, RangeBounds}; - -use among::Among; -use dbutils::{ - equivalent::Comparable, - types::{KeyRef, Type}, -}; -use skl::{ - either::Either, - map::{sync::SkipMap, Map as _}, - Arena as _, EntryRef, Options, -}; - -use crate::{ - memtable::{BaseEntry, BaseTable, Memtable, MemtableEntry}, - sealed::WithoutVersion, - types::Kind, - wal::{KeyPointer, ValuePointer}, -}; - -use super::TableOptions; - -pub use skl::map::sync::{Entry, Iter, Range}; - -impl<'a, K, V> BaseEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - type Key = K; - type Value = V; - - #[inline] - fn next(&mut self) -> Option { - Entry::next(self) - } - - #[inline] - fn prev(&mut self) -> Option { - Entry::prev(self) - } - - #[inline] - fn key(&self) -> KeyPointer { - *EntryRef::key(self) - } -} - -impl<'a, K, V> MemtableEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type, -{ - #[inline] - fn value(&self) -> ValuePointer { - *EntryRef::value(self) - } -} - -impl WithoutVersion for Entry<'_, KeyPointer, ValuePointer> {} - -/// A memory table implementation based on ARENA [`SkipMap`](skl). 
-pub struct Table { - map: SkipMap, ValuePointer>, -} - -impl BaseTable for Table -where - K: ?Sized + Type + Ord + 'static, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - type Key = K; - type Value = V; - type Item<'a> - = Entry<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Iterator<'a> - = Iter<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Range<'a, Q, R> - = Range<'a, KeyPointer, ValuePointer, Q, R> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - type Options = TableOptions; - type Error = skl::error::Error; - - #[inline] - fn new(opts: Self::Options) -> Result { - let arena_opts = Options::new() - .with_capacity(opts.capacity()) - .with_freelist(skl::Freelist::None) - .with_unify(false) - .with_max_height(opts.max_height()); - - memmap_or_not!(opts(arena_opts)) - } - - fn insert( - &self, - _: Option, - kp: KeyPointer, - vp: ValuePointer, - ) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - self.map.insert(&kp, &vp).map(|_| ()).map_err(|e| match e { - Among::Right(e) => e, - _ => unreachable!(), - }) - } - - fn remove(&self, _: Option, key: KeyPointer) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - match self.map.get_or_remove(&key) { - Err(Either::Right(e)) => Err(e), - Err(Either::Left(_)) => unreachable!(), - _ => Ok(()), - } - } - - #[inline] - fn kind() -> Kind { - Kind::Plain - } -} - -impl Memtable for Table -where - K: ?Sized + Type + Ord + 'static, - for<'a> KeyPointer: Type = KeyPointer> + KeyRef<'a, KeyPointer>, - V: ?Sized + Type + 'static, -{ - #[inline] - fn len(&self) -> usize { - self.map.len() - } - - fn upper_bound(&self, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.upper_bound(bound) - } - - fn lower_bound(&self, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.lower_bound(bound) - } - - fn first(&self) -> Option> - where - KeyPointer: Ord, - { - self.map.first() - } - - fn last(&self) -> Option> - where - KeyPointer: Ord, - { - self.map.last() - } - - fn get(&self, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - self.map.get(key) - } - - fn contains(&self, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - self.map.contains_key(key) - } - - fn iter(&self) -> Self::Iterator<'_> { - self.map.iter() - } - - fn range<'a, Q, R>(&'a self, range: R) -> Self::Range<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - self.map.range(range) - } -} diff --git a/src/memtable/bounded.rs b/src/memtable/bounded.rs new file mode 100644 index 00000000..eb2446cc --- /dev/null +++ b/src/memtable/bounded.rs @@ -0,0 +1,353 @@ +pub use skl::Height; + +use crate::{ + memtable::{Entry, Memtable, RangeEntry, RangeEntryExt as _}, + types::{ + sealed::{ComparatorConstructor, PointComparator, Pointee, RangeComparator}, + Mode, Query, RecordPointer, RefQuery, Remove, Update, + }, +}; +use core::ops::ControlFlow; + +use among::Among; +use dbutils::state::State; +use ref_cast::RefCast; + +use skl::{ + either::Either, + generic::{ + multiple_version::{sync::SkipMap, Map}, + Ascend, Comparator, LazyRef, TypeRefComparator, TypeRefQueryComparator, + }, + Active, MaybeTombstone, +}; +use triomphe::Arc; + +pub use entry::*; +pub use iter::*; +pub use point::*; +pub use range_entry::*; + +use super::{sealed, MutableMemtable, Transfer}; + +mod entry; +mod iter; +mod point; +mod range_entry; + +/// Options to configure the [`Table`] or 
[`MultipleVersionTable`]. +#[derive(Debug, Copy, Clone)] +pub struct TableOptions { + capacity: u32, + map_anon: bool, + max_height: Height, + pub(in crate::memtable) cmp: C, +} + +impl Default for TableOptions { + #[inline] + fn default() -> Self { + Self::with_comparator(Default::default()) + } +} + +impl TableOptions { + /// Creates a new instance of `TableOptions` with the default options. + #[inline] + pub const fn new() -> Self { + Self { + capacity: 8192, + map_anon: false, + max_height: Height::new(), + cmp: Ascend::new(), + } + } +} + +impl TableOptions { + /// Creates a new instance of `TableOptions` with the default options. + #[inline] + pub const fn with_comparator(cmp: C) -> TableOptions { + Self { + capacity: 8192, + map_anon: false, + max_height: Height::new(), + cmp, + } + } + + /// Sets the capacity of the table. + /// + /// Default is `8KB`. + #[inline] + pub const fn with_capacity(mut self, capacity: u32) -> Self { + self.capacity = capacity; + self + } + + /// Sets the table to use anonymous memory. + #[inline] + pub const fn with_map_anon(mut self, map_anon: bool) -> Self { + self.map_anon = map_anon; + self + } + + /// Sets the maximum height of the table. + /// + /// Default is `20`. + #[inline] + pub const fn with_max_height(mut self, max_height: Height) -> Self { + self.max_height = max_height; + self + } + + /// Returns the capacity of the table. + #[inline] + pub const fn capacity(&self) -> u32 { + self.capacity + } + + /// Returns `true` if the table is using anonymous memory. + #[inline] + pub const fn map_anon(&self) -> bool { + self.map_anon + } + + /// Returns the maximum height of the table. + #[inline] + pub const fn max_height(&self) -> Height { + self.max_height + } +} + +/// A memory table implementation based on ARENA [`SkipMap`](skl). 
+pub struct Table +where + T: Mode, +{ + pub(in crate::memtable) skl: SkipMap>, + pub(in crate::memtable) range_deletions_skl: + SkipMap>, + pub(in crate::memtable) range_updates_skl: + SkipMap>, +} + +impl Memtable for Table +where + C: 'static, + T: Mode, + T::Comparator: 'static, + T::RangeComparator: 'static, +{ + type Options = TableOptions; + type Error = skl::error::Error; + + #[inline] + fn new(arena: A, opts: Self::Options) -> Result + where + Self: Sized, + A: rarena_allocator::Allocator, + { + { + use skl::Arena; + let arena_opts = skl::Options::new() + .with_capacity(opts.capacity()) + .with_freelist(skl::options::Freelist::None) + .with_compression_policy(skl::options::CompressionPolicy::None) + .with_unify(false) + .with_max_height(opts.max_height()); + + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + let mmap = opts.map_anon(); + let cmp = Arc::new(opts.cmp); + let ptr = arena.raw_ptr(); + let points_cmp = as ComparatorConstructor<_>>::new(ptr, cmp.clone()); + let range_del_cmp = + as ComparatorConstructor<_>>::new(ptr, cmp.clone()); + let range_update_cmp = + as ComparatorConstructor<_>>::new(ptr, cmp.clone()); + let b = skl::generic::Builder::with(points_cmp).with_options(arena_opts); + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] + let points: SkipMap<_, _, _> = { + if mmap { + b.map_anon().map_err(skl::error::Error::IO) + } else { + b.alloc() + } + }?; + #[cfg(not(all(feature = "memmap", not(target_family = "wasm"))))] + let points: SkipMap<_, _, _> = b.alloc()?; + let allocator = points.allocator().clone(); + let range_del_skl = + SkipMap::<_, _, _>::create_from_allocator(allocator.clone(), range_del_cmp)?; + let range_key_skl = SkipMap::<_, _, _>::create_from_allocator(allocator, range_update_cmp)?; + Ok(Self { + skl: points, + range_updates_skl: range_key_skl, + range_deletions_skl: range_del_skl, + }) + } + } + + #[inline] + fn len(&self) -> usize { + self.skl.len() + self.range_deletions_skl.len() + self.range_updates_skl.len() + } +} + +impl MutableMemtable for Table +where + C: 'static, + T: Mode, + T::Comparator: for<'a> TypeRefComparator<'a, RecordPointer> + 'static, + T::RangeComparator: for<'a> TypeRefComparator<'a, RecordPointer> + 'static, +{ + #[inline] + fn insert(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error> { + self + .skl + .insert(version, &pointer, &pointer) + .map(|_| ()) + .map_err(Among::unwrap_right) + } + + #[inline] + fn remove(&self, version: u64, key: RecordPointer) -> Result<(), Self::Error> { + self + .skl + .get_or_remove(version, &key) + .map(|_| ()) + .map_err(Either::unwrap_right) + } + + #[inline] + fn range_remove(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error> { + self + .range_deletions_skl + .insert(version, &pointer, &pointer) + .map(|_| ()) + .map_err(Among::unwrap_right) + } + + #[inline] + fn range_set(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error> { + self + .range_updates_skl + .insert(version, &pointer, &pointer) + .map(|_| ()) + .map_err(Among::unwrap_right) + } + + #[inline] + fn range_unset(&self, version: u64, key: RecordPointer) -> Result<(), Self::Error> { + self + .range_updates_skl + .get_or_remove(version, &key) + .map(|_| ()) + .map_err(Either::unwrap_right) + } +} + +impl<'a, C, T> Table +where + C: 'static, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + TypeRefComparator<'a, RecordPointer> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: 
TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, RefQuery< as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, +{ + pub(in crate::memtable) fn validate( + &'a self, + query_version: u64, + ent: PointEntryRef<'a, S, C, T>, + ) -> ControlFlow>, PointEntryRef<'a, S, C, T>> + where + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + S::Data<'a, S::Value>: 'a, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, + { + let key = ent.key(); + let cmp = ent.ent.comparator(); + let version = ent.ent.version(); + let query = RefQuery::new(key); + let shadow = self + .range_deletions_skl + .range(query_version, ..=&query) + .any(|ent| { + let del_ent_version = ent.version(); + if !(version <= del_ent_version && del_ent_version <= query_version) { + return false; + } + let ent = RangeEntryRef::::new(ent); + dbutils::equivalentor::RangeComparator::contains( + cmp, + &ent.query_range(), + Query::ref_cast(&query.query), + ) + }); + if shadow { + return ControlFlow::Continue(ent); + } + let range_ent = self + .range_updates_skl + .range_all(query_version, ..=&query) + .filter_map(|ent| { + let range_ent_version = ent.version(); + if !(version <= range_ent_version && range_ent_version <= query_version) { + return None; + } + let ent = RangeEntryRef::::new(ent); + if dbutils::equivalentor::RangeComparator::contains( + cmp, + &ent.query_range(), + Query::ref_cast(&query.query), + ) { + Some(ent) + } else { + None + } + }) + .max_by_key(|e| e.version()); + if let Some(range_ent) = range_ent { + let version = range_ent.version(); + if let Some(val) = range_ent.into_value() { + return ControlFlow::Break(Some(EntryRef::new( + self, + query_version, + ent, + key, + Some(S::data(val)), + version, + ))); + } + } + let version = ent.version(); + ControlFlow::Break(Some(EntryRef::new( + self, + query_version, + ent, + key, + None, + version, + ))) + } +} diff --git a/src/memtable/bounded/entry.rs b/src/memtable/bounded/entry.rs new file mode 100644 index 00000000..488d3e09 --- /dev/null +++ b/src/memtable/bounded/entry.rs @@ -0,0 +1,218 @@ +use core::ops::ControlFlow; + +use skl::{ + generic::{Comparator, LazyRef, TypeRefComparator, TypeRefQueryComparator}, + Active, MaybeTombstone, State, +}; + +use crate::{ + memtable::{sealed, Entry, RangeEntry, RawEntry, Transfer}, + types::{ + sealed::{PointComparator, Pointee, RangeComparator}, + Mode, Query, RecordPointer, RefQuery, Remove, Update, + }, +}; + +use super::{PointEntryRef, RangeEntryRef, Table}; + +/// Entry in the memtable. 
+pub struct EntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + table: &'a Table, + point_ent: PointEntryRef<'a, S, C, T>, + key: as Pointee<'a>>::Output, + val: Option>>, + version: u64, + query_version: u64, +} + +impl<'a, S, C, T> core::fmt::Debug for EntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: core::fmt::Debug, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + as Pointee<'a>>::Output: core::fmt::Debug, + T::Comparator: PointComparator + TypeRefComparator<'a, RecordPointer>, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("Entry") + .field("key", &self.key) + .field("value", &self.value_in()) + .field("version", &self.version) + .finish() + } +} + +impl<'a, S, C, T> Clone for EntryRef<'a, S, C, T> +where + S: State, + S::Data<'a, T::Value<'a>>: Clone, + PointEntryRef<'a, S, C, T>: Clone, + T: Mode, + T::Key<'a>: Clone, + T::Value<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + table: self.table, + point_ent: self.point_ent.clone(), + key: self.key, + val: self.val.clone(), + version: self.version, + query_version: self.query_version, + } + } +} + +impl<'a, S, C, T> RawEntry<'a> for EntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, &'a [u8]>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + TypeRefComparator<'a, RecordPointer>, + PointEntryRef<'a, S, C, T>: RawEntry<'a, RawValue = S::Data<'a, &'a [u8]>>, +{ + type RawValue = S::Data<'a, &'a [u8]>; + + #[inline] + fn raw_key(&self) -> &'a [u8] { + self.point_ent.raw_key() + } + + #[inline] + fn raw_value(&self) -> Self::RawValue { + match self.val.as_ref() { + Some(val) => >>::input(val), + None => self.point_ent.raw_value(), + } + } +} + +impl<'a, S, C, T> Entry<'a> for EntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + TypeRefComparator<'a, RecordPointer> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, RefQuery< as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + type Key = as Pointee<'a>>::Output; + + type Value = S::Data<'a, S::Value>; + + #[inline] + fn key(&self) -> Self::Key { + self.key + } + + #[inline] + fn value(&self) -> Self::Value { + self.value_in() + } + + #[inline] + fn next(&self) -> Option { + let mut next = self.point_ent.next(); + while let Some(ent) = next { + match self.table.validate(self.query_version, ent) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(ent) => next = ent.next(), + } + } + None + } + + #[inline] + fn prev(&self) -> Option { + let mut prev = self.point_ent.prev(); + while let Some(ent) = prev { + match 
self.table.validate(self.query_version, ent) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(ent) => prev = ent.prev(), + } + } + None + } + + #[inline] + fn version(&self) -> u64 { + self.version + } +} + +impl<'a, S, C, T> EntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(crate) fn new( + table: &'a Table, + query_version: u64, + point_ent: PointEntryRef<'a, S, C, T>, + key: as Pointee<'a>>::Output, + val: Option>>, + version: u64, + ) -> Self { + Self { + table, + point_ent, + key, + val, + version, + query_version, + } + } +} + +impl<'a, S, C, T> EntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + TypeRefComparator<'a, RecordPointer>, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, +{ + #[inline] + fn value_in(&self) -> S::Data<'a, S::Value> { + match self.val.as_ref() { + Some(val) => >>::transfer(val), + None => self.point_ent.value(), + } + } +} diff --git a/src/memtable/bounded/iter.rs b/src/memtable/bounded/iter.rs new file mode 100644 index 00000000..979459bc --- /dev/null +++ b/src/memtable/bounded/iter.rs @@ -0,0 +1,292 @@ +use core::ops::{ControlFlow, RangeBounds}; + +use skl::{ + generic::{ + multiple_version::Map as _, Comparator, LazyRef, TypeRefComparator, TypeRefQueryComparator, + }, + Active, MaybeTombstone, State, +}; + +use crate::{ + memtable::{sealed, Entry, RangeEntry, Transfer}, + types::{ + sealed::{PointComparator, Pointee, RangeComparator}, + Mode, Query, RecordPointer, RefQuery, Remove, Update, + }, +}; + +use super::{EntryRef, IterPoints, PointEntryRef, RangeEntryRef, RangePoints, Table}; + +/// An iterator over the entries of a `Memtable`. 
+pub struct Iter<'a, S, C, T> +where + C: 'static, + T: Mode, + S: State, +{ + table: &'a Table, + iter: IterPoints<'a, S, C, T>, + query_version: u64, +} + +impl<'a, C, T> Iter<'a, MaybeTombstone, C, T> +where + C: 'static, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn with_tombstone(version: u64, table: &'a Table) -> Self { + Self { + iter: IterPoints::new(table.skl.iter_all(version)), + query_version: version, + table, + } + } +} + +impl<'a, C, T> Iter<'a, Active, C, T> +where + C: 'static, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn new(version: u64, table: &'a Table) -> Self { + Self { + iter: IterPoints::new(table.skl.iter(version)), + query_version: version, + table, + } + } +} + +impl<'a, S, C, T> Iterator for Iter<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + TypeRefComparator<'a, RecordPointer> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, RefQuery< as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + type Item = EntryRef<'a, S, C, T>; + + #[inline] + fn next(&mut self) -> Option { + loop { + let next = self.iter.next()?; + match self.table.validate(self.query_version, next) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} + +impl<'a, S, C, T> DoubleEndedIterator for Iter<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + TypeRefComparator<'a, RecordPointer> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, RefQuery< as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + #[inline] + fn next_back(&mut self) -> Option { + loop { + let prev = self.iter.next_back()?; + match self.table.validate(self.query_version, prev) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} + +/// An iterator over the entries of a `Memtable`. 
+pub struct Range<'a, S, Q, R, C, T> +where + R: RangeBounds, + Q: ?Sized, + C: 'static, + T: Mode, + S: State, +{ + table: &'a Table, + iter: RangePoints<'a, S, Q, R, C, T>, + query_version: u64, +} + +impl<'a, Q, R, C, T> Range<'a, Active, Q, R, C, T> +where + C: 'static, + R: RangeBounds + 'a, + Q: ?Sized, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn new(version: u64, table: &'a Table, r: R) -> Self { + Self { + iter: RangePoints::new(table.skl.range(version, r.into())), + query_version: version, + table, + } + } +} + +impl<'a, Q, R, C, T> Range<'a, MaybeTombstone, Q, R, C, T> +where + C: 'static, + R: RangeBounds + 'a, + Q: ?Sized, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn with_tombstone(version: u64, table: &'a Table, r: R) -> Self { + Self { + iter: RangePoints::new(table.skl.range_all(version, r.into())), + query_version: version, + table, + } + } +} + +impl<'a, S, Q, R, C, T> Iterator for Range<'a, S, Q, R, C, T> +where + R: RangeBounds, + Q: ?Sized, + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + as Pointee<'a>>::Output: 'a, + as Pointee<'a>>::Output: 'a, + T::Comparator: PointComparator + + TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, Query> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, RefQuery< as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + type Item = EntryRef<'a, S, C, T>; + + #[inline] + fn next(&mut self) -> Option { + loop { + let next = self.iter.next()?; + match self.table.validate(self.query_version, next) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} + +impl<'a, S, Q, R, C, T> DoubleEndedIterator for Range<'a, S, Q, R, C, T> +where + R: RangeBounds, + Q: ?Sized, + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + as Pointee<'a>>::Output: 'a, + as Pointee<'a>>::Output: 'a, + T::Comparator: PointComparator + + TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, Query> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + + TypeRefQueryComparator<'a, RecordPointer, RefQuery< as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + #[inline] + fn next_back(&mut self) -> Option { + loop { + let prev = self.iter.next_back()?; + match 
self.table.validate(self.query_version, prev) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} diff --git a/src/memtable/bounded/point.rs b/src/memtable/bounded/point.rs new file mode 100644 index 00000000..f9a01c2d --- /dev/null +++ b/src/memtable/bounded/point.rs @@ -0,0 +1,306 @@ +use core::{cell::OnceCell, ops::RangeBounds}; + +use skl::{ + generic::{ + multiple_version::sync::{Entry, Iter, Range}, + LazyRef, TypeRefComparator, TypeRefQueryComparator, + }, + State, +}; + +use crate::{ + memtable::{sealed, Transfer}, + types::{ + sealed::{PointComparator, Pointee}, + Mode, Query, QueryRange, RawEntryRef, RecordPointer, + }, +}; + +/// Point entry. +pub struct PointEntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + pub(in crate::memtable) ent: Entry<'a, RecordPointer, RecordPointer, S, T::Comparator>, + data: OnceCell>, + key: OnceCell>, + pub(in crate::memtable) value: OnceCell>>, +} + +impl core::fmt::Debug for PointEntryRef<'_, S, C, T> +where + S: State, + T: Mode, + T::Comparator: PointComparator, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + self + .data + .get_or_init(|| self.ent.comparator().fetch_entry(self.ent.key())) + .write_fmt("PointEntryRef", f) + } +} + +impl<'a, S, C, T> Clone for PointEntryRef<'a, S, C, T> +where + S: State, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + S::Data<'a, T::Value<'a>>: Clone, + T: Mode, + T::Key<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ent: self.ent.clone(), + data: self.data.clone(), + key: self.key.clone(), + value: self.value.clone(), + } + } +} +impl<'a, S, C, T> PointEntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) fn new( + ent: Entry<'a, RecordPointer, RecordPointer, S, T::Comparator>, + ) -> Self { + Self { + ent, + data: OnceCell::new(), + key: OnceCell::new(), + value: OnceCell::new(), + } + } +} + +impl<'a, S, C, T> crate::memtable::RawEntry<'a> for PointEntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, &'a [u8]>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + TypeRefComparator<'a, RecordPointer>, +{ + type RawValue = S::Data<'a, &'a [u8]>; + + #[inline] + fn raw_key(&self) -> &'a [u8] { + let ent = self.data.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + match ptr { + Some(ptr) => self.ent.comparator().fetch_entry(&ptr), + None => self.ent.comparator().fetch_entry(self.ent.key()), + } + }); + + ent.key() + } + + #[inline] + fn raw_value(&self) -> Self::RawValue { + let ent = self.data.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + match ptr { + Some(ptr) => self.ent.comparator().fetch_entry(&ptr), + None => self.ent.comparator().fetch_entry(self.ent.key()), + } + }); + + S::raw(ent.value()) + } +} + +impl<'a, S, C, T> crate::memtable::Entry<'a> for PointEntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + TypeRefComparator<'a, RecordPointer>, +{ + type Key = as Pointee<'a>>::Output; + type Value = S::Data<'a, S::Value>; + + #[inline] + fn key(&self) -> Self::Key { + self + .key + .get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let ent = match ptr { + Some(ptr) => self + .data + .get_or_init(|| self.ent.comparator().fetch_entry(&ptr)), + None => self + .data + .get_or_init(|| 
self.ent.comparator().fetch_entry(self.ent.key())), + }; + + as Pointee<'a>>::from_input(ent.key()) + }) + .output() + } + + #[inline] + fn value(&self) -> Self::Value { + let val = self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_entry(&ptr)); + + >>::from_input(ent.value()) + }); + S::into_state(data) + }); + >>::transfer(val) + } + + #[inline] + fn next(&self) -> Option { + self.ent.next().map(Self::new) + } + + #[inline] + fn prev(&self) -> Option { + self.ent.prev().map(Self::new) + } + + #[inline] + fn version(&self) -> u64 { + self.ent.version() + } +} + +impl PointEntryRef<'_, S, C, T> +where + C: 'static, + S: State, + T: Mode, +{ + /// Returns the version of the entry. + #[inline] + pub fn version(&self) -> u64 { + self.ent.version() + } +} + +/// The iterator for point entries. +pub struct IterPoints<'a, S, C, T> +where + S: State, + T: Mode, +{ + iter: Iter<'a, RecordPointer, RecordPointer, S, T::Comparator>, +} + +impl<'a, S, C, T> IterPoints<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + iter: Iter<'a, RecordPointer, RecordPointer, S, T::Comparator>, + ) -> Self { + Self { iter } + } +} + +impl<'a, S, C, T> Iterator for IterPoints<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + T: Mode, + T::Comparator: TypeRefComparator<'a, RecordPointer> + 'a, +{ + type Item = PointEntryRef<'a, S, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(PointEntryRef::new) + } +} + +impl<'a, S, C, T> DoubleEndedIterator for IterPoints<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + T: Mode, + T::Comparator: TypeRefComparator<'a, RecordPointer> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(PointEntryRef::new) + } +} + +/// The iterator over a subset of point entries. 
+pub struct RangePoints<'a, S, Q, R, C, T> +where + S: State, + Q: ?Sized, + T: Mode, +{ + range: Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::Comparator>, +} + +impl<'a, S, Q, R, C, T> RangePoints<'a, S, Q, R, C, T> +where + S: State, + Q: ?Sized, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + range: Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::Comparator>, + ) -> Self { + Self { range } + } +} + +impl<'a, S, Q, R, C, T> Iterator for RangePoints<'a, S, Q, R, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::Comparator: TypeRefQueryComparator<'a, RecordPointer, Query> + 'a, +{ + type Item = PointEntryRef<'a, S, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.range.next().map(PointEntryRef::new) + } +} + +impl<'a, S, Q, R, C, T> DoubleEndedIterator for RangePoints<'a, S, Q, R, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::Comparator: TypeRefQueryComparator<'a, RecordPointer, Query> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.range.next_back().map(PointEntryRef::new) + } +} diff --git a/src/memtable/bounded/range_entry.rs b/src/memtable/bounded/range_entry.rs new file mode 100644 index 00000000..42aa1673 --- /dev/null +++ b/src/memtable/bounded/range_entry.rs @@ -0,0 +1,383 @@ +use core::{ + cell::OnceCell, + marker::PhantomData, + ops::{Bound, RangeBounds}, +}; + +use skl::{ + generic::{ + multiple_version::sync::{Entry, Iter, Range}, + LazyRef, TypeRefComparator, TypeRefQueryComparator, + }, + State, +}; + +use crate::{ + memtable::{sealed, Transfer}, + types::{ + sealed::{Pointee, RangeComparator}, + BulkOperation, Mode, Query, QueryRange, RecordPointer, WithValue, + }, +}; + +/// Range update entry. 
+pub struct RangeEntryRef<'a, S, O, C, T> +where + O: BulkOperation, + S: State, + T: Mode, +{ + pub(crate) ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + data: OnceCell>, + start_bound: OnceCell>>, + end_bound: OnceCell>>, + value: OnceCell>>, + _op: PhantomData, +} + +impl<'a, S, O, C, T> core::fmt::Debug for RangeEntryRef<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: State, + T: Mode, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + RangeComparator, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + O::fmt( + self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())), + "RangeEntryRef", + f, + ) + } +} + +impl<'a, S, O, C, T> Clone for RangeEntryRef<'a, S, O, C, T> +where + O: BulkOperation, + O::Output<'a>: Clone, + S: State, + S::Data<'a, T::Value<'a>>: Clone, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + T: Mode, + T::Key<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ent: self.ent.clone(), + data: self.data.clone(), + start_bound: self.start_bound.clone(), + end_bound: self.end_bound.clone(), + value: self.value.clone(), + _op: PhantomData, + } + } +} + +impl<'a, S, O, C, T> RangeEntryRef<'a, S, O, C, T> +where + O: BulkOperation, + S: State, + T: Mode, +{ + pub(in crate::memtable) fn new( + ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { + ent, + data: OnceCell::new(), + start_bound: OnceCell::new(), + end_bound: OnceCell::new(), + value: OnceCell::new(), + _op: PhantomData, + } + } +} + +impl<'a, S, O, C, T> crate::memtable::RawRangeEntry<'a, O> for RangeEntryRef<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S::Data<'a, &'a [u8]>: 'a, + S: Transfer<'a, T::Value<'a>>, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + RangeComparator, +{ + type RawValue + = S::Data<'a, &'a [u8]> + where + O: WithValue; + + #[inline] + fn raw_start_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + + O::start_bound(ent) + } + + #[inline] + fn raw_end_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + O::end_bound(ent) + } + + #[inline] + fn raw_value(&self) -> Self::RawValue + where + O: WithValue, + { + let ent = self.data.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + match ptr { + Some(ptr) => O::fetch(self.ent.comparator(), &ptr), + None => O::fetch(self.ent.comparator(), self.ent.key()), + } + }); + + S::raw(O::value(ent)) + } +} + +impl<'a, S, O, C, T> crate::memtable::RangeEntry<'a, O> for RangeEntryRef<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + RangeComparator, +{ + type Key = as Pointee<'a>>::Output; + + type Value + = S::Data<'a, S::Value> + where + O: WithValue; + + #[inline] + fn start_bound(&self) -> Bound { + let start_bound = self.start_bound.get_or_init(|| { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + O::start_bound(ent).map( as Pointee>::from_input) + }); + start_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn end_bound(&self) -> Bound { + let end_bound = self.end_bound.get_or_init(|| { + let ent = self + .data + 
.get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + O::end_bound(ent).map( as Pointee>::from_input) + }); + end_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn value(&self) -> Self::Value + where + O: WithValue, + { + let val = self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), &ptr)); + + >>::from_input(O::value(ent)) + }); + S::into_state(data) + }); + >>::transfer(val) + } + + #[inline] + fn next(&mut self) -> Option { + self.ent.next().map(Self::new) + } + + #[inline] + fn prev(&mut self) -> Option { + self.ent.prev().map(Self::new) + } + + #[inline] + fn version(&self) -> u64 { + self.ent.version() + } +} + +impl<'a, S, O, C, T> RangeEntryRef<'a, S, O, C, T> +where + C: 'static, + O: WithValue, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + RangeComparator, +{ + #[inline] + pub(in crate::memtable) fn into_value(self) -> S::Data<'a, T::Value<'a>> { + self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), &ptr)); + + >>::from_input(O::value(ent)) + }); + S::into_state(data) + }); + self.value.into_inner().unwrap() + } +} + +/// The iterator for point entries. +pub struct IterBulkOperations<'a, S, O, C, T> +where + S: State, + T: Mode, +{ + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + _op: PhantomData, +} + +impl<'a, S, O, C, T> IterBulkOperations<'a, S, O, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { + iter, + _op: PhantomData, + } + } +} + +impl<'a, S, O, C, T> Iterator for IterBulkOperations<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + T: Mode, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + 'a, +{ + type Item = RangeEntryRef<'a, S, O, C, T>; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(RangeEntryRef::new) + } +} + +impl<'a, S, O, C, T> DoubleEndedIterator for IterBulkOperations<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + T: Mode, + T::RangeComparator: TypeRefComparator<'a, RecordPointer> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(RangeEntryRef::new) + } +} + +/// The iterator over a subset of point entries. 
+pub struct RangeBulkOperations<'a, S, O, Q, R, C, T> +where + O: BulkOperation, + S: State, + Q: ?Sized, + T: Mode, +{ + range: + Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::RangeComparator>, + _op: PhantomData, +} + +impl<'a, S, O, Q, R, C, T> RangeBulkOperations<'a, S, O, Q, R, C, T> +where + O: BulkOperation, + S: State, + Q: ?Sized, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + range: Range< + 'a, + RecordPointer, + RecordPointer, + S, + Query, + QueryRange, + T::RangeComparator, + >, + ) -> Self { + Self { + range, + _op: PhantomData, + } + } +} + +impl<'a, S, O, Q, R, C, T> Iterator for RangeBulkOperations<'a, S, O, Q, R, C, T> +where + C: 'static, + O: BulkOperation, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: TypeRefQueryComparator<'a, RecordPointer, Query> + 'a, +{ + type Item = RangeEntryRef<'a, S, O, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.range.next().map(RangeEntryRef::new) + } +} + +impl<'a, S, O, Q, R, C, T> DoubleEndedIterator for RangeBulkOperations<'a, S, O, Q, R, C, T> +where + C: 'static, + O: BulkOperation, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: TypeRefQueryComparator<'a, RecordPointer, Query> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.range.next_back().map(RangeEntryRef::new) + } +} diff --git a/src/memtable/dynamic.rs b/src/memtable/dynamic.rs new file mode 100644 index 00000000..b876af7e --- /dev/null +++ b/src/memtable/dynamic.rs @@ -0,0 +1,278 @@ +use core::{ + borrow::Borrow, + ops::{Bound, RangeBounds}, +}; + +use dbutils::state::{Active, MaybeTombstone, State}; + +use crate::{ + memtable::Memtable, + types::{BulkOperation, Remove, Update}, +}; + +/// Bounded memtable implementation based on ARNEA based [`SkipMap`](skl::generic::multiple_version::sync::SkipMap)s. +#[cfg(feature = "skl")] +#[cfg_attr(docsrs, doc(cfg(feature = "bounded")))] +pub mod bounded; + +/// Unbounded memtable implementation based on ARNEA based [`SkipMap`](crossbeam_skiplist_mvcc::nested::SkipMap)s. +#[cfg(feature = "crossbeam-skiplist-mvcc")] +#[cfg_attr(docsrs, doc(cfg(feature = "unbounded")))] +pub mod unbounded; + +mod comparator; +mod range_comparator; + +pub(crate) use comparator::MemtableComparator; +pub(crate) use range_comparator::MemtableRangeComparator; + +/// A memory table which is used to store pointers to the underlying entries. +pub trait DynamicMemtable: Memtable { + /// The item returned by the iterator or query methods. + type Entry<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The item returned by the point iterators + type PointEntry<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The item returned by the bulk operations iterators + type RangeEntry<'a, S, O> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + /// The iterator type. + type Iterator<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The range iterator type. + type Range<'a, S, Q, R> + where + Self: 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + S: State + 'a; + + /// The iterator over point entries. + type PointsIterator<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The range iterator over point entries. + type RangePoints<'a, S, Q, R> + where + Self: 'a, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// The iterator over range deletions entries. 
+ type BulkOperationsIterator<'a, S, O> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + /// The range iterator over range deletions entries. + type BulkOperationsRange<'a, S, O, Q, R> + where + Self: 'a, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + O: BulkOperation; + + /// Returns the maximum version of the memtable. + fn maximum_version(&self) -> u64; + + /// Returns the minimum version of the memtable. + fn minimum_version(&self) -> u64; + + /// Returns `true` if the memtable may contain an entry whose version is less than or equal to the specified version. + fn may_contain_version(&self, version: u64) -> bool; + + /// Returns the upper bound of the memtable. + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>; + + /// Returns the lower bound of the memtable. + fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>; + + /// Returns the upper bound of the memtable. + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>; + + /// Returns the lower bound of the memtable. + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>; + + /// Returns the first pointer in the memtable. + fn first(&self, version: u64) -> Option>; + + /// Returns the last pointer in the memtable. + fn last(&self, version: u64) -> Option>; + + /// Returns the first pointer in the memtable. + fn first_with_tombstone(&self, version: u64) -> Option>; + + /// Returns the last pointer in the memtable. + fn last_with_tombstone(&self, version: u64) -> Option>; + + /// Returns the pointer associated with the key. + fn get(&self, version: u64, key: &Q) -> Option> + where + Q: ?Sized + Borrow<[u8]>; + + /// Returns `true` if the memtable contains the specified pointer. + fn contains(&self, version: u64, key: &Q) -> bool + where + Q: ?Sized + Borrow<[u8]>, + { + self.get(version, key).is_some() + } + + /// Returns the pointer associated with the key. + fn get_with_tombstone(&self, version: u64, key: &Q) -> Option> + where + Q: ?Sized + Borrow<[u8]>; + + /// Returns `true` if the memtable contains the specified pointer. + fn contains_with_tombsone(&self, version: u64, key: &Q) -> bool + where + Q: ?Sized + Borrow<[u8]>, + { + self.get_with_tombstone(version, key).is_some() + } + + /// Returns an iterator over the memtable. + fn iter(&self, version: u64) -> Self::Iterator<'_, Active>; + + /// Returns an iterator over a subset of the memtable. + fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// Returns an iterator over the memtable. + fn iter_all(&self, version: u64) -> Self::Iterator<'_, MaybeTombstone>; + + /// Returns an iterator over a subset of the memtable. + fn range_all<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// Returns an iterator over point entries in the memtable. + fn iter_points(&self, version: u64) -> Self::PointsIterator<'_, Active>; + + /// Returns an iterator over all(including all versions and tombstones) the point entries in the memtable. 
+ fn iter_all_points(&self, version: u64) -> Self::PointsIterator<'_, MaybeTombstone>; + + /// Returns an iterator over a subset of point entries in the memtable. + fn range_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// Returns an iterator over all(including all versions and tombstones) the point entries in a subset of the memtable. + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// Returns an iterator over range deletions entries in the memtable. + fn iter_bulk_removes(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Remove>; + + /// Returns an iterator over all(including all versions and tombstones) the range deletions entries in the memtable. + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Remove>; + + /// Returns an iterator over a subset of range deletions entries in the memtable. + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// Returns an iterator over all(including all versions and tombstones) the range deletions entries in a subset of the memtable. + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// Returns an iterator over range updates entries in the memtable. + fn iter_bulk_updates(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Update>; + + /// Returns an iterator over all(including all versions and tombstones) the range updates entries in the memtable. + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Update>; + + /// Returns an iterator over a subset of range updates entries in the memtable. + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + /// Returns an iterator over all(including all versions and tombstones) the range updates entries in a subset of the memtable. + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; +} diff --git a/src/memtable/dynamic/bounded.rs b/src/memtable/dynamic/bounded.rs new file mode 100644 index 00000000..97a26dc4 --- /dev/null +++ b/src/memtable/dynamic/bounded.rs @@ -0,0 +1,377 @@ +use core::{ + borrow::Borrow, + ops::{Bound, ControlFlow, RangeBounds}, +}; + +use ref_cast::RefCast as _; +use skl::{dynamic::BytesComparator, generic::multiple_version::Map as _, Active, MaybeTombstone}; + +use crate::{ + memtable::bounded, + state::State, + types::{Dynamic, Query, Remove, Update}, +}; + +use super::DynamicMemtable; + +/// Dynamic multiple version memtable implementation based on ARNEA based [`SkipMap`](skl::generic::multiple_version::sync::SkipMap)s. +pub type Table = bounded::Table; + +/// Entry of the [`Table`]. +pub type EntryRef<'a, S, C> = bounded::EntryRef<'a, S, C, Dynamic>; + +/// Point entry of the [`Table`]. 
+pub type PointEntryRef<'a, S, C> = bounded::PointEntryRef<'a, S, C, Dynamic>; + +/// Range entry of the [`Table`]. +pub type RangeEntryRef<'a, S, O, C> = bounded::RangeEntryRef<'a, S, O, C, Dynamic>; + +/// Iterator of the [`Table`]. +pub type Iter<'a, S, C> = bounded::Iter<'a, S, C, Dynamic>; + +/// Range iterator of the [`Table`]. +pub type Range<'a, S, Q, R, C> = bounded::Range<'a, S, Q, R, C, Dynamic>; + +/// Point iterator of the [`Table`]. +pub type IterPoints<'a, S, C> = bounded::IterPoints<'a, S, C, Dynamic>; + +/// Range point iterator of the [`Table`]. +pub type RangePoints<'a, S, Q, R, C> = bounded::RangePoints<'a, S, Q, R, C, Dynamic>; + +/// Bulk operations iterator of the [`Table`]. +pub type IterBulkOperations<'a, S, O, C> = bounded::IterBulkOperations<'a, S, O, C, Dynamic>; + +/// Bulk operations range iterator of the [`Table`]. +pub type RangeBulkOperations<'a, S, O, Q, R, C> = + bounded::RangeBulkOperations<'a, S, O, Q, R, C, Dynamic>; + +impl DynamicMemtable for Table +where + C: BytesComparator + 'static, +{ + type Entry<'a, S> + = EntryRef<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type PointEntry<'a, S> + = PointEntryRef<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type RangeEntry<'a, S, O> + = RangeEntryRef<'a, S, O, C> + where + Self: 'a, + S: State + 'a, + O: crate::types::BulkOperation; + + type Iterator<'a, S> + = Iter<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type Range<'a, S, Q, R> + = Range<'a, S, Q, R, C> + where + Self: 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + S: State + 'a; + + type PointsIterator<'a, S> + = IterPoints<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type RangePoints<'a, S, Q, R> + = RangePoints<'a, S, Q, R, C> + where + Self: 'a, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + type BulkOperationsIterator<'a, S, O> + = IterBulkOperations<'a, S, O, C> + where + Self: 'a, + S: State + 'a, + O: crate::types::BulkOperation; + + type BulkOperationsRange<'a, S, O, Q, R> + = RangeBulkOperations<'a, S, O, Q, R, C> + where + Self: 'a, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + O: crate::types::BulkOperation; + + #[inline] + fn maximum_version(&self) -> u64 { + self + .skl + .maximum_version() + .max(self.range_deletions_skl.maximum_version()) + .max(self.range_updates_skl.maximum_version()) + } + + #[inline] + fn minimum_version(&self) -> u64 { + self + .skl + .minimum_version() + .min(self.range_deletions_skl.minimum_version()) + .min(self.range_updates_skl.minimum_version()) + } + + #[inline] + fn may_contain_version(&self, version: u64) -> bool { + self.skl.may_contain_version(version) + || self.range_deletions_skl.may_contain_version(version) + || self.range_updates_skl.may_contain_version(version) + } + + #[inline] + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range::(version, (bound, Bound::Unbounded)) + .next() + } + + #[inline] + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range_all::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: 
u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range_all::(version, (bound, Bound::Unbounded)) + .next() + } + + #[inline] + fn first(&self, version: u64) -> Option> { + self.iter(version).next() + } + + #[inline] + fn last(&self, version: u64) -> Option> { + self.iter(version).next_back() + } + + #[inline] + fn first_with_tombstone(&self, version: u64) -> Option> { + self.iter_all(version).next() + } + + #[inline] + fn last_with_tombstone(&self, version: u64) -> Option> { + self.iter_all(version).next_back() + } + + #[inline] + fn get(&self, version: u64, key: &Q) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + let ent = self.skl.get(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + + #[inline] + fn get_with_tombstone(&self, version: u64, key: &Q) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + let ent = self.skl.get_with_tombstone(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + + #[inline] + fn iter(&self, version: u64) -> Self::Iterator<'_, Active> { + Iter::new(version, self) + } + + #[inline] + fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + Range::new(version, self, range) + } + + #[inline] + fn iter_all(&self, version: u64) -> Self::Iterator<'_, MaybeTombstone> { + Iter::with_tombstone(version, self) + } + + #[inline] + fn range_all<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + Range::with_tombstone(version, self, range) + } + + #[inline] + fn iter_points(&self, version: u64) -> Self::PointsIterator<'_, Active> { + IterPoints::new(self.skl.iter(version)) + } + + #[inline] + fn iter_all_points(&self, version: u64) -> Self::PointsIterator<'_, MaybeTombstone> { + IterPoints::new(self.skl.iter_all(version)) + } + + #[inline] + fn range_points<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangePoints<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangePoints::new(self.skl.range(version, range.into())) + } + + #[inline] + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangePoints::new(self.skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_removes(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_deletions_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + 
Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_deletions_skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_updates(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Update> { + IterBulkOperations::new(self.range_updates_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Update> { + IterBulkOperations::new(self.range_updates_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_updates_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_updates_skl.range_all(version, range.into())) + } +} diff --git a/src/memtable/dynamic/comparator.rs b/src/memtable/dynamic/comparator.rs new file mode 100644 index 00000000..906f9ae7 --- /dev/null +++ b/src/memtable/dynamic/comparator.rs @@ -0,0 +1,228 @@ +use core::{borrow::Borrow, cmp}; + +use dbutils::equivalentor::{ + BytesComparator, BytesEquivalentor, Comparator, Equivalentor, QueryComparator, QueryEquivalentor, + TypeRefComparator, TypeRefEquivalentor, TypeRefQueryComparator, TypeRefQueryEquivalentor, +}; +use triomphe::Arc; + +use crate::types::{fetch_entry, fetch_raw_key, Query, RawEntryRef, RecordPointer}; + +pub struct MemtableComparator { + /// The start pointer of the parent ARENA. + ptr: *const u8, + cmp: Arc, +} + +unsafe impl Send for MemtableComparator {} +unsafe impl Sync for MemtableComparator {} + +impl crate::types::sealed::ComparatorConstructor for MemtableComparator { + #[inline] + fn new(ptr: *const u8, cmp: Arc) -> Self { + Self { ptr, cmp } + } +} + +impl crate::types::sealed::PointComparator for MemtableComparator { + #[inline] + fn fetch_entry<'a>(&self, kp: &RecordPointer) -> RawEntryRef<'a> { + unsafe { fetch_entry(self.ptr, kp) } + } +} + +impl MemtableComparator { + #[inline] + fn equivalent_key(&self, a: &RecordPointer, b: &[u8]) -> bool + where + C: BytesEquivalentor, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + self.cmp.equivalent(ak, b) + } + } + + #[inline] + fn equivalent_in(&self, a: &RecordPointer, b: &RecordPointer) -> bool + where + C: BytesEquivalentor, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + let (_, bk) = fetch_raw_key(self.ptr, b); + self.cmp.equivalent(ak, bk) + } + } + + #[inline] + fn compare_key(&self, a: &RecordPointer, b: &[u8]) -> cmp::Ordering + where + C: BytesComparator, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + self.cmp.compare(ak, b) + } + } + + #[inline] + fn compare_in(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering + where + C: BytesComparator, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + let (_, bk) = fetch_raw_key(self.ptr, b); + self.cmp.compare(ak, bk) + } + } +} + +impl Clone for MemtableComparator { + #[inline] + fn clone(&self) -> Self { + Self { + ptr: self.ptr, + cmp: self.cmp.clone(), + } + } +} + +impl core::fmt::Debug for MemtableComparator +where + C: core::fmt::Debug + ?Sized, +{ + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("MemtableComparator") + 
.field("ptr", &self.ptr) + .field("cmp", &self.cmp) + .finish() + } +} + +impl Equivalentor> for MemtableComparator +where + C: BytesEquivalentor + ?Sized, +{ + #[inline] + fn equivalent(&self, a: &Query<&[u8]>, b: &Query<&[u8]>) -> bool { + self.cmp.equivalent(a.0, b.0) + } +} + +impl Comparator> for MemtableComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn compare(&self, a: &Query<&[u8]>, b: &Query<&[u8]>) -> cmp::Ordering { + self.cmp.compare(a.0, b.0) + } +} + +impl Equivalentor for MemtableComparator +where + C: BytesEquivalentor + ?Sized, +{ + #[inline] + fn equivalent(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl TypeRefEquivalentor<'_, RecordPointer> for MemtableComparator +where + C: BytesEquivalentor + ?Sized, +{ + #[inline] + fn equivalent_ref(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } + + #[inline] + fn equivalent_refs(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl TypeRefQueryEquivalentor<'_, RecordPointer, Q> for MemtableComparator +where + C: BytesEquivalentor + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &Q) -> bool { + self.equivalent_key(a, b.borrow()) + } +} + +impl Comparator for MemtableComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn compare(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl TypeRefComparator<'_, RecordPointer> for MemtableComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn compare_ref(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } + + #[inline] + fn compare_refs(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl TypeRefQueryEquivalentor<'_, RecordPointer, Query> for MemtableComparator +where + C: BytesEquivalentor + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &Query) -> bool { + self.equivalent_key(a, b.0.borrow()) + } +} + +impl TypeRefQueryComparator<'_, RecordPointer, Query> for MemtableComparator +where + C: BytesComparator + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_compare_ref(&self, a: &RecordPointer, b: &Query) -> cmp::Ordering { + self.compare_key(a, b.0.borrow()) + } +} + +impl QueryEquivalentor> for MemtableComparator +where + C: BytesEquivalentor + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_equivalent(&self, a: &RecordPointer, b: &Query) -> bool { + self.equivalent_key(a, b.0.borrow()) + } +} + +impl QueryComparator> for MemtableComparator +where + C: BytesComparator + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_compare(&self, a: &RecordPointer, b: &Query) -> cmp::Ordering { + self.compare_key(a, b.0.borrow()) + } +} diff --git a/src/memtable/dynamic/range_comparator.rs b/src/memtable/dynamic/range_comparator.rs new file mode 100644 index 00000000..ef5dba55 --- /dev/null +++ b/src/memtable/dynamic/range_comparator.rs @@ -0,0 +1,296 @@ +use core::{borrow::Borrow, cmp, ops::Bound}; + +use dbutils::equivalentor::{ + BytesComparator, BytesEquivalentor, Comparator, Equivalentor, QueryComparator, QueryEquivalentor, + TypeRefComparator, TypeRefEquivalentor, TypeRefQueryComparator, TypeRefQueryEquivalentor, +}; + +use triomphe::Arc; + +use crate::types::{ + fetch_raw_range_deletion_entry, fetch_raw_range_key_start_bound, 
fetch_raw_range_update_entry, + Query, RawRangeRemoveRef, RawRangeUpdateRef, RecordPointer, RefQuery, +}; + +pub struct MemtableRangeComparator { + /// The start pointer of the parent ARENA. + ptr: *const u8, + cmp: Arc, +} + +unsafe impl Send for MemtableRangeComparator {} +unsafe impl Sync for MemtableRangeComparator {} + +impl crate::types::sealed::ComparatorConstructor for MemtableRangeComparator { + #[inline] + fn new(ptr: *const u8, cmp: Arc) -> Self { + Self { ptr, cmp } + } +} + +impl crate::types::sealed::RangeComparator for MemtableRangeComparator { + fn fetch_range_update<'a>(&self, kp: &RecordPointer) -> RawRangeUpdateRef<'a> { + unsafe { fetch_raw_range_update_entry(self.ptr, kp) } + } + + fn fetch_range_deletion<'a>(&self, kp: &RecordPointer) -> RawRangeRemoveRef<'a> { + unsafe { fetch_raw_range_deletion_entry(self.ptr, kp) } + } +} + +impl MemtableRangeComparator { + #[inline] + fn equivalent_start_key(&self, a: &RecordPointer, b: &[u8]) -> bool + where + C: BytesEquivalentor, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a); + match ak { + Bound::Included(k) => self.cmp.equivalent(k, b), + Bound::Excluded(k) => self.cmp.equivalent(k, b), + Bound::Unbounded => false, + } + } + } + + #[inline] + fn equivalent_in(&self, a: &RecordPointer, b: &RecordPointer) -> bool + where + C: BytesEquivalentor, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a); + let bk = fetch_raw_range_key_start_bound(self.ptr, b); + + match (ak, bk) { + (Bound::Unbounded, Bound::Unbounded) => true, + (Bound::Included(_), Bound::Unbounded) => false, + (Bound::Excluded(_), Bound::Unbounded) => false, + (Bound::Unbounded, Bound::Included(_)) => false, + (Bound::Unbounded, Bound::Excluded(_)) => false, + + (Bound::Included(a), Bound::Included(b)) => self.cmp.equivalent(a, b), + (Bound::Included(a), Bound::Excluded(b)) => self.cmp.equivalent(a, b), + (Bound::Excluded(a), Bound::Included(b)) => self.cmp.equivalent(a, b), + (Bound::Excluded(a), Bound::Excluded(b)) => self.cmp.equivalent(a, b), + } + } + } + + #[inline] + fn compare_start_key(&self, a: &RecordPointer, b: &[u8]) -> cmp::Ordering + where + C: BytesComparator, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a); + match ak { + Bound::Included(k) => self.cmp.compare(k, b), + Bound::Excluded(k) => self.cmp.compare(k, b).then(cmp::Ordering::Greater), + Bound::Unbounded => cmp::Ordering::Less, + } + } + } + + #[inline] + fn compare_in(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering + where + C: BytesComparator, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a); + let bk = fetch_raw_range_key_start_bound(self.ptr, b); + + match (ak, bk) { + (Bound::Included(_), Bound::Unbounded) => cmp::Ordering::Greater, + (Bound::Excluded(_), Bound::Unbounded) => cmp::Ordering::Greater, + (Bound::Unbounded, Bound::Included(_)) => cmp::Ordering::Less, + (Bound::Unbounded, Bound::Excluded(_)) => cmp::Ordering::Less, + (Bound::Unbounded, Bound::Unbounded) => cmp::Ordering::Equal, + + (Bound::Included(a), Bound::Included(b)) => self.cmp.compare(a, b), + (Bound::Included(a), Bound::Excluded(b)) => self.cmp.compare(a, b), + (Bound::Excluded(a), Bound::Included(b)) => self.cmp.compare(a, b), + (Bound::Excluded(a), Bound::Excluded(b)) => self.cmp.compare(a, b), + } + } + } +} + +impl Clone for MemtableRangeComparator { + #[inline] + fn clone(&self) -> Self { + Self { + ptr: self.ptr, + cmp: self.cmp.clone(), + } + } +} + +impl core::fmt::Debug for MemtableRangeComparator +where + 
C: core::fmt::Debug + ?Sized, +{ + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("MemtableRangeComparator") + .field("ptr", &self.ptr) + .field("cmp", &self.cmp) + .finish() + } +} + +impl Equivalentor for MemtableRangeComparator +where + C: BytesEquivalentor + ?Sized, +{ + #[inline] + fn equivalent(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl TypeRefEquivalentor<'_, RecordPointer> for MemtableRangeComparator +where + C: BytesEquivalentor + ?Sized, +{ + #[inline] + fn equivalent_ref(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } + + #[inline] + fn equivalent_refs(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl TypeRefQueryEquivalentor<'_, RecordPointer, Q> for MemtableRangeComparator +where + C: BytesEquivalentor + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &Q) -> bool { + self.equivalent_start_key(a, b.borrow()) + } +} + +impl Comparator for MemtableRangeComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn compare(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl TypeRefComparator<'_, RecordPointer> for MemtableRangeComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn compare_ref(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } + + fn compare_refs(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl TypeRefQueryComparator<'_, RecordPointer, Q> for MemtableRangeComparator +where + C: BytesComparator + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_compare_ref(&self, a: &RecordPointer, b: &Q) -> cmp::Ordering { + self.compare_start_key(a, b.borrow()) + } +} + +impl TypeRefQueryEquivalentor<'_, RecordPointer, RecordPointer> for MemtableRangeComparator +where + C: BytesComparator + ?Sized, +{ + fn query_equivalent_ref(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl TypeRefQueryComparator<'_, RecordPointer, RecordPointer> for MemtableRangeComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn query_compare_ref(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl TypeRefQueryEquivalentor<'_, RecordPointer, Query> for MemtableRangeComparator +where + C: BytesEquivalentor + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &Query) -> bool { + self.equivalent_start_key(a, b.0.borrow()) + } +} + +impl TypeRefQueryComparator<'_, RecordPointer, Query> for MemtableRangeComparator +where + C: BytesComparator + ?Sized, + Q: ?Sized + Borrow<[u8]>, +{ + #[inline] + fn query_compare_ref(&self, a: &RecordPointer, b: &Query) -> cmp::Ordering { + self.compare_start_key(a, b.0.borrow()) + } +} + +impl<'a, C> TypeRefQueryEquivalentor<'a, RecordPointer, RefQuery<&'a [u8]>> + for MemtableRangeComparator +where + C: BytesEquivalentor + ?Sized, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &RefQuery<&'a [u8]>) -> bool { + self.equivalent_start_key(a, b.query) + } +} + +impl<'a, C> TypeRefQueryComparator<'a, RecordPointer, RefQuery<&'a [u8]>> + for MemtableRangeComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn query_compare_ref(&self, a: &RecordPointer, b: 
&RefQuery<&'a [u8]>) -> cmp::Ordering { + self.compare_start_key(a, b.query) + } +} + +impl<'a, C> QueryEquivalentor> for MemtableRangeComparator +where + C: BytesEquivalentor + ?Sized, +{ + #[inline] + fn query_equivalent(&self, a: &RecordPointer, b: &RefQuery<&'a [u8]>) -> bool { + self.equivalent_start_key(a, b.query) + } +} + +impl<'a, C> QueryComparator> for MemtableRangeComparator +where + C: BytesComparator + ?Sized, +{ + #[inline] + fn query_compare(&self, a: &RecordPointer, b: &RefQuery<&'a [u8]>) -> cmp::Ordering { + self.compare_start_key(a, b.query) + } +} diff --git a/src/memtable/dynamic/unbounded.rs b/src/memtable/dynamic/unbounded.rs new file mode 100644 index 00000000..211637c8 --- /dev/null +++ b/src/memtable/dynamic/unbounded.rs @@ -0,0 +1,379 @@ +use core::{ + borrow::Borrow, + ops::{Bound, ControlFlow, RangeBounds}, +}; + +use dbutils::{ + equivalentor::BytesComparator, + state::{Active, MaybeTombstone, State}, +}; +use ref_cast::RefCast as _; + +use crate::{ + memtable::unbounded, + types::{BulkOperation, Dynamic, Query, Remove, Update}, +}; + +use super::DynamicMemtable; + +/// Dynamic multiple version memtable implementation based on ARNEA based [`SkipMap`](skl::generic::multiple_version::sync::SkipMap)s. +pub type Table = unbounded::Table; + +/// Entry of the [`Table`]. +pub type EntryRef<'a, S, C> = unbounded::EntryRef<'a, S, C, Dynamic>; + +/// Point entry of the [`Table`]. +pub type PointEntryRef<'a, S, C> = unbounded::PointEntryRef<'a, S, C, Dynamic>; + +/// Range entry of the [`Table`]. +pub type RangeEntryRef<'a, S, O, C> = unbounded::RangeEntryRef<'a, S, O, C, Dynamic>; + +/// Iterator of the [`Table`]. +pub type Iter<'a, S, C> = unbounded::Iter<'a, S, C, Dynamic>; + +/// Range iterator of the [`Table`]. +pub type Range<'a, S, Q, R, C> = unbounded::Range<'a, S, Q, R, C, Dynamic>; + +/// Point iterator of the [`Table`]. +pub type IterPoints<'a, S, C> = unbounded::IterPoints<'a, S, C, Dynamic>; + +/// Range point iterator of the [`Table`]. +pub type RangePoints<'a, S, Q, R, C> = unbounded::RangePoints<'a, S, Q, R, C, Dynamic>; + +/// Bulk operations iterator of the [`Table`]. +pub type IterBulkOperations<'a, S, O, C> = unbounded::IterBulkOperations<'a, S, O, C, Dynamic>; + +/// Bulk operations range iterator of the [`Table`]. 
+pub type RangeBulkOperations<'a, S, O, Q, R, C> = + unbounded::RangeBulkOperations<'a, S, O, Q, R, C, Dynamic>; + +impl DynamicMemtable for Table +where + C: BytesComparator + 'static, +{ + type Entry<'a, S> + = EntryRef<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type PointEntry<'a, S> + = PointEntryRef<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type RangeEntry<'a, S, O> + = RangeEntryRef<'a, S, O, C> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + type Iterator<'a, S> + = Iter<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type Range<'a, S, Q, R> + = Range<'a, S, Q, R, C> + where + Self: 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + S: State + 'a; + + type PointsIterator<'a, S> + = IterPoints<'a, S, C> + where + Self: 'a, + S: State + 'a; + + type RangePoints<'a, S, Q, R> + = RangePoints<'a, S, Q, R, C> + where + Self: 'a, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>; + + type BulkOperationsIterator<'a, S, O> + = IterBulkOperations<'a, S, O, C> + where + Self: 'a, + S: State + 'a, + O: crate::types::BulkOperation; + + type BulkOperationsRange<'a, S, O, Q, R> + = RangeBulkOperations<'a, S, O, Q, R, C> + where + Self: 'a, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + O: crate::types::BulkOperation; + + #[inline] + fn maximum_version(&self) -> u64 { + self + .skl + .maximum_version() + .max(self.range_deletions_skl.maximum_version()) + .max(self.range_updates_skl.maximum_version()) + } + + #[inline] + fn minimum_version(&self) -> u64 { + self + .skl + .minimum_version() + .min(self.range_deletions_skl.minimum_version()) + .min(self.range_updates_skl.minimum_version()) + } + + #[inline] + fn may_contain_version(&self, version: u64) -> bool { + self.skl.may_contain_version(version) + || self.range_deletions_skl.may_contain_version(version) + || self.range_updates_skl.may_contain_version(version) + } + + #[inline] + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range::(version, (bound, Bound::Unbounded)) + .next() + } + + #[inline] + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range_all::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + self + .range_all::(version, (bound, Bound::Unbounded)) + .next() + } + + #[inline] + fn first(&self, version: u64) -> Option> { + self.iter(version).next() + } + + #[inline] + fn last(&self, version: u64) -> Option> { + self.iter(version).next_back() + } + + #[inline] + fn first_with_tombstone(&self, version: u64) -> Option> { + self.iter_all(version).next() + } + + #[inline] + fn last_with_tombstone(&self, version: u64) -> Option> { + self.iter_all(version).next_back() + } + + #[inline] + fn get(&self, version: u64, key: &Q) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + let ent = self.skl.get(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + 
+ #[inline] + fn get_with_tombstone(&self, version: u64, key: &Q) -> Option> + where + Q: ?Sized + Borrow<[u8]>, + { + let ent = self.skl.get_with_tombstone(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + + #[inline] + fn iter(&self, version: u64) -> Self::Iterator<'_, Active> { + Iter::new(version, self) + } + + #[inline] + fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + Range::new(version, self, range) + } + + #[inline] + fn iter_all(&self, version: u64) -> Self::Iterator<'_, MaybeTombstone> { + Iter::with_tombstone(version, self) + } + + #[inline] + fn range_all<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + Range::with_tombstone(version, self, range) + } + + #[inline] + fn iter_points(&self, version: u64) -> Self::PointsIterator<'_, Active> { + IterPoints::new(self.skl.iter(version)) + } + + #[inline] + fn iter_all_points(&self, version: u64) -> Self::PointsIterator<'_, MaybeTombstone> { + IterPoints::new(self.skl.iter_all(version)) + } + + #[inline] + fn range_points<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangePoints<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangePoints::new(self.skl.range(version, range.into())) + } + + #[inline] + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangePoints::new(self.skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_removes(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_deletions_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_deletions_skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_updates(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Update> { + IterBulkOperations::new(self.range_updates_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Update> { + IterBulkOperations::new(self.range_updates_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_updates_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, 
+ range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized + Borrow<[u8]>, + { + RangeBulkOperations::new(self.range_updates_skl.range_all(version, range.into())) + } +} diff --git a/src/memtable/generic.rs b/src/memtable/generic.rs new file mode 100644 index 00000000..5513967b --- /dev/null +++ b/src/memtable/generic.rs @@ -0,0 +1,322 @@ +use core::ops::{Bound, RangeBounds}; + +use dbutils::{ + equivalentor::{TypeRefComparator, TypeRefQueryComparator}, + state::{Active, MaybeTombstone, State}, + types::{Type, TypeRef}, +}; + +use crate::{ + memtable::Memtable, + types::{BulkOperation, Remove, Update}, +}; + +/// Bounded memtable implementation based on ARNEA based [`SkipMap`](skl::generic::multiple_version::sync::SkipMap)s. +#[cfg(feature = "skl")] +#[cfg_attr(docsrs, doc(cfg(feature = "bounded")))] +pub mod bounded; + +/// Unbounded memtable implementation based on ARNEA based [`SkipMap`](crossbeam_skiplist_mvcc::nested::SkipMap)s. +#[cfg(feature = "crossbeam-skiplist-mvcc")] +#[cfg_attr(docsrs, doc(cfg(feature = "unbounded")))] +pub mod unbounded; + +mod comparator; +mod range_comparator; + +pub(crate) use comparator::MemtableComparator; +pub(crate) use range_comparator::MemtableRangeComparator; + +/// A memory table which is used to store pointers to the underlying entries. +pub trait GenericMemtable +where + Self: Memtable, + K: Type + ?Sized, + V: Type + ?Sized, +{ + /// The comparator used for key comparison. + type Comparator; + + /// The item returned by the iterator or query methods. + type Entry<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The item returned by the point iterators + type PointEntry<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The range entry type. + type RangeEntry<'a, S, O> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + /// The iterator type. + type Iterator<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The range iterator type. + type Range<'a, S, Q, R> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + R: RangeBounds + 'a, + Q: ?Sized, + S: State + 'a; + + /// The iterator over point entries. + type PointsIterator<'a, S> + where + Self: 'a, + S: State + 'a; + + /// The range iterator over point entries. + type RangePoints<'a, S, Q, R> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized; + + /// The iterator over range deletions entries. + type BulkOperationsIterator<'a, S, O> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + /// The range iterator over range deletions entries. + type BulkOperationsRange<'a, S, O, Q, R> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + S: State + 'a, + O: BulkOperation, + R: RangeBounds + 'a, + Q: ?Sized; + + /// Returns the maximum version of the memtable. + fn maximum_version(&self) -> u64; + + /// Returns the minimum version of the memtable. + fn minimum_version(&self) -> u64; + + /// Returns `true` if the memtable may contain an entry whose version is less than or equal to the specified version. + fn may_contain_version(&self, version: u64) -> bool; + + /// Returns the upper bound of the memtable. + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns the lower bound of the memtable. 
+ fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns the upper bound of the memtable. + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns the lower bound of the memtable. + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns the first pointer in the memtable. + fn first<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>; + + /// Returns the last pointer in the memtable. + fn last<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>; + + /// Returns the first pointer in the memtable. + fn first_with_tombstone<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>; + + /// Returns the last pointer in the memtable. + fn last_with_tombstone<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>; + + /// Returns the pointer associated with the key. + fn get<'a, Q>(&'a self, version: u64, key: &Q) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns `true` if the memtable contains the specified pointer. + fn contains<'a, Q>(&'a self, version: u64, key: &Q) -> bool + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.get(version, key).is_some() + } + + /// Returns the pointer associated with the key. + fn get_with_tombstone<'a, Q>( + &'a self, + version: u64, + key: &Q, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns `true` if the memtable contains the specified pointer. + fn contains_with_tombsone<'a, Q>(&'a self, version: u64, key: &Q) -> bool + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self.get_with_tombstone(version, key).is_some() + } + + /// Returns an iterator over the memtable. + fn iter(&self, version: u64) -> Self::Iterator<'_, Active>; + + /// Returns an iterator over all the entries(including tombstone entries and all versions) in the memtable. + fn iter_all(&self, version: u64) -> Self::Iterator<'_, MaybeTombstone>; + + /// Returns an iterator over a subset of the memtable. + fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns an iterator over all the entries(including tombstone entries and all versions) in a subset of the memtable. + fn range_all<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns an iterator over point entries in the memtable. + fn iter_points(&self, version: u64) -> Self::PointsIterator<'_, Active>; + + /// Returns an iterator over all the point entries in the memtable. + fn iter_all_points(&self, version: u64) -> Self::PointsIterator<'_, MaybeTombstone>; + + /// Returns an iterator over a subset of point entries in the memtable. 
+ fn range_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns an iterator over all the point entries in a subset of the memtable. + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns an iterator over range deletions entries in the memtable. + fn iter_bulk_removes(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Remove>; + + /// Returns an iterator over all the range deletions entries in the memtable. + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Remove>; + + /// Returns an iterator over a subset of range deletions entries in the memtable. + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns an iterator over all the range deletions entries in a subset of the memtable. + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns an iterator over range updates entries in the memtable. + fn iter_bulk_updates(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Update>; + + /// Returns an iterator over all the range updates entries in the memtable. + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Update>; + + /// Returns an iterator over a subset of range updates entries in the memtable. + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; + + /// Returns an iterator over all the range updates entries in a subset of the memtable. + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>; +} + +unsafe fn ty_ref(src: &[u8]) -> T::Ref<'_> { + as TypeRef<'_>>::from_slice(src) +} diff --git a/src/memtable/generic/bounded.rs b/src/memtable/generic/bounded.rs new file mode 100644 index 00000000..ed071c2b --- /dev/null +++ b/src/memtable/generic/bounded.rs @@ -0,0 +1,415 @@ +use core::ops::{Bound, ControlFlow, RangeBounds}; + +use ref_cast::RefCast; +use skl::{ + generic::{multiple_version::Map as _, Type, TypeRefComparator, TypeRefQueryComparator}, + Active, MaybeTombstone, +}; + +use crate::{ + memtable::bounded, + state::State, + types::{BulkOperation, Generic, Query, Remove, Update}, +}; + +use super::GenericMemtable; + +/// Generic multiple version memtable implementation based on ARNEA based [`SkipMap`](skl::generic::unique::sync::SkipMap)s. +pub type Table = bounded::Table>; + +/// Entry of the [`Table`]. +pub type EntryRef<'a, K, V, S, C> = bounded::EntryRef<'a, S, C, Generic>; + +/// Point entry of the [`Table`]. 
+pub type PointEntryRef<'a, K, V, S, C> = bounded::PointEntryRef<'a, S, C, Generic>; + +/// Range entry of the [`Table`]. +pub type RangeEntryRef<'a, K, V, S, O, C> = bounded::RangeEntryRef<'a, S, O, C, Generic>; + +/// Iterator of the [`Table`]. +pub type Iter<'a, K, V, S, C> = bounded::Iter<'a, S, C, Generic>; + +/// Range iterator of the [`Table`]. +pub type Range<'a, K, V, S, Q, R, C> = bounded::Range<'a, S, Q, R, C, Generic>; + +/// Point iterator of the [`Table`]. +pub type IterPoints<'a, K, V, S, C> = bounded::IterPoints<'a, S, C, Generic>; + +/// Range point iterator of the [`Table`]. +pub type RangePoints<'a, K, V, S, Q, R, C> = bounded::RangePoints<'a, S, Q, R, C, Generic>; + +/// Bulk operations iterator of the [`Table`]. +pub type IterBulkOperations<'a, K, V, S, O, C> = + bounded::IterBulkOperations<'a, S, O, C, Generic>; + +/// Bulk operations range iterator of the [`Table`]. +pub type RangeBulkOperations<'a, K, V, S, O, Q, R, C> = + bounded::RangeBulkOperations<'a, S, O, Q, R, C, Generic>; + +impl GenericMemtable for Table +where + K: Type + ?Sized + 'static, + V: Type + ?Sized + 'static, + C: 'static, +{ + type Comparator = C; + + type Entry<'a, S> + = EntryRef<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type PointEntry<'a, S> + = PointEntryRef<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type RangeEntry<'a, S, O> + = RangeEntryRef<'a, K, V, S, O, C> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + type Iterator<'a, S> + = Iter<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type Range<'a, S, Q, R> + = Range<'a, K, V, S, Q, R, C> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + R: RangeBounds + 'a, + Q: ?Sized, + S: State + 'a; + + type PointsIterator<'a, S> + = IterPoints<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type RangePoints<'a, S, Q, R> + = RangePoints<'a, K, V, S, Q, R, C> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized; + + type BulkOperationsIterator<'a, S, O> + = IterBulkOperations<'a, K, V, S, O, C> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + type BulkOperationsRange<'a, S, O, Q, R> + = RangeBulkOperations<'a, K, V, S, O, Q, R, C> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + S: State + 'a, + O: BulkOperation, + R: RangeBounds + 'a, + Q: ?Sized; + + #[inline] + fn maximum_version(&self) -> u64 { + self + .skl + .maximum_version() + .max(self.range_deletions_skl.maximum_version()) + .max(self.range_updates_skl.maximum_version()) + } + + #[inline] + fn minimum_version(&self) -> u64 { + self + .skl + .minimum_version() + .min(self.range_deletions_skl.minimum_version()) + .min(self.range_updates_skl.minimum_version()) + } + + #[inline] + fn may_contain_version(&self, version: u64) -> bool { + self.skl.may_contain_version(version) + || self.range_deletions_skl.may_contain_version(version) + || self.range_updates_skl.may_contain_version(version) + } + + #[inline] + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range::(version, (bound, Bound::Unbounded)) + 
.next() + } + + #[inline] + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range_all::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range_all::(version, (bound, Bound::Unbounded)) + .next() + } + + #[inline] + fn first<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter(version).next() + } + + #[inline] + fn last<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter(version).next_back() + } + + #[inline] + fn first_with_tombstone<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter_all(version).next() + } + + #[inline] + fn last_with_tombstone<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter_all(version).next_back() + } + + #[inline] + fn get<'a, Q>(&'a self, version: u64, key: &Q) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + let ent = self.skl.get(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + + #[inline] + fn get_with_tombstone<'a, Q>( + &'a self, + version: u64, + key: &Q, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + let ent = self.skl.get_with_tombstone(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + + #[inline] + fn iter(&self, version: u64) -> Self::Iterator<'_, Active> { + Iter::new(version, self) + } + + #[inline] + fn iter_all(&self, version: u64) -> Self::Iterator<'_, MaybeTombstone> { + Iter::with_tombstone(version, self) + } + + #[inline] + fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + Range::new(version, self, range) + } + + #[inline] + fn range_all<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + Range::with_tombstone(version, self, range) + } + + #[inline] + fn iter_points(&self, version: u64) -> Self::PointsIterator<'_, Active> { + IterPoints::new(self.skl.iter(version)) + } + + #[inline] + fn iter_all_points(&self, version: u64) -> Self::PointsIterator<'_, MaybeTombstone> { + IterPoints::new(self.skl.iter_all(version)) + } + + #[inline] + fn range_points<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangePoints<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangePoints::new(self.skl.range(version, range.into())) + } + + #[inline] + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + 
RangePoints::new(self.skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_removes(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_deletions_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_deletions_skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_updates(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Update> { + IterBulkOperations::new(self.range_updates_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Update> { + IterBulkOperations::new(self.range_updates_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_updates_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_updates_skl.range_all(version, range.into())) + } +} diff --git a/src/memtable/generic/comparator.rs b/src/memtable/generic/comparator.rs new file mode 100644 index 00000000..0d2f61f0 --- /dev/null +++ b/src/memtable/generic/comparator.rs @@ -0,0 +1,266 @@ +use core::{cmp, marker::PhantomData}; + +use dbutils::{ + equivalentor::{ + Comparator, Equivalentor, QueryComparator, QueryEquivalentor, TypeRefComparator, + TypeRefEquivalentor, TypeRefQueryComparator, TypeRefQueryEquivalentor, + }, + types::Type, +}; +use triomphe::Arc; + +use crate::types::{fetch_entry, fetch_raw_key, Query, RawEntryRef, RecordPointer}; + +use super::ty_ref; + +pub struct MemtableComparator +where + K: ?Sized, + C: ?Sized, +{ + /// The start pointer of the parent ARENA. 
+ ptr: *const u8, + cmp: Arc, + _k: PhantomData, +} + +unsafe impl Send for MemtableComparator {} +unsafe impl Sync for MemtableComparator {} + +impl crate::types::sealed::ComparatorConstructor for MemtableComparator +where + K: ?Sized, + C: ?Sized, +{ + #[inline] + fn new(ptr: *const u8, cmp: Arc) -> Self { + Self { + ptr, + cmp, + _k: PhantomData, + } + } +} + +impl crate::types::sealed::PointComparator for MemtableComparator { + #[inline] + fn fetch_entry<'a>(&self, kp: &RecordPointer) -> RawEntryRef<'a> { + unsafe { fetch_entry(self.ptr, kp) } + } +} + +impl MemtableComparator +where + K: ?Sized, + C: ?Sized, +{ + #[inline] + fn query_equivalent_key<'a, Q>(&self, a: &RecordPointer, b: &Q) -> bool + where + C: TypeRefQueryEquivalentor<'a, K, Q>, + K: Type, + Q: ?Sized, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + let ak = ty_ref::(ak); + self.cmp.query_equivalent_ref(&ak, b) + } + } + + #[inline] + fn equivalent_in<'a>(&self, a: &RecordPointer, b: &RecordPointer) -> bool + where + C: TypeRefEquivalentor<'a, K>, + K: Type, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + let ak = ty_ref::(ak); + let (_, bk) = fetch_raw_key(self.ptr, b); + let bk = ty_ref::(bk); + self.cmp.equivalent_refs(&ak, &bk) + } + } + + #[inline] + fn compare_key<'a, Q>(&self, a: &RecordPointer, b: &Q) -> cmp::Ordering + where + C: TypeRefQueryComparator<'a, K, Q>, + K: Type, + Q: ?Sized, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + let ak = ty_ref::(ak); + self.cmp.query_compare_ref(&ak, b) + } + } + + #[inline] + fn compare_in<'a>(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering + where + C: TypeRefComparator<'a, K>, + K: Type, + { + unsafe { + let (_, ak) = fetch_raw_key(self.ptr, a); + let ak = ty_ref::(ak); + let (_, bk) = fetch_raw_key(self.ptr, b); + let bk = ty_ref::(bk); + self.cmp.compare_refs(&ak, &bk) + } + } +} + +impl Clone for MemtableComparator +where + K: ?Sized, + C: ?Sized, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ptr: self.ptr, + cmp: self.cmp.clone(), + _k: PhantomData, + } + } +} + +impl core::fmt::Debug for MemtableComparator +where + C: core::fmt::Debug + ?Sized, + K: ?Sized, +{ + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("MemtableComparator") + .field("ptr", &self.ptr) + .field("cmp", &self.cmp) + .finish() + } +} + +impl<'a, K, C> Equivalentor for MemtableComparator +where + C: TypeRefEquivalentor<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn equivalent(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl<'a, K, C> TypeRefEquivalentor<'a, RecordPointer> for MemtableComparator +where + C: TypeRefEquivalentor<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn equivalent_ref(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } + + #[inline] + fn equivalent_refs(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl<'a, K, C> Comparator for MemtableComparator +where + C: TypeRefComparator<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn compare(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl<'a, K, C> Equivalentor>> for MemtableComparator +where + C: TypeRefEquivalentor<'a, K> + ?Sized, + K: Type + ?Sized, +{ + fn equivalent(&self, a: &Query>, b: &Query>) -> bool { + self.cmp.equivalent_refs(&a.0, &b.0) + } +} + +impl<'a, K, C> Comparator>> for MemtableComparator +where + 
C: TypeRefComparator<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn compare(&self, a: &Query>, b: &Query>) -> cmp::Ordering { + self.cmp.compare_refs(&a.0, &b.0) + } +} + +impl<'a, K, C> TypeRefComparator<'a, RecordPointer> for MemtableComparator +where + C: TypeRefComparator<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn compare_ref(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } + + #[inline] + fn compare_refs(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl<'a, K, Q, C> TypeRefQueryEquivalentor<'a, RecordPointer, Query> for MemtableComparator +where + C: TypeRefQueryEquivalentor<'a, K, Q> + ?Sized, + Q: ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &Query) -> bool { + self.query_equivalent_key(a, &b.0) + } +} + +impl<'a, K, Q, C> TypeRefQueryComparator<'a, RecordPointer, Query> for MemtableComparator +where + C: TypeRefQueryComparator<'a, K, Q> + ?Sized, + Q: ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_compare_ref(&self, a: &RecordPointer, b: &Query) -> cmp::Ordering { + self.compare_key(a, &b.0) + } +} + +impl<'a, K, Q, C> QueryEquivalentor> for MemtableComparator +where + C: TypeRefQueryEquivalentor<'a, K, Q> + ?Sized, + Q: ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_equivalent(&self, a: &RecordPointer, b: &Query) -> bool { + self.query_equivalent_key(a, &b.0) + } +} + +impl<'a, K, Q, C> QueryComparator> for MemtableComparator +where + C: TypeRefQueryComparator<'a, K, Q> + ?Sized, + Q: ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_compare(&self, a: &RecordPointer, b: &Query) -> cmp::Ordering { + self.compare_key(a, &b.0) + } +} diff --git a/src/memtable/generic/range_comparator.rs b/src/memtable/generic/range_comparator.rs new file mode 100644 index 00000000..9d1456d8 --- /dev/null +++ b/src/memtable/generic/range_comparator.rs @@ -0,0 +1,347 @@ +use core::{cmp, marker::PhantomData, ops::Bound}; + +use dbutils::{ + equivalentor::{ + Comparator, Equivalentor, QueryComparator, QueryEquivalentor, TypeRefComparator, + TypeRefEquivalentor, TypeRefQueryComparator, TypeRefQueryEquivalentor, + }, + types::Type, +}; +use triomphe::Arc; + +use crate::types::{ + fetch_raw_range_deletion_entry, fetch_raw_range_key_start_bound, fetch_raw_range_update_entry, + Query, RawRangeRemoveRef, RawRangeUpdateRef, RecordPointer, RefQuery, +}; + +use super::ty_ref; + +pub struct MemtableRangeComparator +where + K: ?Sized, + C: ?Sized, +{ + /// The start pointer of the parent ARENA. 
+ ptr: *const u8, + cmp: Arc, + _k: PhantomData, +} + +unsafe impl Send for MemtableRangeComparator {} +unsafe impl Sync for MemtableRangeComparator {} + +impl crate::types::sealed::ComparatorConstructor for MemtableRangeComparator +where + K: ?Sized, + C: ?Sized, +{ + #[inline] + fn new(ptr: *const u8, cmp: Arc) -> Self { + Self { + ptr, + cmp, + _k: PhantomData, + } + } +} + +impl crate::types::sealed::RangeComparator + for MemtableRangeComparator +{ + fn fetch_range_update<'a>(&self, kp: &RecordPointer) -> RawRangeUpdateRef<'a> { + unsafe { fetch_raw_range_update_entry(self.ptr, kp) } + } + + fn fetch_range_deletion<'a>(&self, kp: &RecordPointer) -> RawRangeRemoveRef<'a> { + unsafe { fetch_raw_range_deletion_entry(self.ptr, kp) } + } +} + +impl MemtableRangeComparator +where + K: ?Sized, + C: ?Sized, +{ + #[inline] + fn equivalent_start_key<'a, Q>(&self, a: &RecordPointer, b: &Q) -> bool + where + C: TypeRefQueryEquivalentor<'a, K, Q>, + K: Type, + Q: ?Sized, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a).map(|k| ty_ref::(k)); + match ak { + Bound::Included(k) => self.cmp.query_equivalent_ref(&k, b), + Bound::Excluded(k) => self.cmp.query_equivalent_ref(&k, b), + Bound::Unbounded => false, + } + } + } + + #[inline] + fn equivalent_start_key_with_ref<'a>(&self, a: &RecordPointer, b: &K::Ref<'a>) -> bool + where + C: TypeRefEquivalentor<'a, K>, + K: Type, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a).map(|k| ty_ref::(k)); + match &ak { + Bound::Included(k) => self.cmp.equivalent_refs(k, b), + Bound::Excluded(k) => self.cmp.equivalent_refs(k, b), + Bound::Unbounded => false, + } + } + } + + #[inline] + fn equivalent_in<'a>(&self, a: &RecordPointer, b: &RecordPointer) -> bool + where + C: TypeRefEquivalentor<'a, K>, + K: Type, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a).map(|k| ty_ref::(k)); + let bk = fetch_raw_range_key_start_bound(self.ptr, b).map(|k| ty_ref::(k)); + + match (&ak, &bk) { + (Bound::Unbounded, Bound::Unbounded) => true, + (Bound::Included(_), Bound::Unbounded) => false, + (Bound::Excluded(_), Bound::Unbounded) => false, + (Bound::Unbounded, Bound::Included(_)) => false, + (Bound::Unbounded, Bound::Excluded(_)) => false, + + (Bound::Included(a), Bound::Included(b)) => self.cmp.equivalent_refs(a, b), + (Bound::Included(a), Bound::Excluded(b)) => self.cmp.equivalent_refs(a, b), + (Bound::Excluded(a), Bound::Included(b)) => self.cmp.equivalent_refs(a, b), + (Bound::Excluded(a), Bound::Excluded(b)) => self.cmp.equivalent_refs(a, b), + } + } + } + + #[inline] + fn compare_start_key<'a, Q>(&self, a: &RecordPointer, b: &Q) -> cmp::Ordering + where + C: TypeRefQueryComparator<'a, K, Q>, + K: Type, + Q: ?Sized, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a).map(|k| ty_ref::(k)); + match &ak { + Bound::Included(k) => self.cmp.query_compare_ref(k, b), + Bound::Excluded(k) => self + .cmp + .query_compare_ref(k, b) + .then(cmp::Ordering::Greater), + Bound::Unbounded => cmp::Ordering::Less, + } + } + } + + #[inline] + fn compare_start_key_with_ref<'a>(&self, a: &RecordPointer, b: &K::Ref<'a>) -> cmp::Ordering + where + C: TypeRefComparator<'a, K>, + K: Type, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a).map(|k| ty_ref::(k)); + match &ak { + Bound::Included(k) => self.cmp.compare_refs(k, b), + Bound::Excluded(k) => self.cmp.compare_refs(k, b).then(cmp::Ordering::Greater), + Bound::Unbounded => cmp::Ordering::Less, + } + } + } + + #[inline] + fn compare_in<'a>(&self, 
a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering + where + C: TypeRefComparator<'a, K>, + K: Type, + { + unsafe { + let ak = fetch_raw_range_key_start_bound(self.ptr, a).map(|k| ty_ref::(k)); + let bk = fetch_raw_range_key_start_bound(self.ptr, b).map(|k| ty_ref::(k)); + + match (&ak, &bk) { + (Bound::Included(_), Bound::Unbounded) => cmp::Ordering::Greater, + (Bound::Excluded(_), Bound::Unbounded) => cmp::Ordering::Greater, + (Bound::Unbounded, Bound::Included(_)) => cmp::Ordering::Less, + (Bound::Unbounded, Bound::Excluded(_)) => cmp::Ordering::Less, + (Bound::Unbounded, Bound::Unbounded) => cmp::Ordering::Equal, + + (Bound::Included(a), Bound::Included(b)) => self.cmp.compare_refs(a, b), + (Bound::Included(a), Bound::Excluded(b)) => self.cmp.compare_refs(a, b), + (Bound::Excluded(a), Bound::Included(b)) => self.cmp.compare_refs(a, b), + (Bound::Excluded(a), Bound::Excluded(b)) => self.cmp.compare_refs(a, b), + } + } + } +} + +impl Clone for MemtableRangeComparator +where + K: ?Sized, + C: ?Sized, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ptr: self.ptr, + cmp: self.cmp.clone(), + _k: PhantomData, + } + } +} + +impl core::fmt::Debug for MemtableRangeComparator +where + C: core::fmt::Debug + ?Sized, + K: ?Sized, +{ + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("MemtableRangeComparator") + .field("ptr", &self.ptr) + .field("cmp", &self.cmp) + .finish() + } +} + +impl<'a, K, C> Equivalentor for MemtableRangeComparator +where + C: TypeRefEquivalentor<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn equivalent(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl<'a, K, C> TypeRefEquivalentor<'a, RecordPointer> for MemtableRangeComparator +where + C: TypeRefEquivalentor<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn equivalent_ref(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } + + #[inline] + fn equivalent_refs(&self, a: &RecordPointer, b: &RecordPointer) -> bool { + self.equivalent_in(a, b) + } +} + +impl<'a, K, C> Comparator for MemtableRangeComparator +where + C: TypeRefComparator<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn compare(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl<'a, K, C> TypeRefComparator<'a, RecordPointer> for MemtableRangeComparator +where + C: TypeRefComparator<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn compare_ref(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } + + fn compare_refs(&self, a: &RecordPointer, b: &RecordPointer) -> cmp::Ordering { + self.compare_in(a, b) + } +} + +impl<'a, K, Q, C> TypeRefQueryEquivalentor<'a, RecordPointer, Query> + for MemtableRangeComparator +where + C: TypeRefQueryEquivalentor<'a, K, Q> + ?Sized, + Q: ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &Query) -> bool { + self.equivalent_start_key(a, &b.0) + } +} + +impl<'a, K, Q, C> TypeRefQueryComparator<'a, RecordPointer, Query> + for MemtableRangeComparator +where + C: TypeRefQueryComparator<'a, K, Q> + ?Sized, + Q: ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_compare_ref(&self, a: &RecordPointer, b: &Query) -> cmp::Ordering { + self.compare_start_key(a, &b.0) + } +} + +impl<'a, K, C> TypeRefQueryEquivalentor<'a, RecordPointer, RefQuery>> + for MemtableRangeComparator +where + C: TypeRefEquivalentor<'a, K> + ?Sized, + K: Type + 
?Sized, +{ + #[inline] + fn query_equivalent_ref(&self, a: &RecordPointer, b: &RefQuery>) -> bool { + self.equivalent_start_key_with_ref(a, &b.query) + } +} + +impl<'a, K, C> TypeRefQueryComparator<'a, RecordPointer, RefQuery>> + for MemtableRangeComparator +where + C: TypeRefComparator<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_compare_ref( + &self, + a: &::Ref<'_>, + b: &RefQuery>, + ) -> cmp::Ordering { + self.compare_start_key_with_ref(a, &b.query) + } +} + +impl<'a, K, C> QueryEquivalentor>> + for MemtableRangeComparator +where + C: TypeRefEquivalentor<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_equivalent(&self, a: &RecordPointer, b: &RefQuery>) -> bool { + self.equivalent_start_key_with_ref(a, &b.query) + } +} + +impl<'a, K, C> QueryComparator>> + for MemtableRangeComparator +where + C: TypeRefComparator<'a, K> + ?Sized, + K: Type + ?Sized, +{ + #[inline] + fn query_compare( + &self, + a: &::Ref<'_>, + b: &RefQuery>, + ) -> cmp::Ordering { + self.compare_start_key_with_ref(a, &b.query) + } +} diff --git a/src/memtable/generic/unbounded.rs b/src/memtable/generic/unbounded.rs new file mode 100644 index 00000000..36e3d7b7 --- /dev/null +++ b/src/memtable/generic/unbounded.rs @@ -0,0 +1,415 @@ +use core::ops::{Bound, ControlFlow, RangeBounds}; + +use dbutils::{ + equivalentor::{TypeRefComparator, TypeRefQueryComparator}, + state::{Active, MaybeTombstone, State}, + types::Type, +}; +use ref_cast::RefCast; + +use crate::{ + memtable::unbounded, + types::{BulkOperation, Generic, Query, Remove, Update}, +}; + +use super::GenericMemtable; + +/// Generic multiple version memtable implementation based on ARNEA based [`SkipMap`](crossbeam_skiplist_mvcc::nested::SkipMap)s. +pub type Table = unbounded::Table>; + +/// Entry of the [`Table`]. +pub type EntryRef<'a, K, V, S, C> = unbounded::EntryRef<'a, S, C, Generic>; + +/// Point entry of the [`Table`]. +pub type PointEntryRef<'a, K, V, S, C> = unbounded::PointEntryRef<'a, S, C, Generic>; + +/// Range entry of the [`Table`]. +pub type RangeEntryRef<'a, K, V, S, O, C> = unbounded::RangeEntryRef<'a, S, O, C, Generic>; + +/// Iterator of the [`Table`]. +pub type Iter<'a, K, V, S, C> = unbounded::Iter<'a, S, C, Generic>; + +/// Range iterator of the [`Table`]. +pub type Range<'a, K, V, S, Q, R, C> = unbounded::Range<'a, S, Q, R, C, Generic>; + +/// Point iterator of the [`Table`]. +pub type IterPoints<'a, K, V, S, C> = unbounded::IterPoints<'a, S, C, Generic>; + +/// Range point iterator of the [`Table`]. +pub type RangePoints<'a, K, V, S, Q, R, C> = unbounded::RangePoints<'a, S, Q, R, C, Generic>; + +/// Bulk operations iterator of the [`Table`]. +pub type IterBulkOperations<'a, K, V, S, O, C> = + unbounded::IterBulkOperations<'a, S, O, C, Generic>; + +/// Bulk operations range iterator of the [`Table`]. 
+pub type RangeBulkOperations<'a, K, V, S, O, Q, R, C> = + unbounded::RangeBulkOperations<'a, S, O, Q, R, C, Generic>; + +impl GenericMemtable for Table +where + K: Type + ?Sized + 'static, + V: Type + ?Sized + 'static, + C: 'static, +{ + type Comparator = C; + + type Entry<'a, S> + = EntryRef<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type PointEntry<'a, S> + = PointEntryRef<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type RangeEntry<'a, S, O> + = RangeEntryRef<'a, K, V, S, O, C> + where + Self: 'a, + S: State + 'a, + O: BulkOperation; + + type Iterator<'a, S> + = Iter<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type Range<'a, S, Q, R> + = Range<'a, K, V, S, Q, R, C> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + R: RangeBounds + 'a, + Q: ?Sized, + S: State + 'a; + + type PointsIterator<'a, S> + = IterPoints<'a, K, V, S, C> + where + Self: 'a, + S: State + 'a; + + type RangePoints<'a, S, Q, R> + = RangePoints<'a, K, V, S, Q, R, C> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + S: State + 'a, + R: RangeBounds + 'a, + Q: ?Sized; + + type BulkOperationsIterator<'a, S, O> + = IterBulkOperations<'a, K, V, S, O, C> + where + Self: 'a, + S: State + 'a, + O: crate::types::BulkOperation; + + type BulkOperationsRange<'a, S, O, Q, R> + = RangeBulkOperations<'a, K, V, S, O, Q, R, C> + where + Self: 'a, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + S: State + 'a, + O: crate::types::BulkOperation, + R: RangeBounds + 'a, + Q: ?Sized; + + #[inline] + fn maximum_version(&self) -> u64 { + self + .skl + .maximum_version() + .max(self.range_deletions_skl.maximum_version()) + .max(self.range_updates_skl.maximum_version()) + } + + #[inline] + fn minimum_version(&self) -> u64 { + self + .skl + .minimum_version() + .min(self.range_deletions_skl.minimum_version()) + .min(self.range_updates_skl.minimum_version()) + } + + #[inline] + fn may_contain_version(&self, version: u64) -> bool { + self.skl.may_contain_version(version) + || self.range_deletions_skl.may_contain_version(version) + || self.range_updates_skl.may_contain_version(version) + } + + #[inline] + fn upper_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound<'a, Q>( + &'a self, + version: u64, + bound: core::ops::Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range::(version, (bound, Bound::Unbounded)) + .next() + } + + #[inline] + fn upper_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range_all::(version, (Bound::Unbounded, bound)) + .next_back() + } + + #[inline] + fn lower_bound_with_tombstone<'a, Q>( + &'a self, + version: u64, + bound: Bound<&'a Q>, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + self + .range_all::(version, (bound, Bound::Unbounded)) + .next() + } + + #[inline] + fn first<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter(version).next() + } + + #[inline] + fn last<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter(version).next_back() + } + 
+ #[inline] + fn first_with_tombstone<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter_all(version).next() + } + + #[inline] + fn last_with_tombstone<'a>(&'a self, version: u64) -> Option> + where + Self::Comparator: TypeRefComparator<'a, K>, + { + self.iter_all(version).next_back() + } + + #[inline] + fn get<'a, Q>(&'a self, version: u64, key: &Q) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + let ent = self.skl.get(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + + #[inline] + fn get_with_tombstone<'a, Q>( + &'a self, + version: u64, + key: &Q, + ) -> Option> + where + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + let ent = self.skl.get_with_tombstone(version, Query::ref_cast(key))?; + match self.validate(version, PointEntryRef::new(ent)) { + ControlFlow::Break(entry) => entry, + ControlFlow::Continue(_) => None, + } + } + + #[inline] + fn iter(&self, version: u64) -> Self::Iterator<'_, Active> { + Iter::new(version, self) + } + + #[inline] + fn iter_all(&self, version: u64) -> Self::Iterator<'_, MaybeTombstone> { + Iter::with_tombstone(version, self) + } + + #[inline] + fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + Range::new(version, self, range) + } + + #[inline] + fn range_all<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + Range::with_tombstone(version, self, range) + } + + #[inline] + fn iter_points(&self, version: u64) -> Self::PointsIterator<'_, Active> { + IterPoints::new(self.skl.iter(version)) + } + + #[inline] + fn iter_all_points(&self, version: u64) -> Self::PointsIterator<'_, MaybeTombstone> { + IterPoints::new(self.skl.iter_all(version)) + } + + #[inline] + fn range_points<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangePoints<'a, Active, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangePoints::new(self.skl.range(version, range.into())) + } + + #[inline] + fn range_all_points<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::RangePoints<'a, MaybeTombstone, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangePoints::new(self.skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_removes(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_removes( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Remove> { + IterBulkOperations::new(self.range_deletions_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_deletions_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_removes<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> 
Self::BulkOperationsRange<'a, MaybeTombstone, Remove, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_deletions_skl.range_all(version, range.into())) + } + + #[inline] + fn iter_bulk_updates(&self, version: u64) -> Self::BulkOperationsIterator<'_, Active, Update> { + IterBulkOperations::new(self.range_updates_skl.iter(version)) + } + + #[inline] + fn iter_all_bulk_updates( + &self, + version: u64, + ) -> Self::BulkOperationsIterator<'_, MaybeTombstone, Update> { + IterBulkOperations::new(self.range_updates_skl.iter_all(version)) + } + + #[inline] + fn range_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, Active, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_updates_skl.range(version, range.into())) + } + + #[inline] + fn range_all_bulk_updates<'a, Q, R>( + &'a self, + version: u64, + range: R, + ) -> Self::BulkOperationsRange<'a, MaybeTombstone, Update, Q, R> + where + R: RangeBounds + 'a, + Q: ?Sized, + Self::Comparator: TypeRefQueryComparator<'a, K, Q>, + { + RangeBulkOperations::new(self.range_updates_skl.range_all(version, range.into())) + } +} diff --git a/src/memtable/linked.rs b/src/memtable/linked.rs deleted file mode 100644 index f14906f5..00000000 --- a/src/memtable/linked.rs +++ /dev/null @@ -1,7 +0,0 @@ -/// The multiple version memtable implementation. -pub mod multiple_version; -/// The memtable implementation. -pub mod table; - -pub use multiple_version::MultipleVersionTable; -pub use table::Table; diff --git a/src/memtable/linked/multiple_version.rs b/src/memtable/linked/multiple_version.rs deleted file mode 100644 index 94752328..00000000 --- a/src/memtable/linked/multiple_version.rs +++ /dev/null @@ -1,350 +0,0 @@ -use core::{ - convert::Infallible, - ops::{Bound, RangeBounds}, -}; - -use crossbeam_skiplist_mvcc::nested::SkipMap; -pub use crossbeam_skiplist_mvcc::nested::{Entry, Iter, IterAll, Range, RangeAll, VersionedEntry}; - -use dbutils::{ - equivalent::Comparable, - types::{KeyRef, Type}, -}; - -use crate::{ - memtable::{self, BaseEntry, VersionedMemtableEntry}, - sealed::WithVersion, - types::Kind, - wal::{KeyPointer, ValuePointer}, -}; - -/// An memory table implementation based on [`crossbeam_skiplist::SkipSet`]. 
-pub struct MultipleVersionTable(SkipMap, ValuePointer>); - -impl Default for MultipleVersionTable -where - K: ?Sized, - V: ?Sized, -{ - #[inline] - fn default() -> Self { - Self(SkipMap::new()) - } -} - -impl<'a, K, V> BaseEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized, -{ - type Key = K; - type Value = V; - - #[inline] - fn next(&mut self) -> Option { - Entry::next(self) - } - - #[inline] - fn prev(&mut self) -> Option { - Entry::prev(self) - } - - #[inline] - fn key(&self) -> KeyPointer { - *self.key() - } -} - -impl<'a, K, V> memtable::VersionedMemtableEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized, -{ - #[inline] - fn value(&self) -> Option> { - Some(*self.value()) - } - - #[inline] - fn version(&self) -> u64 { - Entry::version(self) - } -} - -impl WithVersion for Entry<'_, KeyPointer, ValuePointer> -where - K: ?Sized, - V: ?Sized, -{ -} - -impl<'a, K, V> BaseEntry<'a> for VersionedEntry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized, -{ - type Key = K; - type Value = V; - - #[inline] - fn next(&mut self) -> Option { - VersionedEntry::next(self) - } - - #[inline] - fn prev(&mut self) -> Option { - VersionedEntry::prev(self) - } - - #[inline] - fn key(&self) -> KeyPointer { - *self.key() - } -} - -impl<'a, K, V> VersionedMemtableEntry<'a> for VersionedEntry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized, -{ - #[inline] - fn version(&self) -> u64 { - VersionedEntry::version(self) - } - - #[inline] - fn value(&self) -> Option> { - self.value().copied() - } -} - -impl WithVersion for VersionedEntry<'_, KeyPointer, ValuePointer> -where - K: ?Sized, - V: ?Sized, -{ -} - -impl memtable::BaseTable for MultipleVersionTable -where - K: ?Sized + Type + Ord + 'static, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized + 'static, -{ - type Key = K; - type Value = V; - type Item<'a> - = Entry<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Iterator<'a> - = Iter<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Range<'a, Q, R> - = Range<'a, Q, R, KeyPointer, ValuePointer> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - type Options = (); - type Error = Infallible; - - fn new(_: Self::Options) -> Result - where - Self: Sized, - { - Ok(Self(SkipMap::new())) - } - - #[inline] - fn insert( - &self, - version: Option, - kp: KeyPointer, - vp: ValuePointer, - ) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - self.0.insert_unchecked(version.unwrap_or(0), kp, vp); - Ok(()) - } - - #[inline] - fn remove(&self, version: Option, key: KeyPointer) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - self.0.remove_unchecked(version.unwrap_or(0), key); - Ok(()) - } - - #[inline] - fn kind() -> Kind { - Kind::MultipleVersion - } -} - -impl memtable::MultipleVersionMemtable for MultipleVersionTable -where - K: ?Sized + Type + Ord + 'static, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized + 'static, -{ - type VersionedItem<'a> - = VersionedEntry<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type IterAll<'a> - = IterAll<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type RangeAll<'a, Q, R> - = RangeAll<'a, Q, R, KeyPointer, ValuePointer> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - #[inline] - fn maximum_version(&self) -> u64 { - 
self.0.maximum_version() - } - - #[inline] - fn minimum_version(&self) -> u64 { - self.0.minimum_version() - } - - #[inline] - fn may_contain_version(&self, version: u64) -> bool { - self.0.may_contain_version(version) - } - - fn upper_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.upper_bound(version, bound) - } - - fn upper_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.upper_bound_versioned(version, bound) - } - - fn lower_bound(&self, version: u64, bound: Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.lower_bound(version, bound) - } - - fn lower_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.lower_bound_versioned(version, bound) - } - - fn first(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.0.front(version) - } - - fn first_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.0.front_versioned(version) - } - - fn last(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.0.back(version) - } - - fn last_versioned(&self, version: u64) -> Option> - where - KeyPointer: Ord, - { - self.0.back_versioned(version) - } - - fn get(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.get(version, key) - } - - fn get_versioned(&self, version: u64, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.get_versioned(version, key) - } - - fn contains(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - self.0.contains_key(version, key) - } - - fn contains_versioned(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - self.0.contains_key_versioned(version, key) - } - - fn iter(&self, version: u64) -> Self::Iterator<'_> { - self.0.iter(version) - } - - fn iter_all_versions(&self, version: u64) -> Self::IterAll<'_> { - self.0.iter_all_versions(version) - } - - fn range<'a, Q, R>(&'a self, version: u64, range: R) -> Self::Range<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - self.0.range(version, range) - } - - fn range_all_versions<'a, Q, R>(&'a self, version: u64, range: R) -> Self::RangeAll<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - self.0.range_all_versions(version, range) - } -} diff --git a/src/memtable/linked/table.rs b/src/memtable/linked/table.rs deleted file mode 100644 index 51acc367..00000000 --- a/src/memtable/linked/table.rs +++ /dev/null @@ -1,213 +0,0 @@ -use core::{convert::Infallible, ops::RangeBounds}; - -use crossbeam_skiplist::SkipMap; -use dbutils::{ - equivalent::Comparable, - types::{KeyRef, Type}, -}; - -use crate::{ - memtable, - sealed::WithoutVersion, - types::Kind, - wal::{KeyPointer, ValuePointer}, -}; - -pub use crossbeam_skiplist::map::{Entry, Iter, Range}; - -/// An memory table implementation based on [`crossbeam_skiplist::SkipMap`]. 
-pub struct Table(SkipMap, ValuePointer>); - -impl core::fmt::Debug for Table -where - K: ?Sized + Type + Ord, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_tuple("Table").field(&self.0).finish() - } -} - -impl Default for Table { - #[inline] - fn default() -> Self { - Self(SkipMap::new()) - } -} - -impl<'a, K, V> memtable::BaseEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized + Type, -{ - type Key = K; - type Value = V; - - #[inline] - fn next(&mut self) -> Option { - Entry::next(self) - } - - #[inline] - fn prev(&mut self) -> Option { - Entry::prev(self) - } - - #[inline] - fn key(&self) -> KeyPointer { - *self.key() - } -} - -impl<'a, K, V> memtable::MemtableEntry<'a> for Entry<'a, KeyPointer, ValuePointer> -where - K: ?Sized + Type + Ord, - K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized + Type, -{ - #[inline] - fn value(&self) -> ValuePointer { - *self.value() - } -} - -impl WithoutVersion for Entry<'_, KeyPointer, ValuePointer> -where - K: ?Sized, - V: ?Sized, -{ -} - -impl memtable::BaseTable for Table -where - K: ?Sized + Type + Ord, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized + Type + 'static, -{ - type Key = K; - type Value = V; - type Item<'a> - = Entry<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Iterator<'a> - = Iter<'a, KeyPointer, ValuePointer> - where - Self: 'a; - - type Range<'a, Q, R> - = Range<'a, Q, R, KeyPointer, ValuePointer> - where - Self: 'a, - R: RangeBounds + 'a, - Q: ?Sized + Comparable>; - - type Options = (); - type Error = Infallible; - - fn new(_: Self::Options) -> Result - where - Self: Sized, - { - Ok(Self(SkipMap::new())) - } - - #[inline] - fn insert( - &self, - _: Option, - kp: KeyPointer, - vp: ValuePointer, - ) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - self.0.insert(kp, vp); - Ok(()) - } - - #[inline] - fn remove(&self, _: Option, key: KeyPointer) -> Result<(), Self::Error> - where - KeyPointer: Ord + 'static, - { - self.0.remove(&key); - Ok(()) - } - - #[inline] - fn kind() -> Kind { - Kind::Plain - } -} - -impl memtable::Memtable for Table -where - K: ?Sized + Type + Ord + 'static, - for<'a> K::Ref<'a>: KeyRef<'a, K>, - V: ?Sized + Type + 'static, -{ - #[inline] - fn len(&self) -> usize { - self.0.len() - } - - #[inline] - fn upper_bound(&self, bound: core::ops::Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.upper_bound(bound) - } - - #[inline] - fn lower_bound(&self, bound: core::ops::Bound<&Q>) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.lower_bound(bound) - } - - #[inline] - fn first(&self) -> Option> { - self.0.front() - } - - #[inline] - fn last(&self) -> Option> { - self.0.back() - } - - #[inline] - fn get(&self, key: &Q) -> Option> - where - Q: ?Sized + Comparable>, - { - self.0.get(key) - } - - #[inline] - fn contains(&self, key: &Q) -> bool - where - Q: ?Sized + Comparable>, - { - self.0.contains_key(key) - } - - #[inline] - fn iter(&self) -> Self::Iterator<'_> { - self.0.iter() - } - - #[inline] - fn range<'a, Q, R>(&'a self, range: R) -> Self::Range<'a, Q, R> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable>, - { - self.0.range(range) - } -} diff --git a/src/memtable/unbounded.rs b/src/memtable/unbounded.rs new file mode 100644 index 00000000..e01f44e2 --- /dev/null +++ b/src/memtable/unbounded.rs @@ -0,0 +1,230 @@ +use core::{ + convert::Infallible, + ops::ControlFlow, + 
sync::atomic::{AtomicUsize, Ordering}, +}; + +use crossbeam_skiplist_mvcc::nested::SkipMap; +use dbutils::{ + equivalentor::{Comparator, QueryComparator}, + state::{Active, MaybeTombstone, State}, +}; +use ref_cast::RefCast; +use triomphe::Arc; + +use crate::types::{ + sealed::{ComparatorConstructor, PointComparator, Pointee, RangeComparator}, + Mode, Query, RecordPointer, RefQuery, Remove, Update, +}; + +use super::{sealed, Entry, Memtable, MutableMemtable, RangeEntry, RangeEntryExt, Transfer}; + +pub use entry::*; +pub use iter::*; +pub use point::*; +pub use range_entry::*; + +mod entry; +mod iter; +mod point; +mod range_entry; + +/// A memory table implementation based on ARENA [`SkipMap`](crossbeam_skiplist_mvcc::nested::SkipMap). +pub struct Table +where + T: Mode, +{ + pub(in crate::memtable) skl: SkipMap>, + pub(in crate::memtable) range_deletions_skl: + SkipMap>, + pub(in crate::memtable) range_updates_skl: + SkipMap>, + len: AtomicUsize, +} + +impl Memtable for Table +where + C: 'static, + T: Mode, + T::Comparator: 'static, + T::RangeComparator: 'static, +{ + type Options = C; + type Error = Infallible; + + #[inline] + fn new(arena: A, opts: Self::Options) -> Result + where + Self: Sized, + A: rarena_allocator::Allocator, + { + let cmp = Arc::new(opts); + let ptr = arena.raw_ptr(); + let points_cmp = as ComparatorConstructor<_>>::new(ptr, cmp.clone()); + let range_del_cmp = as ComparatorConstructor<_>>::new(ptr, cmp.clone()); + let range_update_cmp = + as ComparatorConstructor<_>>::new(ptr, cmp.clone()); + + Ok(Self { + skl: SkipMap::with_comparator(points_cmp), + range_deletions_skl: SkipMap::with_comparator(range_del_cmp), + range_updates_skl: SkipMap::with_comparator(range_update_cmp), + len: AtomicUsize::new(0), + }) + } + + #[inline] + fn len(&self) -> usize { + self.len.load(Ordering::Acquire) + } +} + +impl MutableMemtable for Table +where + C: 'static, + T: Mode, + T::Comparator: Comparator + Send + 'static, + T::RangeComparator: Comparator + Send + 'static, +{ + #[inline] + fn insert(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error> { + self.skl.insert_unchecked(version, pointer, pointer); + self.len.fetch_add(1, Ordering::Release); + Ok(()) + } + + #[inline] + fn remove(&self, version: u64, key: RecordPointer) -> Result<(), Self::Error> { + self.skl.remove_unchecked(version, key); + self.len.fetch_add(1, Ordering::Release); + Ok(()) + } + + #[inline] + fn range_remove(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error> { + self + .range_deletions_skl + .insert_unchecked(version, pointer, pointer); + self.len.fetch_add(1, Ordering::Release); + Ok(()) + } + + #[inline] + fn range_set(&self, version: u64, pointer: RecordPointer) -> Result<(), Self::Error> { + self + .range_updates_skl + .insert_unchecked(version, pointer, pointer); + self.len.fetch_add(1, Ordering::Release); + Ok(()) + } + + #[inline] + fn range_unset(&self, version: u64, key: RecordPointer) -> Result<(), Self::Error> { + self.range_updates_skl.remove_unchecked(version, key); + self.len.fetch_add(1, Ordering::Release); + Ok(()) + } +} + +impl<'a, C, T> Table +where + C: 'static, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + Comparator + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: Comparator + + QueryComparator as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, +{ + pub(in 
crate::memtable) fn validate( + &'a self, + query_version: u64, + ent: PointEntryRef<'a, S, C, T>, + ) -> ControlFlow>, PointEntryRef<'a, S, C, T>> + where + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, + { + let key = ent.key(); + let cmp = ent.ent.comparator(); + let version = ent.ent.version(); + let query = RefQuery::new(key); + let shadow = self + .range_deletions_skl + .range(query_version, ..=&query) + .any(|ent| { + let del_ent_version = ent.version(); + if !(version <= del_ent_version && del_ent_version <= query_version) { + return false; + } + let ent = RangeEntryRef::::new(ent); + dbutils::equivalentor::RangeComparator::contains( + cmp, + &ent.query_range(), + Query::ref_cast(&query.query), + ) + }); + if shadow { + return ControlFlow::Continue(ent); + } + let range_ent = self + .range_updates_skl + .range_all(query_version, ..=&query) + .filter_map(|ent| { + let range_ent_version = ent.version(); + if !(version <= range_ent_version && range_ent_version <= query_version) { + return None; + } + let ent = RangeEntryRef::::new(ent); + if dbutils::equivalentor::RangeComparator::contains( + cmp, + &ent.query_range(), + Query::ref_cast(&query.query), + ) { + Some(ent) + } else { + None + } + }) + .max_by_key(|e| e.version()); + if let Some(range_ent) = range_ent { + let version = range_ent.version(); + if let Some(val) = range_ent.into_value() { + return ControlFlow::Break(Some(EntryRef::new( + self, + query_version, + ent, + key, + Some(S::data(val)), + version, + ))); + } + } + let version = ent.version(); + ControlFlow::Break(Some(EntryRef::new( + self, + query_version, + ent, + key, + None, + version, + ))) + } +} diff --git a/src/memtable/unbounded/entry.rs b/src/memtable/unbounded/entry.rs new file mode 100644 index 00000000..892665df --- /dev/null +++ b/src/memtable/unbounded/entry.rs @@ -0,0 +1,218 @@ +use core::ops::ControlFlow; + +use dbutils::{ + equivalentor::{Comparator, QueryComparator}, + state::{Active, MaybeTombstone, State}, +}; + +use crate::{ + memtable::{sealed, Entry, RangeEntry, RawEntry, Transfer}, + types::{ + sealed::{PointComparator, Pointee, RangeComparator}, + Mode, Query, RecordPointer, RefQuery, Remove, Update, + }, +}; + +use super::{PointEntryRef, RangeEntryRef, Table}; + +/// Entry in the memtable. 
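// `Table::validate` above decides, for one point entry, whether a newer range
// tombstone hides it and whether a newer range update overrides its value. A
// std-only sketch of that visibility rule (all types here are illustrative
// stand-ins, not the crate's): `ControlFlow::Continue` means "shadowed, the
// caller advances the iterator"; `ControlFlow::Break` carries the resolved value.
//
// use core::ops::{Bound, ControlFlow, RangeBounds};
//
// struct RangeOp {
//     start: Bound<u64>,
//     end: Bound<u64>,
//     version: u64,
//     value: Option<&'static str>, // None models an "unset" range update
// }
//
// fn validate(
//     key: u64,
//     entry_version: u64,
//     query_version: u64,
//     point_value: &'static str,
//     deletions: &[RangeOp],
//     updates: &[RangeOp],
// ) -> ControlFlow<&'static str> {
//     // Only operations written at or after the entry, and at or before the
//     // read snapshot, can affect it.
//     let in_window = |v: u64| entry_version <= v && v <= query_version;
//     let covers = |op: &RangeOp| (op.start, op.end).contains(&key);
//     if deletions.iter().any(|d| in_window(d.version) && covers(d)) {
//         return ControlFlow::Continue(()); // shadowed by a range tombstone
//     }
//     // Otherwise the newest covering range update (if any) wins.
//     let newest = updates
//         .iter()
//         .filter(|u| in_window(u.version) && covers(*u))
//         .max_by_key(|u| u.version);
//     if let Some(u) = newest {
//         if let Some(v) = u.value {
//             return ControlFlow::Break(v);
//         }
//     }
//     ControlFlow::Break(point_value)
// }
//
// fn main() {
//     let dels = [RangeOp { start: Bound::Included(0), end: Bound::Excluded(10), version: 5, value: None }];
//     let upds = [RangeOp { start: Bound::Included(0), end: Bound::Unbounded, version: 4, value: Some("new") }];
//     // Read at version 4: the deletion (version 5) is outside the window,
//     // the update (version 4) overrides the point value.
//     assert_eq!(validate(7, 3, 4, "old", &dels, &upds), ControlFlow::Break("new"));
//     // Read at version 6: the deletion now shadows the entry.
//     assert_eq!(validate(7, 3, 6, "old", &dels, &upds), ControlFlow::Continue(()));
// }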
+pub struct EntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + table: &'a Table, + point_ent: PointEntryRef<'a, S, C, T>, + key: as Pointee<'a>>::Output, + val: Option>>, + version: u64, + query_version: u64, +} + +impl<'a, S, C, T> core::fmt::Debug for EntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: core::fmt::Debug, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + as Pointee<'a>>::Output: core::fmt::Debug, + T::Comparator: PointComparator + Comparator, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("Entry") + .field("key", &self.key) + .field("value", &self.value_in()) + .field("version", &self.version) + .finish() + } +} + +impl<'a, S, C, T> Clone for EntryRef<'a, S, C, T> +where + S: State, + S::Data<'a, T::Value<'a>>: Clone, + PointEntryRef<'a, S, C, T>: Clone, + T: Mode, + T::Key<'a>: Clone, + T::Value<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + table: self.table, + point_ent: self.point_ent.clone(), + key: self.key, + val: self.val.clone(), + version: self.version, + query_version: self.query_version, + } + } +} + +impl<'a, S, C, T> RawEntry<'a> for EntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, &'a [u8]>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + Comparator, + PointEntryRef<'a, S, C, T>: RawEntry<'a, RawValue = S::Data<'a, &'a [u8]>>, +{ + type RawValue = S::Data<'a, &'a [u8]>; + + #[inline] + fn raw_key(&self) -> &'a [u8] { + self.point_ent.raw_key() + } + + #[inline] + fn raw_value(&self) -> Self::RawValue { + match self.val.as_ref() { + Some(val) => >>::input(val), + None => self.point_ent.raw_value(), + } + } +} + +impl<'a, S, C, T> Entry<'a> for EntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + Comparator + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: Comparator + + QueryComparator as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + type Key = as Pointee<'a>>::Output; + + type Value = S::Data<'a, S::Value>; + + #[inline] + fn key(&self) -> Self::Key { + self.key + } + + #[inline] + fn value(&self) -> Self::Value { + self.value_in() + } + + #[inline] + fn next(&self) -> Option { + let mut next = self.point_ent.next(); + while let Some(ent) = next { + match self.table.validate(self.query_version, ent) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(ent) => next = ent.next(), + } + } + None + } + + #[inline] + fn prev(&self) -> Option { + let mut prev = self.point_ent.prev(); + while let Some(ent) = prev { + match self.table.validate(self.query_version, ent) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(ent) => prev = ent.prev(), + } + } + None + } + + #[inline] + fn version(&self) -> u64 { + self.version 
+ } +} + +impl<'a, S, C, T> EntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(crate) fn new( + table: &'a Table, + query_version: u64, + point_ent: PointEntryRef<'a, S, C, T>, + key: as Pointee<'a>>::Output, + val: Option>>, + version: u64, + ) -> Self { + Self { + table, + point_ent, + key, + val, + version, + query_version, + } + } +} + +impl<'a, S, C, T> EntryRef<'a, S, C, T> +where + C: 'static, + S: State, + S: Transfer<'a, T::Value<'a>>, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + Comparator, + PointEntryRef<'a, S, C, T>: + Entry<'a, Key = as Pointee<'a>>::Output, Value = S::Data<'a, S::Value>>, +{ + #[inline] + fn value_in(&self) -> S::Data<'a, S::Value> { + match self.val.as_ref() { + Some(val) => >>::transfer(val), + None => self.point_ent.value(), + } + } +} diff --git a/src/memtable/unbounded/iter.rs b/src/memtable/unbounded/iter.rs new file mode 100644 index 00000000..5157929f --- /dev/null +++ b/src/memtable/unbounded/iter.rs @@ -0,0 +1,280 @@ +use core::ops::{ControlFlow, RangeBounds}; + +use dbutils::{ + equivalentor::{Comparator, QueryComparator}, + state::{Active, MaybeTombstone, State}, +}; + +use crate::{ + memtable::{sealed, Entry, RangeEntry, Transfer}, + types::{ + sealed::{PointComparator, Pointee, RangeComparator}, + Mode, Query, RecordPointer, RefQuery, Remove, Update, + }, +}; + +use super::{EntryRef, IterPoints, PointEntryRef, RangeEntryRef, RangePoints, Table}; + +/// An iterator over the entries of a `Memtable`. +pub struct Iter<'a, S, C, T> +where + C: 'static, + T: Mode, + S: State, +{ + table: &'a Table, + iter: IterPoints<'a, S, C, T>, + query_version: u64, +} + +impl<'a, C, T> Iter<'a, MaybeTombstone, C, T> +where + C: 'static, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn with_tombstone(version: u64, table: &'a Table) -> Self { + Self { + iter: IterPoints::new(table.skl.iter_all(version)), + query_version: version, + table, + } + } +} + +impl<'a, C, T> Iter<'a, Active, C, T> +where + C: 'static, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn new(version: u64, table: &'a Table) -> Self { + Self { + iter: IterPoints::new(table.skl.iter(version)), + query_version: version, + table, + } + } +} + +impl<'a, S, C, T> Iterator for Iter<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + Comparator + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: Comparator + + QueryComparator as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + type Item = EntryRef<'a, S, C, T>; + + #[inline] + fn next(&mut self) -> Option { + loop { + let next = self.iter.next()?; + match self.table.validate(self.query_version, next) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} + +impl<'a, S, C, T> DoubleEndedIterator for Iter<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + 
T::Comparator: PointComparator + + Comparator + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: Comparator + + QueryComparator as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + #[inline] + fn next_back(&mut self) -> Option { + loop { + let prev = self.iter.next_back()?; + match self.table.validate(self.query_version, prev) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} + +/// An iterator over the entries of a `Memtable`. +pub struct Range<'a, S, Q, R, C, T> +where + R: RangeBounds, + Q: ?Sized, + C: 'static, + T: Mode, + S: State, +{ + table: &'a Table, + iter: RangePoints<'a, S, Q, R, C, T>, + query_version: u64, +} + +impl<'a, Q, R, C, T> Range<'a, Active, Q, R, C, T> +where + C: 'static, + R: RangeBounds + 'a, + Q: ?Sized, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn new(version: u64, table: &'a Table, r: R) -> Self { + Self { + iter: RangePoints::new(table.skl.range(version, r.into())), + query_version: version, + table, + } + } +} + +impl<'a, Q, R, C, T> Range<'a, MaybeTombstone, Q, R, C, T> +where + C: 'static, + R: RangeBounds + 'a, + Q: ?Sized, + T: Mode, + T::Comparator: 'static, +{ + pub(in crate::memtable) fn with_tombstone(version: u64, table: &'a Table, r: R) -> Self { + Self { + iter: RangePoints::new(table.skl.range_all(version, r.into())), + query_version: version, + table, + } + } +} + +impl<'a, S, Q, R, C, T> Iterator for Range<'a, S, Q, R, C, T> +where + R: RangeBounds, + Q: ?Sized, + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + Comparator + + QueryComparator> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: Comparator + + QueryComparator as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + type Item = EntryRef<'a, S, C, T>; + + #[inline] + fn next(&mut self) -> Option { + loop { + let next = self.iter.next()?; + match self.table.validate(self.query_version, next) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} + +impl<'a, S, Q, R, C, T> DoubleEndedIterator for Range<'a, S, Q, R, C, T> +where + R: RangeBounds, + Q: ?Sized, + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]>, + T::Comparator: PointComparator + + Comparator + + QueryComparator> + + Comparator as Pointee<'a>>::Output>> + + 'static, + T::RangeComparator: Comparator + + QueryComparator as Pointee<'a>>::Output>> + + RangeComparator + + 'static, + PointEntryRef<'a, S, C, T>: Entry<'a, Key = as Pointee<'a>>::Output>, + MaybeTombstone: Transfer<'a, T::Value<'a>>, + 
RangeEntryRef<'a, Active, Remove, C, T>: + RangeEntry<'a, Remove, Key = as Pointee<'a>>::Output>, + RangeEntryRef<'a, MaybeTombstone, Update, C, T>: RangeEntry< + 'a, + Update, + Key = as Pointee<'a>>::Output, + Value = ::Data< + 'a, + >>::Value, + >, + >, +{ + #[inline] + fn next_back(&mut self) -> Option { + loop { + let prev = self.iter.next_back()?; + match self.table.validate(self.query_version, prev) { + ControlFlow::Break(entry) => return entry, + ControlFlow::Continue(_) => continue, + } + } + } +} diff --git a/src/memtable/unbounded/point.rs b/src/memtable/unbounded/point.rs new file mode 100644 index 00000000..3736d2f7 --- /dev/null +++ b/src/memtable/unbounded/point.rs @@ -0,0 +1,302 @@ +use core::{cell::OnceCell, ops::RangeBounds}; + +use crossbeam_skiplist_mvcc::nested::{Entry, Iter, Range}; + +use dbutils::{ + equivalentor::{Comparator, QueryComparator}, + state::State, +}; + +use crate::{ + memtable::{sealed, Transfer}, + types::{ + sealed::{PointComparator, Pointee}, + Mode, Query, QueryRange, RawEntryRef, RecordPointer, + }, +}; + +/// Point entry. +pub struct PointEntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + pub(in crate::memtable) ent: Entry<'a, RecordPointer, RecordPointer, S, T::Comparator>, + data: OnceCell>, + key: OnceCell>, + pub(in crate::memtable) value: OnceCell>>, +} + +impl core::fmt::Debug for PointEntryRef<'_, S, C, T> +where + S: State, + T: Mode, + T::Comparator: PointComparator, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + self + .data + .get_or_init(|| self.ent.comparator().fetch_entry(self.ent.key())) + .write_fmt("PointEntryRef", f) + } +} + +impl<'a, S, C, T> Clone for PointEntryRef<'a, S, C, T> +where + S: State, + S::Data<'a, T::Value<'a>>: Clone, + T: Mode, + T::Key<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ent: self.ent.clone(), + data: self.data.clone(), + key: self.key.clone(), + value: self.value.clone(), + } + } +} +impl<'a, S, C, T> PointEntryRef<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) fn new( + ent: Entry<'a, RecordPointer, RecordPointer, S, T::Comparator>, + ) -> Self { + Self { + ent, + data: OnceCell::new(), + key: OnceCell::new(), + value: OnceCell::new(), + } + } +} + +impl<'a, S, C, T> crate::memtable::RawEntry<'a> for PointEntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, &'a [u8]>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + Comparator, +{ + type RawValue = S::Data<'a, &'a [u8]>; + + #[inline] + fn raw_key(&self) -> &'a [u8] { + let ent = self.data.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + match ptr { + Some(ptr) => self.ent.comparator().fetch_entry(ptr), + None => self.ent.comparator().fetch_entry(self.ent.key()), + } + }); + + ent.key() + } + + #[inline] + fn raw_value(&self) -> Self::RawValue { + let ent = self.data.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + match ptr { + Some(ptr) => self.ent.comparator().fetch_entry(ptr), + None => self.ent.comparator().fetch_entry(self.ent.key()), + } + }); + + S::raw(ent.value()) + } +} + +impl<'a, S, C, T> crate::memtable::Entry<'a> for PointEntryRef<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::Comparator: PointComparator + Comparator, +{ + type Key = as Pointee<'a>>::Output; + type Value = S::Data<'a, S::Value>; + + #[inline] + fn 
key(&self) -> Self::Key { + self + .key + .get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let ent = match ptr { + Some(ptr) => self + .data + .get_or_init(|| self.ent.comparator().fetch_entry(ptr)), + None => self + .data + .get_or_init(|| self.ent.comparator().fetch_entry(self.ent.key())), + }; + + as Pointee<'a>>::from_input(ent.key()) + }) + .output() + } + + #[inline] + fn value(&self) -> Self::Value { + let val = self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_entry(ptr)); + + >>::from_input(ent.value()) + }); + S::into_state(data) + }); + >>::transfer(val) + } + + #[inline] + fn next(&self) -> Option { + self.ent.next().map(Self::new) + } + + #[inline] + fn prev(&self) -> Option { + self.ent.prev().map(Self::new) + } + + #[inline] + fn version(&self) -> u64 { + self.ent.version() + } +} + +impl PointEntryRef<'_, S, C, T> +where + C: 'static, + S: State, + T: Mode, +{ + /// Returns the version of the entry. + #[inline] + pub fn version(&self) -> u64 { + self.ent.version() + } +} + +/// The iterator for point entries. +pub struct IterPoints<'a, S, C, T> +where + S: State, + T: Mode, +{ + iter: Iter<'a, RecordPointer, RecordPointer, S, T::Comparator>, +} + +impl<'a, S, C, T> IterPoints<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + iter: Iter<'a, RecordPointer, RecordPointer, S, T::Comparator>, + ) -> Self { + Self { iter } + } +} + +impl<'a, S, C, T> Iterator for IterPoints<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::Comparator: Comparator, +{ + type Item = PointEntryRef<'a, S, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(PointEntryRef::new) + } +} + +impl DoubleEndedIterator for IterPoints<'_, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::Comparator: Comparator, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(PointEntryRef::new) + } +} + +/// The iterator over a subset of point entries. 
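// `PointEntryRef` above decodes its key and value lazily: the skiplist stores
// only `RecordPointer`s, and the first call to `key()`/`value()` materializes
// the referenced bytes into a `OnceCell`, so repeated accesses are free. A
// std-only sketch of that memoization pattern (the byte slice stands in for a
// record pointer into the WAL; the UTF-8 decode stands in for `fetch_entry`):
//
// use core::cell::OnceCell;
//
// struct LazyEntry<'a> {
//     raw: &'a [u8],          // stand-in for a RecordPointer
//     key: OnceCell<&'a str>, // decoded at most once
// }
//
// impl<'a> LazyEntry<'a> {
//     fn new(raw: &'a [u8]) -> Self {
//         Self { raw, key: OnceCell::new() }
//     }
//     fn key(&self) -> &'a str {
//         *self.key.get_or_init(|| core::str::from_utf8(self.raw).expect("valid utf-8"))
//     }
// }
//
// fn main() {
//     let e = LazyEntry::new(b"hello");
//     assert_eq!(e.key(), "hello"); // decodes on first access
//     assert_eq!(e.key(), "hello"); // served from the cell afterwards
// }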
+pub struct RangePoints<'a, S, Q, R, C, T> +where + S: State, + Q: ?Sized, + T: Mode, + R: RangeBounds, +{ + range: Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::Comparator>, +} + +impl<'a, S, Q, R, C, T> RangePoints<'a, S, Q, R, C, T> +where + S: State, + Q: ?Sized, + T: Mode, + R: RangeBounds, +{ + #[inline] + pub(in crate::memtable) const fn new( + range: Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::Comparator>, + ) -> Self { + Self { range } + } +} + +impl<'a, S, Q, R, C, T> Iterator for RangePoints<'a, S, Q, R, C, T> +where + C: 'static, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::Comparator: QueryComparator> + 'a, +{ + type Item = PointEntryRef<'a, S, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.range.next().map(PointEntryRef::new) + } +} + +impl<'a, S, Q, R, C, T> DoubleEndedIterator for RangePoints<'a, S, Q, R, C, T> +where + C: 'static, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::Comparator: QueryComparator> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.range.next_back().map(PointEntryRef::new) + } +} diff --git a/src/memtable/unbounded/range_deletion.rs b/src/memtable/unbounded/range_deletion.rs new file mode 100644 index 00000000..9fb99392 --- /dev/null +++ b/src/memtable/unbounded/range_deletion.rs @@ -0,0 +1,279 @@ +use core::{ + cell::OnceCell, + ops::{Bound, RangeBounds}, +}; + +use crossbeam_skiplist_mvcc::nested::{Entry, Iter, Range}; +use dbutils::{ + equivalentor::{Comparator, QueryComparator}, + state::State, +}; + +use crate::types::{ + sealed::{Pointee, RangeComparator}, + Mode, Query, QueryRange, RawRangeRemoveRef, RecordPointer, +}; + +/// Range deletion entry. +pub struct RangeRemoveEntry<'a, S, C, T> +where + S: State, + T: Mode, +{ + pub(crate) ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + data: OnceCell>, + start_bound: OnceCell>>, + end_bound: OnceCell>>, +} +impl core::fmt::Debug for RangeRemoveEntry<'_, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::RangeComparator: Comparator + RangeComparator, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + self + .data + .get_or_init(|| self.ent.comparator().fetch_range_deletion(self.ent.key())) + .write_fmt("RangeRemoveEntry", f) + } +} +impl<'a, S, C, T> Clone for RangeRemoveEntry<'a, S, C, T> +where + S: State, + // S::Data<'a, LazyRef<'a, RecordPointer>>: Clone, + T: Mode, + S::Data<'a, T::Value<'a>>: Clone, + T::Key<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ent: self.ent.clone(), + data: self.data.clone(), + start_bound: self.start_bound.clone(), + end_bound: self.end_bound.clone(), + } + } +} +impl<'a, S, C, T> RangeRemoveEntry<'a, S, C, T> +where + S: State, + T: Mode, +{ + pub(in crate::memtable) fn new( + ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { + ent, + data: OnceCell::new(), + start_bound: OnceCell::new(), + end_bound: OnceCell::new(), + } + } +} + +impl<'a, S, C, T> crate::memtable::RawRangeEntry<'a> for RangeRemoveEntry<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + #[inline] + fn raw_start_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_deletion(self.ent.key())); + ent.start_bound() + } + + #[inline] + fn raw_end_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| 
self.ent.comparator().fetch_range_deletion(self.ent.key())); + ent.end_bound() + } +} + +impl<'a, S, C, T> crate::memtable::RangeEntry<'a> for RangeRemoveEntry<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + type Key = as Pointee<'a>>::Output; + + #[inline] + fn start_bound(&self) -> Bound { + let start_bound = self.start_bound.get_or_init(|| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_deletion(self.ent.key())); + ent.start_bound().map( as Pointee>::from_input) + }); + start_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn end_bound(&self) -> Bound { + let end_bound = self.end_bound.get_or_init(|| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_deletion(self.ent.key())); + ent.end_bound().map( as Pointee>::from_input) + }); + end_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn next(&mut self) -> Option { + self.ent.next().map(Self::new) + } + + #[inline] + fn prev(&mut self) -> Option { + self.ent.prev().map(Self::new) + } + + #[inline] + fn version(&self) -> u64 { + self.ent.version() + } +} +impl RangeRemoveEntry<'_, S, C, T> +where + C: 'static, + S: State, + T: Mode, +{ + /// Returns the version of the entry. + #[inline] + pub fn version(&self) -> u64 { + self.ent.version() + } +} + +impl<'a, S, C, T> crate::memtable::RangeRemoveEntry<'a> for RangeRemoveEntry<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ +} + +/// The iterator for point entries. +pub struct IterRangeRemove<'a, S, C, T> +where + S: State, + T: Mode, +{ + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, +} +impl<'a, S, C, T> IterRangeRemove<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { iter } + } +} +impl<'a, S, C, T> Iterator for IterRangeRemove<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::RangeComparator: Comparator + 'a, +{ + type Item = RangeRemoveEntry<'a, S, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(RangeRemoveEntry::new) + } +} +impl<'a, S, C, T> DoubleEndedIterator for IterRangeRemove<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::RangeComparator: Comparator + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(RangeRemoveEntry::new) + } +} +/// The iterator over a subset of point entries. 
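// `RangeRemoveEntry` above keeps two `OnceCell`s so the raw start/end bounds
// (byte slices in the WAL) are decoded into typed keys only on demand, with
// `Bound::map` carrying Included/Excluded/Unbounded through unchanged. A small
// sketch of that shape (the decode step here is just UTF-8, for illustration):
//
// use core::ops::Bound;
//
// fn decode(raw: &[u8]) -> &str {
//     core::str::from_utf8(raw).expect("valid utf-8")
// }
//
// fn typed_bound<'a>(raw: Bound<&'a [u8]>) -> Bound<&'a str> {
//     raw.map(decode) // Included/Excluded keep their payload, Unbounded has none
// }
//
// fn main() {
//     assert_eq!(typed_bound(Bound::Included(b"a".as_slice())), Bound::Included("a"));
//     assert_eq!(typed_bound(Bound::Unbounded), Bound::Unbounded);
// }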
+pub struct RangeRangeRemove<'a, S, Q, R, C, T> +where + S: State, + T: Mode, + Q: ?Sized, + R: RangeBounds, +{ + range: + Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::RangeComparator>, +} + +impl<'a, S, Q, R, C, T> RangeRangeRemove<'a, S, Q, R, C, T> +where + S: State, + T: Mode, + Q: ?Sized, + R: RangeBounds, +{ + #[inline] + pub(in crate::memtable) const fn new( + range: Range< + 'a, + RecordPointer, + RecordPointer, + S, + Query, + QueryRange, + T::RangeComparator, + >, + ) -> Self { + Self { range } + } +} +impl<'a, S, Q, R, C, T> Iterator for RangeRangeRemove<'a, S, Q, R, C, T> +where + C: 'static, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: QueryComparator> + 'a, +{ + type Item = RangeRemoveEntry<'a, S, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.range.next().map(RangeRemoveEntry::new) + } +} +impl<'a, S, Q, R, C, T> DoubleEndedIterator for RangeRangeRemove<'a, S, Q, R, C, T> +where + C: 'static, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: QueryComparator> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.range.next_back().map(RangeRemoveEntry::new) + } +} diff --git a/src/memtable/unbounded/range_entry.rs b/src/memtable/unbounded/range_entry.rs new file mode 100644 index 00000000..e4e06564 --- /dev/null +++ b/src/memtable/unbounded/range_entry.rs @@ -0,0 +1,375 @@ +use core::{ + cell::OnceCell, + marker::PhantomData, + ops::{Bound, RangeBounds}, +}; + +use crossbeam_skiplist_mvcc::nested::{Entry, Iter, Range}; +use dbutils::{ + equivalentor::{Comparator, QueryComparator}, + state::State, +}; + +use crate::{ + memtable::{sealed, Transfer}, + types::{ + sealed::{Pointee, RangeComparator}, + BulkOperation, Mode, Query, QueryRange, RecordPointer, WithValue, + }, +}; + +/// Range entry. 
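// `RangeEntryRef` below is one concrete type serving both range deletions and
// range updates: the `O: BulkOperation` parameter is a zero-sized marker that
// selects how the record is fetched, and `value()` only exists when
// `O: WithValue`. A reduced sketch of that marker-trait technique (trait and
// type names here are illustrative, not the crate's):
//
// use core::marker::PhantomData;
//
// struct Remove;
// struct Update;
//
// trait BulkOp { const NAME: &'static str; }
// impl BulkOp for Remove { const NAME: &'static str = "remove"; }
// impl BulkOp for Update { const NAME: &'static str = "update"; }
//
// // Only updates carry a payload, enforced at the type level.
// trait WithValue: BulkOp {}
// impl WithValue for Update {}
//
// struct RangeRef<O: BulkOp> {
//     start: u64,
//     end: u64,
//     payload: Option<&'static str>,
//     _op: PhantomData<O>, // zero-sized: no runtime cost
// }
//
// impl<O: BulkOp> RangeRef<O> {
//     fn describe(&self) -> String {
//         format!("{} [{}, {})", O::NAME, self.start, self.end)
//     }
// }
//
// impl<O: WithValue> RangeRef<O> {
//     fn value(&self) -> Option<&'static str> { self.payload }
// }
//
// fn main() {
//     let upd: RangeRef<Update> = RangeRef { start: 1, end: 5, payload: Some("x"), _op: PhantomData };
//     assert_eq!(upd.describe(), "update [1, 5)");
//     assert_eq!(upd.value(), Some("x"));
//     let del: RangeRef<Remove> = RangeRef { start: 2, end: 3, payload: None, _op: PhantomData };
//     assert_eq!(del.describe(), "remove [2, 3)");
//     // `del.value()` would not compile: Remove is not WithValue.
// }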
+pub struct RangeEntryRef<'a, S, O, C, T> +where + O: BulkOperation, + S: State, + T: Mode, +{ + pub(crate) ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + data: OnceCell>, + start_bound: OnceCell>>, + end_bound: OnceCell>>, + value: OnceCell>>, +} + +impl core::fmt::Debug for RangeEntryRef<'_, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: State, + T: Mode, + T::RangeComparator: Comparator + RangeComparator, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + O::fmt( + self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())), + "RangeEntryRef", + f, + ) + } +} + +impl<'a, S, O, C, T> Clone for RangeEntryRef<'a, S, O, C, T> +where + O: BulkOperation, + S: State, + T: Mode, + S::Data<'a, T::Value<'a>>: Clone, + O::Output<'a>: Clone, + T::Key<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ent: self.ent.clone(), + data: self.data.clone(), + start_bound: self.start_bound.clone(), + end_bound: self.end_bound.clone(), + value: self.value.clone(), + } + } +} + +impl<'a, S, O, C, T> RangeEntryRef<'a, S, O, C, T> +where + O: BulkOperation, + S: State, + T: Mode, +{ + pub(in crate::memtable) fn new( + ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { + ent, + data: OnceCell::new(), + start_bound: OnceCell::new(), + end_bound: OnceCell::new(), + value: OnceCell::new(), + } + } +} + +impl<'a, S, O, C, T> crate::memtable::RawRangeEntry<'a, O> for RangeEntryRef<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, &'a [u8]>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + type RawValue + = S::Data<'a, &'a [u8]> + where + O: WithValue; + + #[inline] + fn raw_start_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + O::start_bound(ent) + } + + #[inline] + fn raw_end_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + O::end_bound(ent) + } + + #[inline] + fn raw_value(&self) -> Self::RawValue + where + O: WithValue, + { + let ent = self.data.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + match ptr { + Some(ptr) => O::fetch(self.ent.comparator(), ptr), + None => O::fetch(self.ent.comparator(), self.ent.key()), + } + }); + + S::raw(O::value(ent)) + } +} + +impl<'a, S, O, C, T> crate::memtable::RangeEntry<'a, O> for RangeEntryRef<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + type Key = as Pointee<'a>>::Output; + type Value + = S::Data<'a, S::Value> + where + O: WithValue; + + #[inline] + fn start_bound(&self) -> Bound { + let start_bound = self.start_bound.get_or_init(|| { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + O::start_bound(ent).map( as Pointee>::from_input) + }); + start_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn end_bound(&self) -> Bound { + let end_bound = self.end_bound.get_or_init(|| { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), self.ent.key())); + O::end_bound(ent).map( as Pointee>::from_input) + }); + end_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn value(&self) -> 
Self::Value + where + O: WithValue, + { + let val = self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), ptr)); + + >>::from_input(O::value(ent)) + }); + S::into_state(data) + }); + >>::transfer(val) + } + + #[inline] + fn next(&mut self) -> Option { + self.ent.next().map(Self::new) + } + + #[inline] + fn prev(&mut self) -> Option { + self.ent.prev().map(Self::new) + } + + #[inline] + fn version(&self) -> u64 { + self.ent.version() + } +} + +impl<'a, S, O, C, T> RangeEntryRef<'a, S, O, C, T> +where + C: 'static, + O: WithValue, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + #[inline] + pub(in crate::memtable) fn into_value(self) -> S::Data<'a, T::Value<'a>> { + self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| O::fetch(self.ent.comparator(), ptr)); + + >>::from_input(O::value(ent)) + }); + S::into_state(data) + }); + self.value.into_inner().unwrap() + } +} + +/// The iterator for point entries. +pub struct IterBulkOperations<'a, S, O, C, T> +where + O: BulkOperation, + S: State, + T: Mode, +{ + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + _op: PhantomData, +} + +impl<'a, S, O, C, T> IterBulkOperations<'a, S, O, C, T> +where + O: BulkOperation, + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { + iter, + _op: PhantomData, + } + } +} + +impl<'a, S, O, C, T> Iterator for IterBulkOperations<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: State, + T: Mode, + T::RangeComparator: Comparator + 'a, +{ + type Item = RangeEntryRef<'a, S, O, C, T>; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(RangeEntryRef::new) + } +} + +impl<'a, S, O, C, T> DoubleEndedIterator for IterBulkOperations<'a, S, O, C, T> +where + C: 'static, + O: BulkOperation, + S: State, + T: Mode, + T::RangeComparator: Comparator + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(RangeEntryRef::new) + } +} + +/// The iterator over a subset of point entries. 
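// Every iterator in this module (`IterPoints`, `IterBulkOperations` above, the
// `Range*` types below) follows one adapter recipe: wrap the skiplist's
// iterator and map each raw item through an entry constructor, forwarding both
// ends so `DoubleEndedIterator` (and therefore `rev()`) keeps working. A
// minimal sketch of that recipe:
//
// struct Wrapped<I> { inner: I }
//
// // Illustrative entry type; the real code maps into `RangeEntryRef::new`.
// #[derive(Debug, PartialEq)]
// struct Entry(u64);
//
// impl<I: Iterator<Item = u64>> Iterator for Wrapped<I> {
//     type Item = Entry;
//     fn next(&mut self) -> Option<Entry> {
//         self.inner.next().map(Entry)
//     }
// }
//
// impl<I: DoubleEndedIterator<Item = u64>> DoubleEndedIterator for Wrapped<I> {
//     fn next_back(&mut self) -> Option<Entry> {
//         self.inner.next_back().map(Entry)
//     }
// }
//
// fn main() {
//     let mut it = Wrapped { inner: 1u64..4 };
//     assert_eq!(it.next(), Some(Entry(1)));
//     assert_eq!(it.next_back(), Some(Entry(3)));
// }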
+pub struct RangeBulkOperations<'a, S, O, Q, R, C, T> +where + O: BulkOperation, + S: State, + Q: ?Sized, + T: Mode, + R: RangeBounds, +{ + range: + Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::RangeComparator>, + _op: PhantomData, +} + +impl<'a, S, O, Q, R, C, T> RangeBulkOperations<'a, S, O, Q, R, C, T> +where + O: BulkOperation, + S: State, + Q: ?Sized, + T: Mode, + R: RangeBounds, +{ + #[inline] + pub(in crate::memtable) const fn new( + range: Range< + 'a, + RecordPointer, + RecordPointer, + S, + Query, + QueryRange, + T::RangeComparator, + >, + ) -> Self { + Self { + range, + _op: PhantomData, + } + } +} + +impl<'a, S, O, Q, R, C, T> Iterator for RangeBulkOperations<'a, S, O, Q, R, C, T> +where + C: 'static, + O: BulkOperation, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: QueryComparator> + 'a, +{ + type Item = RangeEntryRef<'a, S, O, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.range.next().map(RangeEntryRef::new) + } +} + +impl<'a, S, O, Q, R, C, T> DoubleEndedIterator for RangeBulkOperations<'a, S, O, Q, R, C, T> +where + C: 'static, + O: BulkOperation, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: QueryComparator> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.range.next_back().map(RangeEntryRef::new) + } +} diff --git a/src/memtable/unbounded/range_update.rs b/src/memtable/unbounded/range_update.rs new file mode 100644 index 00000000..7914feb2 --- /dev/null +++ b/src/memtable/unbounded/range_update.rs @@ -0,0 +1,368 @@ +use core::{ + cell::OnceCell, + ops::{Bound, RangeBounds}, +}; + +use crossbeam_skiplist_mvcc::nested::{Entry, Iter, Range}; +use dbutils::{ + equivalentor::{Comparator, QueryComparator}, + state::State, +}; + +use crate::{ + memtable::{sealed, Transfer}, + types::{ + sealed::{Pointee, RangeComparator}, + Mode, Query, QueryRange, RawRangeUpdateRef, RecordPointer, + }, +}; + +/// Range update entry. 
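// `RangeEntryRef::into_value` above (and `RangeUpdateEntry::into_value` below)
// first forces the `OnceCell`, then consumes the entry with
// `into_inner().unwrap()`, which cannot panic because the cell was filled on
// the previous line. A tiny sketch of that force-then-take idiom:
//
// use core::cell::OnceCell;
//
// struct Holder {
//     cell: OnceCell<String>,
// }
//
// impl Holder {
//     fn into_value(self) -> String {
//         self.cell.get_or_init(|| "computed".to_string());
//         // Safe: the line above guarantees the cell is populated.
//         self.cell.into_inner().unwrap()
//     }
// }
//
// fn main() {
//     let h = Holder { cell: OnceCell::new() };
//     assert_eq!(h.into_value(), "computed");
// }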
+pub struct RangeUpdateEntry<'a, S, C, T> +where + S: State, + T: Mode, +{ + pub(crate) ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + data: OnceCell>, + start_bound: OnceCell>>, + end_bound: OnceCell>>, + value: OnceCell>>, +} + +impl core::fmt::Debug for RangeUpdateEntry<'_, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::RangeComparator: Comparator + RangeComparator, +{ + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + use RangeComparator; + self + .data + .get_or_init(|| self.ent.comparator().fetch_range_update(self.ent.key())) + .write_fmt("RangeUpdateEntry", f) + } +} + +impl<'a, S, C, T> Clone for RangeUpdateEntry<'a, S, C, T> +where + S: State, + T: Mode, + S::Data<'a, T::Value<'a>>: Clone, + T::Key<'a>: Clone, +{ + #[inline] + fn clone(&self) -> Self { + Self { + ent: self.ent.clone(), + data: self.data.clone(), + start_bound: self.start_bound.clone(), + end_bound: self.end_bound.clone(), + value: self.value.clone(), + } + } +} + +impl<'a, S, C, T> RangeUpdateEntry<'a, S, C, T> +where + S: State, + T: Mode, +{ + pub(in crate::memtable) fn new( + ent: Entry<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { + ent, + data: OnceCell::new(), + start_bound: OnceCell::new(), + end_bound: OnceCell::new(), + value: OnceCell::new(), + } + } +} + +impl<'a, S, C, T> crate::memtable::RawRangeEntry<'a> for RangeUpdateEntry<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + #[inline] + fn raw_start_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_update(self.ent.key())); + ent.start_bound() + } + + #[inline] + fn raw_end_bound(&self) -> Bound<&'a [u8]> { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_update(self.ent.key())); + ent.end_bound() + } +} + +impl<'a, S, C, T> crate::memtable::RangeEntry<'a> for RangeUpdateEntry<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + type Key = as Pointee<'a>>::Output; + + #[inline] + fn start_bound(&self) -> Bound { + let start_bound = self.start_bound.get_or_init(|| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_update(self.ent.key())); + ent.start_bound().map( as Pointee>::from_input) + }); + start_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn end_bound(&self) -> Bound { + let end_bound = self.end_bound.get_or_init(|| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_update(self.ent.key())); + ent.end_bound().map( as Pointee>::from_input) + }); + end_bound.as_ref().map(|k| k.output()) + } + + #[inline] + fn next(&mut self) -> Option { + self.ent.next().map(Self::new) + } + + #[inline] + fn prev(&mut self) -> Option { + self.ent.prev().map(Self::new) + } + + #[inline] + fn version(&self) -> u64 { + self.ent.version() + } +} + +impl RangeUpdateEntry<'_, S, C, T> +where + C: 'static, + S: State, + T: Mode, +{ + /// Returns the version of the entry. 
+ #[inline] + pub fn version(&self) -> u64 { + self.ent.version() + } +} + +impl<'a, S, C, T> crate::memtable::RawRangeUpdateEntry<'a> for RangeUpdateEntry<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, &'a [u8]>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + type RawValue = S::Data<'a, &'a [u8]>; + + #[inline] + fn raw_value(&self) -> Self::RawValue { + let ent = self.data.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + match ptr { + Some(ptr) => self.ent.comparator().fetch_range_update(ptr), + None => self.ent.comparator().fetch_range_update(self.ent.key()), + } + }); + + S::raw(ent.value()) + } +} + +impl<'a, S, C, T> crate::memtable::RangeUpdateEntry<'a> for RangeUpdateEntry<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + type Value = S::Data<'a, S::Value>; + + #[inline] + fn value(&self) -> Self::Value { + let val = self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_update(ptr)); + + >>::from_input(ent.value()) + }); + S::into_state(data) + }); + >>::transfer(val) + } +} + +impl<'a, S, C, T> RangeUpdateEntry<'a, S, C, T> +where + C: 'static, + S: Transfer<'a, T::Value<'a>>, + S::Data<'a, S::Value>: 'a, + T: Mode, + T::Key<'a>: Pointee<'a, Input = &'a [u8]> + 'a, + T::RangeComparator: Comparator + RangeComparator, +{ + #[inline] + pub(in crate::memtable) fn into_value(self) -> S::Data<'a, T::Value<'a>> { + self.value.get_or_init(|| { + let ptr = S::leak(self.ent.value()); + + let data = ptr.map(|ptr| { + let ent = self + .data + .get_or_init(|| self.ent.comparator().fetch_range_update(ptr)); + + >>::from_input(ent.value()) + }); + S::into_state(data) + }); + self.value.into_inner().unwrap() + } +} + +/// The iterator for point entries. +pub struct IterRangeUpdate<'a, S, C, T> +where + S: State, + T: Mode, +{ + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, +} + +impl<'a, S, C, T> IterRangeUpdate<'a, S, C, T> +where + S: State, + T: Mode, +{ + #[inline] + pub(in crate::memtable) const fn new( + iter: Iter<'a, RecordPointer, RecordPointer, S, T::RangeComparator>, + ) -> Self { + Self { iter } + } +} + +impl<'a, S, C, T> Iterator for IterRangeUpdate<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::RangeComparator: Comparator + 'a, +{ + type Item = RangeUpdateEntry<'a, S, C, T>; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(RangeUpdateEntry::new) + } +} + +impl<'a, S, C, T> DoubleEndedIterator for IterRangeUpdate<'a, S, C, T> +where + C: 'static, + S: State, + T: Mode, + T::RangeComparator: Comparator + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(RangeUpdateEntry::new) + } +} + +/// The iterator over a subset of point entries. 
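// The `Range*` iterators here are generic over a query type `Q` plus a bound
// like `QueryComparator<RecordPointer, Query<Q>>`: the map stores opaque
// pointers, and the comparator decodes them in order to compare against a
// caller-supplied borrowed key. A self-contained sketch of that idea (the
// trait and names are illustrative stand-ins, not dbutils' actual API):
//
// use core::cmp::Ordering;
//
// // Stand-in for RecordPointer: an index into some backing storage.
// #[derive(Clone, Copy)]
// struct Ptr(usize);
//
// trait QueryCompare<Q: ?Sized> {
//     fn compare(&self, stored: Ptr, query: &Q) -> Ordering;
// }
//
// struct StrStorage(Vec<&'static str>);
//
// impl QueryCompare<str> for StrStorage {
//     fn compare(&self, stored: Ptr, query: &str) -> Ordering {
//         self.0[stored.0].cmp(query) // decode the pointer, then compare
//     }
// }
//
// fn main() {
//     let storage = StrStorage(vec!["apple", "pear"]);
//     assert_eq!(storage.compare(Ptr(0), "apple"), Ordering::Equal);
//     assert_eq!(storage.compare(Ptr(1), "aaa"), Ordering::Greater);
// }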
+pub struct RangeRangeUpdate<'a, S, Q, R, C, T> +where + S: State, + Q: ?Sized, + T: Mode, + R: RangeBounds, +{ + range: + Range<'a, RecordPointer, RecordPointer, S, Query, QueryRange, T::RangeComparator>, +} + +impl<'a, S, Q, R, C, T> RangeRangeUpdate<'a, S, Q, R, C, T> +where + S: State, + Q: ?Sized, + T: Mode, + R: RangeBounds, +{ + #[inline] + pub(in crate::memtable) const fn new( + range: Range< + 'a, + RecordPointer, + RecordPointer, + S, + Query, + QueryRange, + T::RangeComparator, + >, + ) -> Self { + Self { range } + } +} + +impl<'a, S, Q, R, C, T> Iterator for RangeRangeUpdate<'a, S, Q, R, C, T> +where + C: 'static, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: QueryComparator> + 'a, +{ + type Item = RangeUpdateEntry<'a, S, C, T>; + #[inline] + fn next(&mut self) -> Option { + self.range.next().map(RangeUpdateEntry::new) + } +} + +impl<'a, S, Q, R, C, T> DoubleEndedIterator for RangeRangeUpdate<'a, S, Q, R, C, T> +where + C: 'static, + S: State, + R: RangeBounds, + Q: ?Sized, + T: Mode, + T::RangeComparator: QueryComparator> + 'a, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.range.next_back().map(RangeUpdateEntry::new) + } +} diff --git a/src/options.rs b/src/options.rs index d3859de4..91c7a26f 100644 --- a/src/options.rs +++ b/src/options.rs @@ -1,5 +1,4 @@ use rarena_allocator::{Freelist, Options as ArenaOptions}; -pub use skl::KeySize; use super::{CURRENT_VERSION, HEADER_SIZE}; @@ -13,7 +12,7 @@ pub(crate) use memmap::*; /// Options for the WAL. #[derive(Debug, Clone)] pub struct Options { - maximum_key_size: KeySize, + maximum_key_size: u32, maximum_value_size: u32, sync: bool, magic_version: u16, @@ -66,7 +65,7 @@ impl Options { #[inline] pub const fn new() -> Self { Self { - maximum_key_size: KeySize::new(), + maximum_key_size: u16::MAX as u32, maximum_value_size: u32::MAX, sync: true, magic_version: 0, @@ -180,13 +179,13 @@ impl Options { /// ## Example /// /// ```rust - /// use orderwal::{Options, KeySize}; + /// use orderwal::Options; /// - /// let options = Options::new().with_maximum_key_size(KeySize::with(1024)); - /// assert_eq!(options.maximum_key_size(), KeySize::with(1024)); + /// let options = Options::new().with_maximum_key_size(1024); + /// assert_eq!(options.maximum_key_size(), 1024); /// ``` #[inline] - pub const fn maximum_key_size(&self) -> KeySize { + pub const fn maximum_key_size(&self) -> u32 { self.maximum_key_size } @@ -249,13 +248,13 @@ impl Options { /// ## Example /// /// ```rust - /// use orderwal::{Options, KeySize}; + /// use orderwal::Options; /// - /// let options = Options::new().with_maximum_key_size(KeySize::with(1024)); - /// assert_eq!(options.maximum_key_size(), KeySize::with(1024)); + /// let options = Options::new().with_maximum_key_size(1024); + /// assert_eq!(options.maximum_key_size(), 1024); /// ``` #[inline] - pub const fn with_maximum_key_size(mut self, size: KeySize) -> Self { + pub const fn with_maximum_key_size(mut self, size: u32) -> Self { self.maximum_key_size = size; self } diff --git a/src/sealed.rs b/src/sealed.rs deleted file mode 100644 index 490225ea..00000000 --- a/src/sealed.rs +++ /dev/null @@ -1,1393 +0,0 @@ -use core::{ - ops::{Bound, RangeBounds}, - ptr::NonNull, -}; - -use among::Among; -use dbutils::{ - buffer::VacantBuffer, - equivalent::Comparable, - leb128::encoded_u64_varint_len, - types::{KeyRef, Type}, -}; -use rarena_allocator::{either::Either, Allocator, ArenaPosition, Buffer}; -use skl::KeySize; - -use crate::{ - memtable::{MemtableEntry, 
VersionedMemtableEntry}, - utils::merge_lengths, - wal::{KeyPointer, ValuePointer}, -}; - -use super::{ - batch::Batch, - checksum::{BuildChecksumer, Checksumer}, - error::Error, - memtable::{BaseTable, Memtable, MultipleVersionMemtable}, - options::Options, - types::{BufWriter, EncodedEntryMeta, EntryFlags}, - Flags, CHECKSUM_SIZE, HEADER_SIZE, MAGIC_TEXT, MAGIC_TEXT_SIZE, RECORD_FLAG_SIZE, VERSION_SIZE, - WAL_KIND_SIZE, -}; - -/// A marker trait which indicates that such pointer has a version. -pub trait WithVersion {} - -/// A marker trait which indicates that such pointer does not have a version. -pub trait WithoutVersion {} - -/// A marker trait which indicates that such WAL is immutable. -pub trait Immutable {} - -pub trait WalReader { - type Allocator: Allocator; - type Memtable; - - fn memtable(&self) -> &Self::Memtable; - - /// Returns the number of entries in the WAL. - fn len(&self) -> usize - where - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - self.memtable().len() - } - - /// Returns `true` if the WAL is empty. - #[inline] - fn is_empty(&self) -> bool - where - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - self.memtable().is_empty() - } - - #[inline] - fn iter(&self) -> ::Iterator<'_> - where - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Memtable::iter(self.memtable()) - } - - #[inline] - fn range<'a, Q, R>(&'a self, range: R) -> ::Range<'a, Q, R> - where - R: RangeBounds, - Q: ?Sized + Comparable::Key>>, - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - { - Memtable::range(self.memtable(), range) - } - - /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. - #[inline] - fn first(&self) -> Option<::Item<'_>> - where - Self::Memtable: Memtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Memtable::first(self.memtable()) - } - - /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. - #[inline] - fn last(&self) -> Option<::Item<'_>> - where - Self::Memtable: Memtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Memtable::last(self.memtable()) - } - - /// Returns `true` if the WAL contains the specified key. - fn contains_key(&self, key: &Q) -> bool - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: Memtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Memtable::contains(self.memtable(), key) - } - - /// Returns the value associated with the key. 
- #[inline] - fn get(&self, key: &Q) -> Option<::Item<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: Memtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Memtable::get(self.memtable(), key) - } - - #[inline] - fn upper_bound(&self, bound: Bound<&Q>) -> Option<::Item<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: Memtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Memtable::upper_bound(self.memtable(), bound) - } - - #[inline] - fn lower_bound(&self, bound: Bound<&Q>) -> Option<::Item<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: Memtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Memtable::lower_bound(self.memtable(), bound) - } -} - -pub trait MultipleVersionWalReader { - type Allocator: Allocator; - type Memtable; - - fn memtable(&self) -> &Self::Memtable; - - #[inline] - fn iter(&self, version: u64) -> ::Iterator<'_> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - MultipleVersionMemtable::iter(self.memtable(), version) - } - - #[inline] - fn range(&self, version: u64, range: R) -> ::Range<'_, Q, R> - where - R: RangeBounds, - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - MultipleVersionMemtable::range(self.memtable(), version, range) - } - - #[inline] - fn iter_all_versions( - &self, - version: u64, - ) -> ::IterAll<'_> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - MultipleVersionMemtable::iter_all_versions(self.memtable(), version) - } - - #[inline] - fn range_all_versions( - &self, - version: u64, - range: R, - ) -> ::RangeAll<'_, Q, R> - where - R: RangeBounds, - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().range_all_versions(version, range) - } - - /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. - #[inline] - fn first(&self, version: u64) -> Option<::Item<'_>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().first(version) - } - - /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. - /// - /// Compared to [`first`](MultipleVersionWalReader::first), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - #[inline] - fn first_versioned( - &self, - version: u64, - ) -> Option<::VersionedItem<'_>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().first_versioned(version) - } - - /// Returns the last key-value pair in the map. 
The key in this pair is the maximum key in the wal. - fn last(&self, version: u64) -> Option<::Item<'_>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().last(version) - } - - /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. - /// - /// Compared to [`last`](MultipleVersionWalReader::last), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - fn last_versioned( - &self, - version: u64, - ) -> Option<::VersionedItem<'_>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().last_versioned(version) - } - - /// Returns `true` if the WAL contains the specified key. - fn contains_key(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().contains(version, key) - } - - /// Returns `true` if the WAL contains the specified key. - /// - /// Compared to [`contains_key`](MultipleVersionWalReader::contains_key), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - fn contains_key_versioned(&self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().contains_versioned(version, key) - } - - /// Returns the entry associated with the key. The returned entry is the latest version of the key. - #[inline] - fn get(&self, version: u64, key: &Q) -> Option<::Item<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().get(version, key) - } - - /// Returns the value associated with the key. - /// - /// Compared to [`get`](MultipleVersionWalReader::get), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. 
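// The reader traits being removed here pair every accessor with a
// `*_versioned` twin: the plain form resolves to a live value, while the
// versioned form can also surface a tombstone, so callers can tell "removed at
// this version" apart from "never existed". A std-only sketch of that contract
// over a per-key version history (illustrative types only):
//
// use std::collections::BTreeMap;
//
// #[derive(Debug, PartialEq)]
// enum Versioned<V> {
//     Active(V),
//     Tombstone, // the key was removed at or before the queried version
// }
//
// struct Versions {
//     // version -> value written at that version (None = tombstone)
//     history: BTreeMap<u64, Option<&'static str>>,
// }
//
// impl Versions {
//     fn get_versioned(&self, version: u64) -> Option<Versioned<&'static str>> {
//         self.history.range(..=version).next_back().map(|(_, v)| match v {
//             Some(val) => Versioned::Active(*val),
//             None => Versioned::Tombstone,
//         })
//     }
//     fn get(&self, version: u64) -> Option<&'static str> {
//         match self.get_versioned(version)? {
//             Versioned::Active(v) => Some(v),
//             Versioned::Tombstone => None,
//         }
//     }
// }
//
// fn main() {
//     let mut history = BTreeMap::new();
//     history.insert(1, Some("a"));
//     history.insert(3, None); // removed at version 3
//     let vs = Versions { history };
//     assert_eq!(vs.get(2), Some("a"));
//     assert_eq!(vs.get(3), None);
//     assert_eq!(vs.get_versioned(3), Some(Versioned::Tombstone));
// }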
- fn get_versioned( - &self, - version: u64, - key: &Q, - ) -> Option<::VersionedItem<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().get_versioned(version, key) - } - - fn upper_bound( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option<::Item<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().upper_bound(version, bound) - } - - fn upper_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option<::VersionedItem<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().upper_bound_versioned(version, bound) - } - - fn lower_bound( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option<::Item<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().lower_bound(version, bound) - } - - fn lower_bound_versioned( - &self, - version: u64, - bound: Bound<&Q>, - ) -> Option<::VersionedItem<'_>> - where - Q: ?Sized + Comparable::Key>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - self.memtable().lower_bound_versioned(version, bound) - } -} - -pub trait Wal { - type Allocator: Allocator; - type Memtable; - - fn construct(arena: Self::Allocator, base: Self::Memtable, opts: Options, checksumer: S) -> Self; - - fn allocator(&self) -> &Self::Allocator; - - fn options(&self) -> &Options; - - fn memtable(&self) -> &Self::Memtable; - - fn memtable_mut(&mut self) -> &mut Self::Memtable; - - fn hasher(&self) -> &S; - - /// Returns `true` if this WAL instance is read-only. - #[inline] - fn read_only(&self) -> bool { - self.allocator().read_only() - } - - /// Returns the path of the WAL if it is backed by a file. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - fn path<'a>(&'a self) -> Option<&'a ::Path> - where - Self::Allocator: 'a, - { - self.allocator().path() - } - - /// Returns the maximum key size allowed in the WAL. - #[inline] - fn maximum_key_size(&self) -> KeySize { - self.options().maximum_key_size() - } - - /// Returns the maximum value size allowed in the WAL. - #[inline] - fn maximum_value_size(&self) -> u32 { - self.options().maximum_value_size() - } - - /// Returns the remaining capacity of the WAL. - #[inline] - fn remaining(&self) -> u32 { - self.allocator().remaining() as u32 - } - - /// Returns the capacity of the WAL. - #[inline] - fn capacity(&self) -> u32 { - self.options().capacity() - } - - /// Returns the reserved space in the WAL. - /// - /// ## Safety - /// - The writer must ensure that the returned slice is not modified. - /// - This method is not thread-safe, so be careful when using it. 
- unsafe fn reserved_slice<'a>(&'a self) -> &'a [u8] - where - Self::Allocator: 'a, - { - let reserved = self.options().reserved(); - if reserved == 0 { - return &[]; - } - - let allocator = self.allocator(); - let reserved_slice = allocator.reserved_slice(); - &reserved_slice[HEADER_SIZE..] - } - - /// Returns the mutable reference to the reserved slice. - /// - /// ## Safety - /// - The caller must ensure that the there is no others accessing reserved slice for either read or write. - /// - This method is not thread-safe, so be careful when using it. - #[allow(clippy::mut_from_ref)] - unsafe fn reserved_slice_mut<'a>(&'a self) -> &'a mut [u8] - where - Self::Allocator: 'a, - { - let reserved = self.options().reserved(); - if reserved == 0 { - return &mut []; - } - - let allocator = self.allocator(); - let reserved_slice = allocator.reserved_slice_mut(); - &mut reserved_slice[HEADER_SIZE..] - } - - /// Flushes the to disk. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - fn flush(&self) -> Result<(), Error> - where - Self::Memtable: BaseTable, - { - if !self.read_only() { - self.allocator().flush().map_err(Into::into) - } else { - Err(Error::read_only()) - } - } - - /// Flushes the to disk. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - fn flush_async(&self) -> Result<(), Error> - where - Self::Memtable: BaseTable, - { - if !self.read_only() { - self.allocator().flush_async().map_err(Into::into) - } else { - Err(Error::read_only()) - } - } - - #[inline] - fn insert_pointer<'a>( - &'a self, - version: Option, - kp: KeyPointer<::Key>, - vp: Option::Value>>, - ) -> Result<(), Error> - where - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - let t = self.memtable(); - if let Some(vp) = vp { - t.insert(version, kp, vp).map_err(Error::memtable) - } else { - t.remove(version, kp).map_err(Error::memtable) - } - } - - #[inline] - fn insert_pointers<'a>( - &'a self, - mut ptrs: impl Iterator< - Item = ( - Option, - KeyPointer<::Key>, - Option::Value>>, - ), - >, - ) -> Result<(), Error> - where - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - ptrs.try_for_each(|(version, kp, vp)| self.insert_pointer(version, kp, vp)) - } - - fn insert<'a, KE, VE>( - &'a self, - version: Option, - kb: KE, - vb: VE, - ) -> Result<(), Among>> - where - KE: super::types::BufWriterOnce, - VE: super::types::BufWriterOnce, - S: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.update(version, kb, Some(vb)) - } - - fn remove<'a, KE>( - &'a self, - version: Option, - kb: KE, - ) -> Result<(), Either>> - where - KE: super::types::BufWriterOnce, - S: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - struct Noop; - - impl super::types::BufWriterOnce for Noop { - type Error = (); - - #[inline(never)] - #[cold] - fn encoded_len(&self) -> usize { - 0 - } - - #[inline(never)] - #[cold] - fn write_once(self, _: &mut VacantBuffer<'_>) -> Result { - Ok(0) - } - } - - self - .update::(version, kb, None) - 
.map_err(Among::into_left_right) - } - - fn update<'a, KE, VE>( - &'a self, - version: Option, - kb: KE, - vb: Option, - ) -> Result<(), Among>> - where - KE: super::types::BufWriterOnce, - VE: super::types::BufWriterOnce, - S: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - if self.read_only() { - return Err(Among::Right(Error::read_only())); - } - - let res = { - let klen = kb.encoded_len(); - let (vlen, remove) = vb - .as_ref() - .map(|vb| (vb.encoded_len(), false)) - .unwrap_or((0, true)); - let encoded_entry_meta = check( - klen, - vlen, - version.is_some(), - self.maximum_key_size().to_u32(), - self.maximum_value_size(), - self.read_only(), - ) - .map_err(Either::Right)?; - - let allocator = self.allocator(); - - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - let is_ondisk = allocator.is_ondisk(); - - let buf = allocator.alloc_bytes(encoded_entry_meta.entry_size); - let mut cks = self.hasher().build_checksumer(); - - match buf { - Err(e) => Err(Among::Right(Error::from_insufficient_space(e))), - Ok(mut buf) => { - unsafe { - // We allocate the buffer with the exact size, so it's safe to write to the buffer. - let flag = Flags::COMMITTED.bits(); - - cks.update(&[flag]); - - buf.put_u8_unchecked(Flags::empty().bits()); - let written = buf.put_u64_varint_unchecked(encoded_entry_meta.packed_kvlen); - debug_assert_eq!( - written, encoded_entry_meta.packed_kvlen_size, - "the precalculated size should be equal to the written size" - ); - - let mut entry_flag = if !remove { - EntryFlags::empty() - } else { - EntryFlags::REMOVED - }; - - if let Some(version) = version { - entry_flag |= EntryFlags::VERSIONED; - buf.put_u8_unchecked(entry_flag.bits()); - buf.put_u64_le_unchecked(version); - } else { - buf.put_u8_unchecked(entry_flag.bits()); - } - - let ko = encoded_entry_meta.key_offset(); - let ptr = buf.as_mut_ptr().add(ko); - buf.set_len(encoded_entry_meta.entry_size as usize - VERSION_SIZE); - - let mut key_buf = VacantBuffer::new( - encoded_entry_meta.klen as usize, - NonNull::new_unchecked(ptr), - ); - let written = kb.write_once(&mut key_buf).map_err(Among::Left)?; - debug_assert_eq!( - written, encoded_entry_meta.klen as usize, - "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", - encoded_entry_meta.klen, written, - ); - - if let Some(vb) = vb { - let vo = encoded_entry_meta.value_offset(); - let mut value_buf = VacantBuffer::new( - encoded_entry_meta.vlen as usize, - NonNull::new_unchecked(buf.as_mut_ptr().add(vo)), - ); - let written = vb.write_once(&mut value_buf).map_err(Among::Middle)?; - - debug_assert_eq!( - written, encoded_entry_meta.vlen as usize, - "the actual bytes written to the value buffer not equal to the expected size, expected {} but got {}.", - encoded_entry_meta.vlen, written, - ); - } - - let cks = { - cks.update(&buf[1..]); - cks.digest() - }; - buf.put_u64_le_unchecked(cks); - - // commit the entry - buf[0] |= Flags::COMMITTED.bits(); - - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - if self.options().sync() && is_ondisk { - allocator - .flush_header_and_range(buf.offset(), encoded_entry_meta.entry_size as usize) - .map_err(|e| Among::Right(e.into()))?; - } - - buf.detach(); - let ptr = buf.as_ptr().add(encoded_entry_meta.key_offset() as usize); - let kp = KeyPointer::new(entry_flag, encoded_entry_meta.klen, ptr); - let vp = (!remove).then(|| { - 
ValuePointer::new(encoded_entry_meta.vlen, ptr.add(encoded_entry_meta.klen)) - }); - Ok((buf.buffer_offset(), kp, vp)) - } - } - } - }; - - res.and_then(|(offset, kp, vp)| { - self.insert_pointer(version, kp, vp).map_err(|e| { - unsafe { - self.allocator().rewind(ArenaPosition::Start(offset as u32)); - }; - Among::Right(e) - }) - }) - } - - fn insert_batch<'a, W, B>( - &'a self, - batch: &mut B, - ) -> Result< - (), - Among<::Error, ::Error, Error>, - > - where - B: Batch, - B::Key: BufWriter, - B::Value: BufWriter, - S: BuildChecksumer, - W: Constructable, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - if self.read_only() { - return Err(Among::Right(Error::read_only())); - } - - let opts = self.options(); - let maximum_key_size = opts.maximum_key_size().to_u32(); - let minimum_value_size = opts.maximum_value_size(); - let start_offset = unsafe { - let (mut cursor, _allocator, mut buf) = batch - .iter_mut() - .try_fold((0u32, 0u64), |(num_entries, size), ent| { - let klen = ent.encoded_key_len(); - let vlen = ent.value_len(); - check_batch_entry(klen, vlen, maximum_key_size, minimum_value_size, ent.internal_version().is_some()).map(|meta| { - let ent_size = meta.entry_size as u64; - ent.set_encoded_meta(meta); - (num_entries + 1, size + ent_size) - }) - }) - .and_then(|(num_entries, batch_encoded_size)| { - // safe to cast batch_encoded_size to u32 here, we already checked it's less than capacity (less than u32::MAX). - let batch_meta = merge_lengths(num_entries, batch_encoded_size as u32); - let batch_meta_size = encoded_u64_varint_len(batch_meta); - let allocator = self.allocator(); - let remaining = allocator.remaining() as u64; - let total_size = RECORD_FLAG_SIZE as u64 - + batch_meta_size as u64 - + batch_encoded_size - + CHECKSUM_SIZE as u64; - if total_size > remaining { - return Err(Error::insufficient_space(total_size, remaining as u32)); - } - - let mut buf = allocator - .alloc_bytes(total_size as u32) - .map_err(Error::from_insufficient_space)?; - - let flag = Flags::BATCHING; - - buf.put_u8_unchecked(flag.bits()); - let size = buf.put_u64_varint_unchecked(batch_meta); - debug_assert_eq!( - size, batch_meta_size, - "the actual encoded u64 varint length ({}) doos not match the length ({}) returned by `dbutils::leb128::encoded_u64_varint_len`, please report bug to https://github.com/al8n/layer0/issues", - size, batch_meta_size, - ); - - Ok((RECORD_FLAG_SIZE + batch_meta_size, allocator, buf)) - }) - .map_err(Among::Right)?; - - for ent in batch.iter_mut() { - let meta = ent.encoded_meta(); - let version_size = if ent.internal_version().is_some() { - VERSION_SIZE - } else { - 0 - }; - - let remaining = buf.remaining(); - if remaining - < meta.packed_kvlen_size + EntryFlags::SIZE + version_size + meta.klen + meta.vlen - { - return Err(Among::Right( - Error::larger_batch_size(buf.capacity() as u32), - )); - } - - let ent_len_size = buf.put_u64_varint_unchecked(meta.packed_kvlen); - debug_assert_eq!( - ent_len_size, meta.packed_kvlen_size, - "the actual encoded u64 varint length ({}) doos not match the length ({}) returned by `dbutils::leb128::encoded_u64_varint_len`, please report bug to https://github.com/al8n/layer0/issues", - ent_len_size, meta.packed_kvlen_size, - ); - - buf.put_u8_unchecked(ent.flag.bits()); - let ptr = buf.as_mut_ptr(); - let (key_ptr, val_ptr) = if let Some(version) = ent.internal_version() { - buf.put_u64_le_unchecked(version); - - ( - ptr.add(cursor + 
meta.key_offset()), - ptr.add(cursor + meta.value_offset()), - ) - } else { - ( - ptr.add(cursor + meta.key_offset()), - ptr.add(cursor + meta.value_offset()), - ) - }; - buf.set_len(cursor + meta.value_offset()); - - let (kb, vb) = (ent.key(), ent.value()); - let mut key_buf = VacantBuffer::new(meta.klen, NonNull::new_unchecked(key_ptr)); - let written = kb.write(&mut key_buf).map_err(Among::Left)?; - debug_assert_eq!( - written, meta.klen, - "the actual bytes written to the key buffer not equal to the expected size, expected {} but got {}.", - meta.klen, written, - ); - - buf.set_len(cursor + meta.checksum_offset()); - if let Some(vb) = vb { - let mut value_buf = VacantBuffer::new(meta.vlen, NonNull::new_unchecked(val_ptr)); - let written = vb.write(&mut value_buf).map_err(Among::Middle)?; - - debug_assert_eq!( - written, meta.vlen, - "the actual bytes written to the value buffer not equal to the expected size, expected {} but got {}.", - meta.vlen, written, - ); - } - - let entry_size = meta.entry_size as usize; - let kp = KeyPointer::new(ent.flag, meta.klen, key_ptr); - let vp = vb.is_some().then(|| ValuePointer::new(meta.vlen, val_ptr)); - ent.set_pointer(kp, vp); - cursor += entry_size; - } - - let total_size = buf.capacity(); - if cursor + CHECKSUM_SIZE != total_size { - return Err(Among::Right(Error::batch_size_mismatch( - total_size as u32 - CHECKSUM_SIZE as u32, - cursor as u32, - ))); - } - - let mut cks = self.hasher().build_checksumer(); - let committed_flag = Flags::BATCHING | Flags::COMMITTED; - cks.update(&[committed_flag.bits()]); - cks.update(&buf[1..]); - let checksum = cks.digest(); - buf.put_u64_le_unchecked(checksum); - - // commit the entry - buf[0] = committed_flag.bits(); - - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - if self.options().sync() && _allocator.is_ondisk() { - _allocator - .flush_header_and_range(Buffer::offset(&buf), buf.capacity()) - .map_err(|e| Among::Right(e.into()))?; - } - buf.detach(); - Buffer::buffer_offset(&buf) - }; - - self - .insert_pointers(batch.iter_mut().map(|e| { - let (kp, vp) = e.take_pointer().unwrap(); - (e.internal_version(), kp, vp) - })) - .map_err(|e| { - // Safety: the writer is single threaded, the memory chunk in buf cannot be accessed by other threads, - // so it's safe to rewind the arena. 
- unsafe { - self - .allocator() - .rewind(ArenaPosition::Start(start_offset as u32)); - } - Among::Right(e) - }) - } -} - -impl WalReader for T -where - T: Wal, - T::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, -{ - type Allocator = T::Allocator; - - type Memtable = T::Memtable; - - #[inline] - fn memtable(&self) -> &Self::Memtable { - T::memtable(self) - } -} - -impl MultipleVersionWalReader for T -where - T: Wal, - T::Memtable: MultipleVersionMemtable, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - for<'a> ::VersionedItem<'a>: VersionedMemtableEntry<'a>, -{ - type Allocator = T::Allocator; - - type Memtable = T::Memtable; - - #[inline] - fn memtable(&self) -> &Self::Memtable { - T::memtable(self) - } -} - -pub trait Constructable: Sized { - type Allocator: Allocator + 'static; - type Wal: Wal + 'static; - type Memtable: BaseTable; - type Checksumer; - type Reader; - - #[inline] - fn allocator<'a>(&'a self) -> &'a Self::Allocator - where - Self::Allocator: 'a, - Self::Wal: 'a, - { - self.as_wal().allocator() - } - - fn as_wal(&self) -> &Self::Wal; - - fn new_in( - arena: Self::Allocator, - opts: Options, - memtable_opts: ::Options, - cks: Self::Checksumer, - ) -> Result> { - unsafe { - let slice = arena.reserved_slice_mut(); - let mut cursor = 0; - slice[0..MAGIC_TEXT_SIZE].copy_from_slice(&MAGIC_TEXT); - cursor += MAGIC_TEXT_SIZE; - slice[MAGIC_TEXT_SIZE] = ::kind() as u8; - cursor += WAL_KIND_SIZE; - slice[cursor..HEADER_SIZE].copy_from_slice(&opts.magic_version().to_le_bytes()); - } - - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - let this = arena - .flush_range(0, HEADER_SIZE) - .map_err(Into::into) - .and_then(|_| { - Self::Memtable::new(memtable_opts) - .map(|memtable| { - >::construct(arena, memtable, opts, cks) - }) - .map_err(Error::memtable) - }); - - #[cfg(not(all(feature = "memmap", not(target_family = "wasm"))))] - let this = Self::Memtable::new(memtable_opts) - .map(|memtable| >::construct(arena, memtable, opts, cks)) - .map_err(Error::memtable); - - this - } - - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - fn replay<'a>( - arena: Self::Allocator, - opts: Options, - memtable_opts: ::Options, - ro: bool, - checksumer: Self::Checksumer, - ) -> Result> - where - Self::Checksumer: BuildChecksumer, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - { - use super::{types::Kind, utils::split_lengths}; - use dbutils::leb128::decode_u64_varint; - - let slice = arena.reserved_slice(); - let mut cursor = 0; - let magic_text = &slice[0..MAGIC_TEXT_SIZE]; - if magic_text != MAGIC_TEXT { - return Err(Error::magic_text_mismatch()); - } - cursor += MAGIC_TEXT_SIZE; - let kind = Kind::try_from(slice[cursor])?; - let created_kind = ::kind(); - if kind != created_kind { - return Err(Error::wal_kind_mismatch(kind, created_kind)); - } - cursor += WAL_KIND_SIZE; - - let magic_version = u16::from_le_bytes(slice[cursor..HEADER_SIZE].try_into().unwrap()); - if magic_version != opts.magic_version() { - return Err(Error::magic_version_mismatch()); - } - - let set = >::Memtable::new(memtable_opts) - .map_err(Error::memtable)?; - - let mut cursor = arena.data_offset(); - let allocated = arena.allocated(); - let mut minimum_version = u64::MAX; - let mut maximum_version = 0; - - loop { - unsafe { - // we reached the end of the arena, if we have any remaining, then if means two possibilities: - // 1. 
the remaining is a partial entry, but it does not be persisted to the disk, so following the write-ahead log principle, we should discard it. - // 2. our file may be corrupted, so we discard the remaining. - if cursor + RECORD_FLAG_SIZE > allocated { - if !ro && cursor < allocated { - arena.rewind(ArenaPosition::Start(cursor as u32)); - arena.flush()?; - } - break; - } - - let header = arena.get_u8(cursor).unwrap(); - let flag = Flags::from_bits_retain(header); - - if !flag.contains(Flags::BATCHING) { - let (readed, encoded_len) = - arena - .get_u64_varint(cursor + RECORD_FLAG_SIZE) - .map_err(|e| { - #[cfg(feature = "tracing")] - tracing::error!(err=%e); - - Error::corrupted(e) - })?; - let (key_len, value_len) = split_lengths(encoded_len); - let key_len = key_len as usize; - let value_len = value_len as usize; - let entry_flag = arena - .get_u8(cursor + RECORD_FLAG_SIZE + readed) - .map_err(|e| { - #[cfg(feature = "tracing")] - tracing::error!(err=%e); - - Error::corrupted(e) - })?; - - let entry_flag = EntryFlags::from_bits_retain(entry_flag); - let version_size = if entry_flag.contains(EntryFlags::VERSIONED) { - VERSION_SIZE - } else { - 0 - }; - // Same as above, if we reached the end of the arena, we should discard the remaining. - let cks_offset = - RECORD_FLAG_SIZE + readed + EntryFlags::SIZE + version_size + key_len + value_len; - if cks_offset + CHECKSUM_SIZE > allocated { - // If the entry is committed, then it means our file is truncated, so we should report corrupted. - if flag.contains(Flags::COMMITTED) { - return Err(Error::corrupted("file is truncated")); - } - - if !ro { - arena.rewind(ArenaPosition::Start(cursor as u32)); - arena.flush()?; - } - - break; - } - - let cks = arena.get_u64_le(cursor + cks_offset).unwrap(); - if cks != checksumer.checksum_one(arena.get_bytes(cursor, cks_offset)) { - return Err(Error::corrupted("checksum mismatch")); - } - - // If the entry is not committed, we should not rewind - if !flag.contains(Flags::COMMITTED) { - if !ro { - arena.rewind(ArenaPosition::Start(cursor as u32)); - arena.flush()?; - } - - break; - } - - let ptr = arena.get_pointer(cursor + RECORD_FLAG_SIZE + readed); - let flag = EntryFlags::from_bits_retain(*ptr); - - let (version, ptr) = if flag.contains(EntryFlags::VERSIONED) { - let version_ptr = ptr.add(EntryFlags::SIZE); - let version = u64::from_le_bytes( - core::slice::from_raw_parts(version_ptr, VERSION_SIZE) - .try_into() - .unwrap(), - ); - minimum_version = minimum_version.min(version); - maximum_version = maximum_version.max(version); - (Some(version), version_ptr.add(VERSION_SIZE)) - } else { - (None, ptr.add(EntryFlags::SIZE)) - }; - - let kp = KeyPointer::new(flag, key_len, ptr); - if flag.contains(EntryFlags::REMOVED) { - set.remove(version, kp).map_err(Error::memtable)?; - } else { - let vp = ValuePointer::new(value_len, ptr.add(key_len)); - set.insert(version, kp, vp).map_err(Error::memtable)?; - } - - cursor += cks_offset + CHECKSUM_SIZE; - } else { - let (readed, encoded_len) = - arena - .get_u64_varint(cursor + RECORD_FLAG_SIZE) - .map_err(|e| { - #[cfg(feature = "tracing")] - tracing::error!(err=%e); - - Error::corrupted(e) - })?; - - let (num_entries, encoded_data_len) = split_lengths(encoded_len); - // Same as above, if we reached the end of the arena, we should discard the remaining. 
- let cks_offset = RECORD_FLAG_SIZE + readed + encoded_data_len as usize; - let total_size = cks_offset + CHECKSUM_SIZE; - - if total_size > allocated { - // If the entry is committed, then it means our file is truncated, so we should report corrupted. - if flag.contains(Flags::COMMITTED) { - return Err(Error::corrupted("file is truncated")); - } - - if !ro { - arena.rewind(ArenaPosition::Start(cursor as u32)); - arena.flush()?; - } - - break; - } - let cks = arena.get_u64_le(cursor + cks_offset).unwrap(); - let mut batch_data_buf = arena.get_bytes(cursor, cks_offset); - if cks != checksumer.checksum_one(batch_data_buf) { - return Err(Error::corrupted("checksum mismatch")); - } - - let mut sub_cursor = 0; - batch_data_buf = &batch_data_buf[RECORD_FLAG_SIZE + readed..]; - for _ in 0..num_entries { - let (kvlen, ent_len) = decode_u64_varint(batch_data_buf).map_err(|e| { - #[cfg(feature = "tracing")] - tracing::error!(err=%e); - - Error::corrupted(e) - })?; - - let (klen, vlen) = split_lengths(ent_len); - let klen = klen as usize; - let vlen = vlen as usize; - - let ptr = arena.get_pointer(cursor + RECORD_FLAG_SIZE + readed + sub_cursor + kvlen); - let flag = EntryFlags::from_bits_retain(*ptr); - - let (version, ptr, ent_len) = if flag.contains(EntryFlags::VERSIONED) { - let version_ptr = ptr.add(EntryFlags::SIZE); - let version = u64::from_le_bytes( - core::slice::from_raw_parts(version_ptr, VERSION_SIZE) - .try_into() - .unwrap(), - ); - minimum_version = minimum_version.min(version); - maximum_version = maximum_version.max(version); - let ent_len = kvlen + EntryFlags::SIZE + VERSION_SIZE + klen + vlen; - (Some(version), version_ptr.add(VERSION_SIZE), ent_len) - } else { - let ent_len = kvlen + EntryFlags::SIZE + klen + vlen; - (None, ptr.add(EntryFlags::SIZE), ent_len) - }; - - let kp = KeyPointer::new(flag, klen, ptr); - if flag.contains(EntryFlags::REMOVED) { - set.remove(version, kp).map_err(Error::memtable)?; - } else { - let vp = ValuePointer::new(vlen, ptr.add(klen)); - set.insert(version, kp, vp).map_err(Error::memtable)?; - } - - sub_cursor += ent_len; - batch_data_buf = &batch_data_buf[ent_len..]; - } - - debug_assert_eq!( - encoded_data_len as usize, sub_cursor, - "expected encoded batch data size ({}) is not equal to the actual size ({})", - encoded_data_len, sub_cursor, - ); - - cursor += total_size; - } - } - } - - Ok(>::construct( - arena, set, opts, checksumer, - )) - } - - fn from_core(core: Self::Wal) -> Self; -} - -#[inline] -const fn min_u64(a: u64, b: u64) -> u64 { - if a < b { - a - } else { - b - } -} - -#[inline] -const fn check( - klen: usize, - vlen: usize, - versioned: bool, - max_key_size: u32, - max_value_size: u32, - ro: bool, -) -> Result> { - if ro { - return Err(Error::read_only()); - } - - let max_ksize = min_u64(max_key_size as u64, u32::MAX as u64); - let max_vsize = min_u64(max_value_size as u64, u32::MAX as u64); - - if max_ksize < klen as u64 { - return Err(Error::key_too_large(klen as u64, max_key_size)); - } - - if max_vsize < vlen as u64 { - return Err(Error::value_too_large(vlen as u64, max_value_size)); - } - - let encoded_entry_meta = EncodedEntryMeta::new(klen, vlen, versioned); - if encoded_entry_meta.entry_size == u32::MAX { - let version_size = if versioned { VERSION_SIZE } else { 0 }; - return Err(Error::entry_too_large( - encoded_entry_meta.entry_size as u64, - min_u64( - RECORD_FLAG_SIZE as u64 - + 10 - + EntryFlags::SIZE as u64 - + version_size as u64 - + max_key_size as u64 - + max_value_size as u64, - u32::MAX as u64, - ), - )); - } - - 
Ok(encoded_entry_meta) -} - -#[inline] -fn check_batch_entry( - klen: usize, - vlen: usize, - max_key_size: u32, - max_value_size: u32, - versioned: bool, -) -> Result> { - let max_ksize = min_u64(max_key_size as u64, u32::MAX as u64); - let max_vsize = min_u64(max_value_size as u64, u32::MAX as u64); - - if max_ksize < klen as u64 { - return Err(Error::key_too_large(klen as u64, max_key_size)); - } - - if max_vsize < vlen as u64 { - return Err(Error::value_too_large(vlen as u64, max_value_size)); - } - - let encoded_entry_meta = EncodedEntryMeta::batch(klen, vlen, versioned); - if encoded_entry_meta.entry_size == u32::MAX { - let version_size = if versioned { VERSION_SIZE } else { 0 }; - return Err(Error::entry_too_large( - encoded_entry_meta.entry_size as u64, - min_u64( - 10 + EntryFlags::SIZE as u64 - + version_size as u64 - + max_key_size as u64 - + max_value_size as u64, - u32::MAX as u64, - ), - )); - } - - Ok(encoded_entry_meta) -} diff --git a/src/swmr.rs b/src/swmr.rs index cbac855f..dbc43f10 100644 --- a/src/swmr.rs +++ b/src/swmr.rs @@ -2,135 +2,21 @@ mod reader; mod wal; mod writer; +pub use reader::OrderWalReader; +pub use writer::OrderWal; + #[cfg(all( test, any( all_orderwal_tests, - test_swmr_constructor, - test_swmr_insert, - test_swmr_get, - test_swmr_iters, + test_generic_constructor, + test_generic_insert, + test_generic_get, + test_generic_iters, + test_dynamic_constructor, + test_dynamic_insert, + test_dynamic_get, + test_dynamic_iters, ) ))] mod tests; - -/// The ordered write-ahead log without multiple version support. -pub mod base { - use dbutils::checksum::Crc32; - - use super::{reader, writer}; - #[cfg(feature = "std")] - use crate::memtable::linked::Table as BaseLinkedTable; - use crate::memtable::{ - alternative::Table as BaseAlternativeTable, arena::Table as BaseArenaTable, - }; - - pub use crate::{ - memtable::arena::TableOptions as ArenaTableOptions, - types::base::{Entry, Key, Value}, - wal::base::{Iter, Keys, RangeKeys, RangeValues, Reader, Writer}, - }; - - /// An memory table for [`OrderWal`] or [`OrderWalReader`] based on [`linked::Table`](BaseLinkedTable). - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - pub type LinkedTable = BaseLinkedTable; - - /// An memory table for [`OrderWal`] or [`OrderWalReader`] based on [`arena::Table`](BaseArenaTable). - pub type ArenaTable = BaseArenaTable; - - /// An memory table for [`OrderWal`] or [`OrderWalReader`] based on [`alternative::Table`](BaseAlternativeTable). - pub type AlternativeTable = BaseAlternativeTable; - - /// The default memory table used by [`OrderWal`] or [`OrderWalReader`]. - #[cfg(feature = "std")] - pub type DefaultTable = LinkedTable; - - /// The default memory table used by [`OrderWal`] or [`OrderWalReader`]. - #[cfg(not(feature = "std"))] - pub type DefaultTable = ArenaTable; - - /// A generic ordered write-ahead log implementation for multiple threads environments. 
- /// - /// ```text - /// +----------------------+-------------------------+--------------------+ - /// | magic text (6 bytes) | magic version (2 bytes) | header (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+-----------------+--------------------+ - /// | flag (1 byte) | key len (4 bytes) | key (n bytes) | value len (4 bytes) | value (n bytes) | checksum (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+-----------------|--------------------+ - /// | flag (1 byte) | key len (4 bytes) | key (n bytes) | value len (4 bytes) | value (n bytes) | checksum (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+-----------------+--------------------+ - /// | flag (1 byte) | key len (4 bytes) | key (n bytes) | value len (4 bytes) | value (n bytes) | checksum (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+-----------------+--------------------+ - /// | ... | ... | ... | ... | ... | ... | - /// +----------------------+-------------------------+--------------------+---------------------+-----------------+--------------------+ - /// | ... | ... | ... | ... | ... | ... | - /// +----------------------+-------------------------+--------------------+---------------------+-----------------+--------------------+ - /// ``` - pub type OrderWal, S = Crc32> = writer::OrderWal; - - /// Immutable reader for the generic ordered write-ahead log [`OrderWal`]. - pub type OrderWalReader, S = Crc32> = - reader::OrderWalReader; -} - -/// A multiple version ordered write-ahead log implementation for multiple threads environments. -pub mod multiple_version { - use dbutils::checksum::Crc32; - - use super::{reader, writer}; - #[cfg(feature = "std")] - use crate::memtable::linked::MultipleVersionTable as BaseLinkedTable; - use crate::memtable::{ - alternative::MultipleVersionTable as BaseAlternativeTable, - arena::MultipleVersionTable as BaseArenaTable, - }; - - pub use crate::{ - memtable::arena::TableOptions as ArenaTableOptions, - types::multiple_version::{Entry, Key, Value, VersionedEntry}, - wal::multiple_version::{ - Iter, IterAll, Keys, RangeAll, RangeKeys, RangeValues, Reader, Writer, - }, - }; - - /// An memory table for multiple version [`OrderWal`] or [`OrderWalReader`] based on [`linked::MultipleVersionTable`](BaseLinkedTable). - #[cfg(feature = "std")] - #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - pub type LinkedTable = BaseLinkedTable; - - /// An memory table for multiple version [`OrderWal`] or [`OrderWalReader`] based on [`arena::MultipleVersionTable`](BaseArenaTable). - pub type ArenaTable = BaseArenaTable; - - /// An memory table for multiple version [`OrderWal`] or [`OrderWalReader`] based on [`alternative::MultipleVersionTable`](BaseAlternativeTable). - pub type AlternativeTable = BaseAlternativeTable; - - /// The default memory table used by [`OrderWal`] or [`OrderWalReader`]. - #[cfg(feature = "std")] - pub type DefaultTable = LinkedTable; - - /// The default memory table used by [`OrderWal`] or [`OrderWalReader`]. - #[cfg(not(feature = "std"))] - pub type DefaultTable = ArenaTable; - - /// A multiple versioned generic ordered write-ahead log implementation for multiple threads environments. 
- /// - /// ```text - /// +----------------------+-------------------------+--------------------+ - /// | magic text (6 bytes) | magic version (2 bytes) | header (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+---------------------+-----------------+--------------------+ - /// | flag (1 byte) | version (8 bytes) | key len (4 bytes) | key (n bytes) | value len (4 bytes) | value (n bytes) | checksum (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+---------------------+-----------------+--------------------+ - /// | flag (1 byte) | version (8 bytes) | key len (4 bytes) | key (n bytes) | value len (4 bytes) | value (n bytes) | checksum (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+---------------------+-----------------+--------------------+ - /// | flag (1 byte) | version (8 bytes) | key len (4 bytes) | key (n bytes) | value len (4 bytes) | value (n bytes) | checksum (8 bytes) | - /// +----------------------+-------------------------+--------------------+---------------------+---------------------+-----------------+--------------------+ - /// | ... | ... | ... | ... | ... | ... | ,,, | - /// +----------------------+-------------------------+--------------------+---------------------+---------------------+-----------------+--------------------+ - /// ``` - pub type OrderWal, S = Crc32> = writer::OrderWal; - - /// Immutable reader for the multiple versioned generic ordered write-ahead log [`OrderWal`]. - pub type OrderWalReader, S = Crc32> = - reader::OrderWalReader; -} diff --git a/src/swmr/reader.rs b/src/swmr/reader.rs index 92965cfc..61e93e8d 100644 --- a/src/swmr/reader.rs +++ b/src/swmr/reader.rs @@ -1,65 +1,68 @@ -use std::sync::Arc; - +use super::{super::swmr::wal::OrderCore, writer::OrderWal}; +use crate::{log::Log, memtable::Memtable, Immutable}; use rarena_allocator::sync::Arena; - -use crate::{ - memtable::BaseTable, - sealed::{Constructable, Immutable}, - swmr::wal::OrderCore, -}; - -use super::writer::OrderWal; +use triomphe::Arc; /// An [`OrderWal`] reader. -pub struct OrderWalReader(OrderWal); +pub struct OrderWalReader(pub(crate) OrderWal); -impl core::fmt::Debug for OrderWalReader -where - K: ?Sized, - V: ?Sized, -{ +impl core::fmt::Debug for OrderWalReader { #[inline] fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_tuple("OrderWalReader").field(&self.0.core).finish() } } -impl Immutable for OrderWalReader {} +impl Immutable for OrderWalReader {} -impl OrderWalReader -where - K: ?Sized, - V: ?Sized, -{ +impl OrderWalReader { /// Creates a new read-only WAL reader. 
#[inline] - pub(super) fn new(wal: Arc>) -> Self { - Self(OrderWal::construct(wal)) + pub(crate) fn from_core(wal: Arc>) -> Self { + Self(OrderWal::from_core(wal)) } } -impl Constructable for OrderWalReader +impl Log for OrderWalReader where - K: ?Sized + 'static, - V: ?Sized + 'static, S: 'static, - M: BaseTable + 'static, + M: Memtable + 'static, { type Allocator = Arena; - type Wal = OrderCore; type Memtable = M; type Checksumer = S; - type Reader = OrderWalReader; + type Reader = OrderWalReader; + + #[inline] + fn allocator<'a>(&'a self) -> &'a Self::Allocator + where + Self::Allocator: 'a, + { + self.0.allocator() + } + + #[inline] + fn construct( + arena: Self::Allocator, + base: Self::Memtable, + opts: crate::Options, + checksumer: Self::Checksumer, + ) -> Self { + Self(OrderWal::construct(arena, base, opts, checksumer)) + } + + #[inline] + fn options(&self) -> &crate::Options { + self.0.options() + } #[inline] - fn as_wal(&self) -> &Self::Wal { - self.0.as_wal() + fn memtable(&self) -> &Self::Memtable { + self.0.memtable() } #[inline] - fn from_core(core: Self::Wal) -> Self { - Self(OrderWal { - core: Arc::new(core), - }) + fn hasher(&self) -> &Self::Checksumer { + self.0.hasher() } } diff --git a/src/swmr/tests.rs b/src/swmr/tests.rs index 4f64104c..753bdf87 100644 --- a/src/swmr/tests.rs +++ b/src/swmr/tests.rs @@ -8,11 +8,10 @@ use std::{ vec::Vec, }; -use base::{AlternativeTable, OrderWal, OrderWalReader}; use dbutils::{ equivalent::{Comparable, Equivalent}, leb128::{decode_u64_varint, encode_u64_varint, encoded_u64_varint_len}, - types::{KeyRef, Type, TypeRef}, + types::{Type, TypeRef}, }; use super::*; @@ -151,14 +150,6 @@ macro_rules! expand_unit_tests { }; } -type OrderWalAlternativeTable = OrderWal>; -type OrderWalReaderAlternativeTable = OrderWalReader>; - -type MultipleVersionOrderWalAlternativeTable = - multiple_version::OrderWal>; -type MultipleVersionOrderWalReaderAlternativeTable = - multiple_version::OrderWalReader>; - #[doc(hidden)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct Person { @@ -253,28 +244,6 @@ impl Comparable> for Person { } } -impl KeyRef<'_, Person> for PersonRef<'_> { - fn compare(&self, a: &Q) -> cmp::Ordering - where - Q: ?Sized + Comparable, - { - Comparable::compare(a, self).reverse() - } - - unsafe fn compare_binary(this: &[u8], other: &[u8]) -> cmp::Ordering { - let (this_id_size, this_id) = decode_u64_varint(this).unwrap(); - let (other_id_size, other_id) = decode_u64_varint(other).unwrap(); - PersonRef { - id: this_id, - name: std::str::from_utf8(&this[this_id_size..]).unwrap(), - } - .cmp(&PersonRef { - id: other_id, - name: std::str::from_utf8(&other[other_id_size..]).unwrap(), - }) - } -} - impl Type for Person { type Ref<'a> = PersonRef<'a>; type Error = dbutils::error::InsufficientBuffer; @@ -319,26 +288,26 @@ impl PersonRef<'_> { } } -#[cfg(all(test, any(test_swmr_constructor, all_orderwal_tests)))] -mod constructor; - -#[cfg(all(test, any(test_swmr_insert, all_orderwal_tests)))] -mod insert; - -#[cfg(all(test, any(test_swmr_iters, all_orderwal_tests)))] -mod iters; - -#[cfg(all(test, any(test_swmr_get, all_orderwal_tests)))] -mod get; - -#[cfg(all(test, any(test_swmr_multiple_version_constructor, all_orderwal_tests)))] -mod multiple_version_constructor; - -#[cfg(all(test, any(test_swmr_multiple_version_get, all_orderwal_tests)))] -mod multiple_version_get; - -#[cfg(all(test, any(test_swmr_multiple_version_insert, all_orderwal_tests)))] -mod multiple_version_insert; - -#[cfg(all(test, 
any(test_swmr_multiple_version_iters, all_orderwal_tests)))] -mod multiple_version_iters; +#[cfg(all( + test, + any( + test_generic_iters, + test_generic_get, + test_generic_insert, + test_generic_constructor, + all_orderwal_tests + ) +))] +mod generic; + +#[cfg(all( + test, + any( + test_dynamic_iters, + test_dynamic_get, + test_dynamic_insert, + test_dynamic_constructor, + all_orderwal_tests + ) +))] +mod dynamic; diff --git a/src/swmr/tests/constructor.rs b/src/swmr/tests/constructor.rs deleted file mode 100644 index 27c54d89..00000000 --- a/src/swmr/tests/constructor.rs +++ /dev/null @@ -1,113 +0,0 @@ -use base::{Reader, Writer}; -use skl::KeySize; - -use crate::memtable::{ - alternative::{Table, TableOptions}, - Memtable, MemtableEntry, -}; - -use super::*; - -fn zero_reserved(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - unsafe { - assert_eq!(wal.reserved_slice(), b""); - assert_eq!(wal.reserved_slice_mut(), b""); - - let wal = wal.reader(); - assert_eq!(wal.reserved_slice(), b""); - } -} - -fn reserved(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - unsafe { - let buf = wal.reserved_slice_mut(); - buf.copy_from_slice(b"al8n"); - assert_eq!(wal.reserved_slice(), b"al8n"); - assert_eq!(wal.reserved_slice_mut(), b"al8n"); - - let wal = wal.reader(); - assert_eq!(wal.reserved_slice(), b"al8n"); - } -} - -#[cfg(feature = "std")] -expand_unit_tests!( - "linked": OrderWalAlternativeTable [TableOptions::Linked]: Table<_, _> { - zero_reserved, - } -); - -#[cfg(feature = "std")] -expand_unit_tests!( - "linked": OrderWalAlternativeTable [TableOptions::Linked]: Table<_, _> { - reserved({ - crate::Builder::new() - .with_capacity(MB) - .with_reserved(4) - }), - } -); - -expand_unit_tests!( - "arena": OrderWalAlternativeTable [TableOptions::Arena(Default::default())]: Table<_, _> { - zero_reserved, - } -); - -expand_unit_tests!( - "arena": OrderWalAlternativeTable [TableOptions::Arena(Default::default())]: Table<_, _> { - reserved({ - crate::Builder::new() - .with_capacity(MB) - .with_reserved(4) - }), - } -); - -#[test] -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -#[cfg_attr(miri, ignore)] -fn reopen_wrong_kind() { - use crate::Builder; - - let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("test_reopen_wrong_kind"); - let wal = unsafe { - Builder::new() - .with_capacity(MB) - .with_maximum_key_size(KeySize::with(10)) - .with_maximum_value_size(10) - .with_create_new(true) - .with_read(true) - .with_write(true) - .map_mut::, _>(path.as_path()) - .unwrap() - }; - - assert!(!wal.read_only()); - assert_eq!(wal.capacity(), MB); - assert!(wal.remaining() < MB); - assert_eq!(wal.maximum_key_size(), 10); - assert_eq!(wal.maximum_value_size(), 10); - assert_eq!(wal.path().unwrap().as_path(), path.as_path()); - assert_eq!(wal.options().maximum_key_size(), 10); - - let err = unsafe { - Builder::new() - .with_capacity(MB) - .with_read(true) - .map_mut::, _>(path.as_path()) - .unwrap_err() - }; - assert!(matches!(err, crate::error::Error::KindMismatch { .. 
})); -} diff --git a/src/swmr/tests/dynamic.rs b/src/swmr/tests/dynamic.rs new file mode 100644 index 00000000..8e14205a --- /dev/null +++ b/src/swmr/tests/dynamic.rs @@ -0,0 +1,13 @@ +use super::*; + +#[cfg(all(test, any(test_dynamic_insert, all_orderwal_tests)))] +mod insert; + +#[cfg(all(test, any(test_dynamic_iters, all_orderwal_tests)))] +mod iters; + +#[cfg(all(test, any(test_dynamic_get, all_orderwal_tests)))] +mod get; + +#[cfg(all(test, any(test_dynamic_constructor, all_orderwal_tests)))] +mod constructor; diff --git a/src/swmr/tests/dynamic/constructor.rs b/src/swmr/tests/dynamic/constructor.rs new file mode 100644 index 00000000..a660b006 --- /dev/null +++ b/src/swmr/tests/dynamic/constructor.rs @@ -0,0 +1,75 @@ +use crate::dynamic::{DynamicMemtable, OrderWal, Reader, Writer}; + +#[cfg(feature = "bounded")] +use crate::dynamic::BoundedTable; + +#[cfg(feature = "unbounded")] +use crate::dynamic::UnboundedTable; + +use super::*; + +fn zero_reserved(wal: &mut OrderWal) +where + M: DynamicMemtable + 'static, + M::Error: std::fmt::Debug, +{ + unsafe { + assert_eq!(wal.reserved_slice(), b""); + assert_eq!(wal.reserved_slice_mut(), b""); + + let wal = wal.reader(); + assert_eq!(wal.reserved_slice(), b""); + } +} + +fn reserved(wal: &mut OrderWal) +where + M: DynamicMemtable + 'static, + M::Error: std::fmt::Debug, +{ + unsafe { + let buf = wal.reserved_slice_mut(); + buf.copy_from_slice(b"al8n"); + assert_eq!(wal.reserved_slice(), b"al8n"); + assert_eq!(wal.reserved_slice_mut(), b"al8n"); + + let wal = wal.reader(); + assert_eq!(wal.reserved_slice(), b"al8n"); + } +} + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + "unbounded": OrderWal [Default::default()]: UnboundedTable { + zero_reserved, + } +); + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + "unbounded": OrderWal [Default::default()]: UnboundedTable { + reserved({ + crate::Builder::new() + .with_capacity(MB) + .with_reserved(4) + }), + } +); + +#[cfg(feature = "bounded")] +expand_unit_tests!( + "bounded": OrderWal [Default::default()]: BoundedTable { + zero_reserved, + } +); + +#[cfg(feature = "bounded")] +expand_unit_tests!( + "bounded": OrderWal [Default::default()]: BoundedTable { + reserved({ + crate::Builder::new() + .with_capacity(MB) + .with_reserved(4) + }), + } +); diff --git a/src/swmr/tests/dynamic/get.rs b/src/swmr/tests/dynamic/get.rs new file mode 100644 index 00000000..3624aeae --- /dev/null +++ b/src/swmr/tests/dynamic/get.rs @@ -0,0 +1,774 @@ +use dbutils::{ + buffer::VacantBuffer, + state::{Active, MaybeTombstone}, + types::Type, +}; + +use core::ops::Bound; + +use crate::{ + dynamic::{DynamicMemtable, OrderWal, Reader, Writer}, + memtable::{Entry, MutableMemtable, RawEntry}, + types::{KeyBuilder, ValueBuilder}, +}; + +#[cfg(feature = "bounded")] +use crate::dynamic::BoundedTable; + +#[cfg(feature = "unbounded")] +use crate::dynamic::UnboundedTable; + +use super::*; + +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal [Default::default()]: UnboundedTable { + mvcc, + gt, + ge, + le, + lt, +}); + +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal [Default::default()]: BoundedTable { + mvcc, + gt, + ge, + le, + lt, +}); + +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal [Default::default()]: UnboundedTable { + insert, + unbounded_insert_with_value_builder, + unbounded_insert_with_key_builder, + unbounded_insert_with_bytes, + unbounded_insert_with_builders, +}); + +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal 
[Default::default()]: BoundedTable { + insert, + bounded_insert_with_value_builder, + bounded_insert_with_key_builder, + bounded_insert_with_bytes, + bounded_insert_with_builders, +}); + +fn mvcc(wal: &mut OrderWal) +where + M: DynamicMemtable + MutableMemtable + 'static, + M::Error: std::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Key = &'a [u8], Value = &'a [u8]> + + RawEntry<'a, RawValue = &'a [u8]> + + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = &'a [u8], Value = Option<&'a [u8]>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, +{ + wal.insert(1, b"a", b"a1").unwrap(); + wal.insert(3, b"a", b"a2").unwrap(); + wal.insert(1, b"c", b"c1").unwrap(); + wal.insert(3, b"c", b"c2").unwrap(); + + let ent = wal.get(1, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.get(2, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.get(3, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.get(4, b"a").unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + assert!(wal.get(0, b"b").is_none()); + assert!(wal.get(1, b"b").is_none()); + assert!(wal.get(2, b"b").is_none()); + assert!(wal.get(3, b"b").is_none()); + assert!(wal.get(4, b"b").is_none()); + + let ent = wal.get(1, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.get(2, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.get(3, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.get(4, b"c").unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(wal.get(5, b"d").is_none()); +} + +fn gt(wal: &mut OrderWal) +where + M: DynamicMemtable + MutableMemtable + 'static, + M::Error: std::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Key = &'a [u8], Value = &'a [u8]> + + RawEntry<'a, RawValue = &'a [u8]> + + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = &'a [u8], Value = Option<&'a [u8]>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, +{ + wal.insert(1, b"a", b"a1").unwrap(); + wal.insert(3, b"a", b"a2").unwrap(); + wal.insert(1, b"c", b"c1").unwrap(); + wal.insert(3, b"c", b"c2").unwrap(); + wal.insert(5, b"c", b"c3").unwrap(); + + assert!(wal.lower_bound(0, Bound::Excluded(b"a")).is_none()); + assert!(wal.lower_bound(0, Bound::Excluded(b"b")).is_none()); + assert!(wal.lower_bound(0, Bound::Excluded(b"c")).is_none()); + + let ent = wal.lower_bound(1, 
Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(2, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(3, Bound::Excluded(b"")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(1, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(2, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(3, Bound::Excluded(b"a")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(5, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + let ent = wal.lower_bound(6, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c3"); + assert_eq!(ent.version(), 5); + + assert!(wal.lower_bound(1, Bound::Excluded(b"c")).is_none()); + assert!(wal.lower_bound(2, Bound::Excluded(b"c")).is_none()); + assert!(wal.lower_bound(3, Bound::Excluded(b"c")).is_none()); + assert!(wal.lower_bound(4, Bound::Excluded(b"c")).is_none()); + assert!(wal.lower_bound(5, Bound::Excluded(b"c")).is_none()); + assert!(wal.lower_bound(6, Bound::Excluded(b"c")).is_none()); +} + +fn ge(wal: &mut OrderWal) +where + M: DynamicMemtable + MutableMemtable + 'static, + M::Error: std::fmt::Debug, + + for<'a> M::Entry<'a, Active>: Entry<'a, Key = &'a [u8], Value = &'a [u8]> + + RawEntry<'a, RawValue = &'a [u8]> + + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = &'a [u8], Value = Option<&'a [u8]>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, +{ + wal.insert(1, b"a", 
b"a1").unwrap(); + wal.insert(3, b"a", b"a2").unwrap(); + wal.insert(1, b"c", b"c1").unwrap(); + wal.insert(3, b"c", b"c2").unwrap(); + + assert!(wal.lower_bound(0, Bound::Included(b"a")).is_none()); + assert!(wal.lower_bound(0, Bound::Included(b"b")).is_none()); + assert!(wal.lower_bound(0, Bound::Included(b"c")).is_none()); + + let ent = wal.lower_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.lower_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.lower_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + assert!(wal.lower_bound(0, Bound::Included(b"d")).is_none()); + assert!(wal.lower_bound(1, Bound::Included(b"d")).is_none()); + assert!(wal.lower_bound(2, Bound::Included(b"d")).is_none()); + assert!(wal.lower_bound(3, Bound::Included(b"d")).is_none()); + assert!(wal.lower_bound(4, Bound::Included(b"d")).is_none()); +} + +fn le(wal: &mut OrderWal) +where + M: DynamicMemtable + 
MutableMemtable + 'static, + M::Error: std::fmt::Debug, + + for<'a> M::Entry<'a, Active>: Entry<'a, Key = &'a [u8], Value = &'a [u8]> + + RawEntry<'a, RawValue = &'a [u8]> + + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = &'a [u8], Value = Option<&'a [u8]>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, +{ + wal.insert(1, b"a", b"a1").unwrap(); + wal.insert(3, b"a", b"a2").unwrap(); + wal.insert(1, b"c", b"c1").unwrap(); + wal.insert(3, b"c", b"c2").unwrap(); + + assert!(wal.upper_bound(0, Bound::Included(b"a")).is_none()); + assert!(wal.upper_bound(0, Bound::Included(b"b")).is_none()); + assert!(wal.upper_bound(0, Bound::Included(b"c")).is_none()); + + let ent = wal.upper_bound(1, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(2, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(3, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(4, Bound::Included(b"a")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(2, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(3, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(4, Bound::Included(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(1, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(2, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(3, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(4, Bound::Included(b"c")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = 
wal.upper_bound(1, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(2, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(3, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(4, Bound::Included(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); +} + +fn lt(wal: &mut OrderWal) +where + M: DynamicMemtable + MutableMemtable + 'static, + M::Error: std::fmt::Debug, + + for<'a> M::Entry<'a, Active>: Entry<'a, Key = &'a [u8], Value = &'a [u8]> + + RawEntry<'a, RawValue = &'a [u8]> + + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = &'a [u8], Value = Option<&'a [u8]>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, +{ + wal.insert(1, b"a", b"a1").unwrap(); + wal.insert(3, b"a", b"a2").unwrap(); + wal.insert(1, b"c", b"c1").unwrap(); + wal.insert(3, b"c", b"c2").unwrap(); + + assert!(wal.upper_bound(0, Bound::Excluded(b"a")).is_none()); + assert!(wal.upper_bound(0, Bound::Excluded(b"b")).is_none()); + assert!(wal.upper_bound(0, Bound::Excluded(b"c")).is_none()); + assert!(wal.upper_bound(1, Bound::Excluded(b"a")).is_none()); + assert!(wal.upper_bound(2, Bound::Excluded(b"a")).is_none()); + + let ent = wal.upper_bound(1, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(2, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(3, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(4, Bound::Excluded(b"b")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(1, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(2, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a1"); + assert_eq!(ent.raw_value(), b"a1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(3, Bound::Excluded(b"c")).unwrap(); + assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(4, Bound::Excluded(b"c")).unwrap(); + 
assert_eq!(ent.key(), b"a"); + assert_eq!(ent.raw_key(), b"a"); + assert_eq!(ent.value(), b"a2"); + assert_eq!(ent.raw_value(), b"a2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(1, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(2, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c1"); + assert_eq!(ent.raw_value(), b"c1"); + assert_eq!(ent.version(), 1); + + let ent = wal.upper_bound(3, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); + + let ent = wal.upper_bound(4, Bound::Excluded(b"d")).unwrap(); + assert_eq!(ent.key(), b"c"); + assert_eq!(ent.raw_key(), b"c"); + assert_eq!(ent.value(), b"c2"); + assert_eq!(ent.raw_value(), b"c2"); + assert_eq!(ent.version(), 3); +} + +#[allow(clippy::needless_borrows_for_generic_args)] +fn insert(wal: &mut OrderWal) +where + M: DynamicMemtable + MutableMemtable + 'static, + M::Error: std::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = &'a [u8]> + RawEntry<'a> + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: + Entry<'a, Value = Option<&'a [u8]>> + RawEntry<'a> + std::fmt::Debug, +{ + let people = (0..100) + .map(|_| { + let p = Person::random(); + let v = std::format!("My name is {}", p.name); + wal.insert(0, &p.to_vec(), v.as_bytes()).unwrap(); + (p, v) + }) + .collect::>(); + + for (p, pv) in &people { + assert!(wal.contains_key(0, &p.to_vec())); + + assert_eq!(wal.get(0, &p.to_vec()).unwrap().value(), pv.as_bytes()); + } +} + +macro_rules! insert_with_value_builder { + ($wal:ident) => {{ + let people = (0..100) + .map(|_| { + let p = Person::random(); + let v = std::format!("My name is {}", p.name); + $wal + .insert_with_value_builder( + 0, + &p.to_vec(), + ValueBuilder::once(v.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(v.as_bytes()).map(|_| v.len()) + }), + ) + .unwrap(); + (p, v) + }) + .collect::>(); + + for (p, _) in &people { + assert!($wal.contains_key(0, &p.to_vec())); + } + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_insert_with_value_builder(wal: &mut OrderWal) { + insert_with_value_builder!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_value_builder(wal: &mut OrderWal) { + insert_with_value_builder!(wal); +} + +macro_rules! insert_with_key_builder { + ($wal:ident) => {{ + let people = (0..100) + .map(|_| { + let p = Person::random(); + let v = std::format!("My name is {}", p.name); + $wal + .insert_with_key_builder( + 0, + KeyBuilder::once(p.encoded_len(), |buf| p.encode_to_buffer(buf)), + v.as_bytes(), + ) + .unwrap(); + (p, v) + }) + .collect::>(); + + for (p, pv) in &people { + assert!($wal.contains_key(0, &p.to_vec())); + assert_eq!($wal.get(0, &p.to_vec()).unwrap().value(), pv.as_bytes()); + } + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_insert_with_key_builder(wal: &mut OrderWal) { + insert_with_key_builder!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_key_builder(wal: &mut OrderWal) { + insert_with_key_builder!(wal); +} + +macro_rules! 
insert_with_bytes { + ($wal:ident) => {{ + let people = (0..100) + .map(|_| { + let p = Person::random(); + let v = std::format!("My name is {}", p.name); + $wal.insert(0, p.to_vec().as_slice(), v.as_bytes()).unwrap(); + (p, v) + }) + .collect::>(); + + for (p, pv) in &people { + assert!($wal.contains_key(0, &p.to_vec())); + assert_eq!($wal.get(0, &p.to_vec()).unwrap().value(), pv.as_bytes()); + } + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_insert_with_bytes(wal: &mut OrderWal) { + insert_with_bytes!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_bytes(wal: &mut OrderWal) { + insert_with_bytes!(wal); +} + +macro_rules! insert_with_builders { + ($wal:ident) => {{ + let people = (0..1) + .map(|_| { + let p = Person::random(); + let pvec = p.to_vec(); + let v = std::format!("My name is {}", p.name); + $wal + .insert_with_builders( + 0, + KeyBuilder::new(pvec.len(), |buf: &mut VacantBuffer<'_>| { + p.encode_to_buffer(buf) + }), + ValueBuilder::new(v.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(v.as_bytes()).map(|_| v.len()) + }), + ) + .unwrap(); + (p, pvec, v) + }) + .collect::>(); + + for (p, _, pv) in &people { + assert!($wal.contains_key(0, &p.to_vec())); + assert!($wal.contains_key_with_tombstone(0, &p.to_vec())); + assert_eq!($wal.get(0, &p.to_vec()).unwrap().value(), pv.as_bytes()); + assert_eq!( + $wal + .get_with_tombstone(0, &p.to_vec()) + .unwrap() + .value() + .unwrap(), + pv.as_bytes() + ); + } + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_insert_with_builders(wal: &mut OrderWal) { + insert_with_builders!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_builders(wal: &mut OrderWal) { + insert_with_builders!(wal); +} diff --git a/src/swmr/tests/dynamic/insert.rs b/src/swmr/tests/dynamic/insert.rs new file mode 100644 index 00000000..8f795d59 --- /dev/null +++ b/src/swmr/tests/dynamic/insert.rs @@ -0,0 +1,478 @@ +use dbutils::{buffer::VacantBuffer, state::Active, types::Type}; + +#[cfg(feature = "std")] +use std::thread::spawn; + +use crate::{ + batch::BatchEntry, + dynamic::{DynamicMemtable, OrderWal, OrderWalReader, Reader, Writer}, + memtable::{Entry, MutableMemtable}, + types::{KeyBuilder, ValueBuilder}, + Builder, +}; + +#[cfg(feature = "bounded")] +use crate::dynamic::BoundedTable; + +#[cfg(feature = "unbounded")] +use crate::dynamic::UnboundedTable; + +use super::*; + +#[cfg(feature = "std")] +fn concurrent_basic(mut w: OrderWal) +where + M: DynamicMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Key = &'a [u8], Value = &'a [u8]>, +{ + let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); + + let handles = readers.into_iter().map(|(i, reader)| { + spawn(move || loop { + if let Some(p) = reader.get(1, &i.to_le_bytes()) { + assert_eq!(p.key(), i.to_le_bytes()); + assert_eq!(p.value(), i.to_le_bytes()); + break; + } + }) + }); + + spawn(move || { + for i in 0..100u32 { + #[allow(clippy::needless_borrows_for_generic_args)] + w.insert(1, &i.to_le_bytes(), &i.to_le_bytes()).unwrap(); + } + }); + + for handle in handles { + handle.join().unwrap(); + } +} + +#[cfg(feature = "std")] +fn concurrent_one_key(mut w: OrderWal) +where + M: DynamicMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Key = &'a [u8], Value = &'a [u8]>, +{ + let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); + let handles = readers.into_iter().map(|(_, reader)| { + spawn(move || loop { + if let 
Some(p) = reader.get(1, &1u32.to_le_bytes()) { + assert_eq!(p.key(), 1u32.to_le_bytes()); + assert_eq!(p.value(), 1u32.to_le_bytes()); + break; + } + }) + }); + + w.insert(1, &1u32.to_le_bytes(), &1u32.to_le_bytes()) + .unwrap(); + + for handle in handles { + handle.join().unwrap(); + } +} + +fn apply(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: DynamicMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = &'a [u8]>, +{ + const N: u32 = 5; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + + for (person, val) in output.iter() { + batch.push(BatchEntry::insert(1, person.to_vec(), val.as_bytes())); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1.to_vec(), "rp1".as_bytes()).unwrap(); + wal.apply(&mut batch).unwrap(); + let rp2 = Person::random(); + wal.insert(1, &rp2.to_vec(), "rp2".as_bytes()).unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + (rp1, output, rp2) +} + +fn apply_with_key_builder(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: DynamicMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = &'a [u8]>, +{ + const N: u32 = 5; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + + for (person, val) in output.iter() { + batch.push(BatchEntry::insert( + 1, + KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { + buf.set_len(person.encoded_len()); + person.encode(buf) + }), + val.as_bytes(), + )); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1.to_vec(), b"rp1").unwrap(); + wal.apply_with_key_builder(&mut batch).unwrap(); + let rp2 = Person::random(); + wal.insert(1, &rp2.to_vec(), b"rp2").unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + (rp1, output, rp2) +} + +fn apply_with_value_builder(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: DynamicMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = &'a [u8]>, +{ + const N: u32 = 5; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + 
+ for (person, val) in output.iter() { + batch.push(BatchEntry::insert( + 1, + person.to_vec(), + ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(val.as_bytes()).map(|_| val.len()) + }), + )); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1.to_vec(), b"rp1").unwrap(); + wal.apply_with_value_builder(&mut batch).unwrap(); + let rp2 = Person::random(); + wal.insert(1, &rp2.to_vec(), b"rp2").unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + (rp1, output, rp2) +} + +fn apply_with_builders(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: DynamicMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = &'a [u8]>, +{ + const N: u32 = 1; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + + for (person, val) in output.iter() { + batch.push(BatchEntry::insert( + 1, + KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { + buf.set_len(person.encoded_len()); + person.encode(buf) + }), + ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(val.as_bytes()).map(|_| val.len()) + }), + )); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1.to_vec(), b"rp1").unwrap(); + wal.apply_with_builders(&mut batch).unwrap(); + let rp2 = Person::random(); + wal.insert(1, &rp2.to_vec(), b"rp2").unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + + assert_eq!(wal.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(wal.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + + (rp1, output, rp2) +} + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + move "unbounded": OrderWal [Default::default()]: UnboundedTable { + concurrent_basic |p, _res| { + let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; + + for i in 0..100u32 { + assert!(wal.contains_key(1, &i.to_le_bytes())); + } + }, + concurrent_one_key |p, _res| { + let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; + assert!(wal.contains_key(1, &1u32.to_le_bytes())); + }, + } +); + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + move "unbounded": OrderWal [Default::default()]: UnboundedTable { + apply |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); 
+ }, + apply_with_key_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + }, + apply_with_value_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + }, + apply_with_builders |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + } + } +); + +#[cfg(all(feature = "bounded", feature = "std"))] +expand_unit_tests!( + move "bounded": OrderWal [Default::default()]: BoundedTable { + concurrent_basic |p, _res| { + let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; + + for i in 0..100u32 { + assert!(wal.contains_key(1, &i.to_le_bytes())); + } + }, + concurrent_one_key |p, _res| { + let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; + assert!(wal.contains_key(1, &1u32.to_le_bytes())); + }, + } +); + +#[cfg(feature = "bounded")] +expand_unit_tests!( + move "bounded": OrderWal [Default::default()]: BoundedTable { + apply |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + }, + apply_with_key_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + }, + apply_with_value_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + }, + apply_with_builders |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::, _>(&p) + .unwrap() + }; + + let data: Vec<(Person, String)> = data; + for (p, val) in data { + assert_eq!(map.get(1, &p.to_vec()).unwrap().value(), 
val.as_bytes()); + } + let rp1: Person = rp1; + let rp2: Person = rp2; + assert_eq!(map.get(1, &rp1.to_vec()).unwrap().value(), b"rp1"); + assert_eq!(map.get(1, &rp2.to_vec()).unwrap().value(), b"rp2"); + } + } +); diff --git a/src/swmr/tests/dynamic/iters.rs b/src/swmr/tests/dynamic/iters.rs new file mode 100644 index 00000000..0f01e1da --- /dev/null +++ b/src/swmr/tests/dynamic/iters.rs @@ -0,0 +1,523 @@ +use core::ops::Bound; + +use crate::{ + dynamic::{OrderWal, Reader, Writer}, + memtable::{Entry, RawEntry as _}, +}; + +#[cfg(feature = "bounded")] +use crate::dynamic::BoundedTable; + +#[cfg(feature = "unbounded")] +use crate::dynamic::UnboundedTable; + +use super::*; + +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal [Default::default()]: UnboundedTable { + unbounded_iter_with_tombstone_mvcc, +}); + +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal [Default::default()]: BoundedTable { + bounded_iter_with_tombstone_mvcc, +}); + +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal [Default::default()]: UnboundedTable { + unbounded_iter_with_tombstone_next_by_entry, + unbounded_iter_with_tombstone_next_by_with_tombstone_entry, + unbounded_iter_next, + unbounded_range_next, + unbounded_iter_prev, + unbounded_range_prev, + unbounded_iter_with_tombstone_prev_by_entry, + unbounded_iter_with_tombstone_prev_by_with_tombstone_entry, +}); + +macro_rules! bounded_builder { + () => {{ + crate::Builder::new() + .with_memtable_options( + crate::memtable::bounded::TableOptions::new() + .with_capacity(1024 * 1024) + .into(), + ) + .with_capacity(8 * 1024) + }}; +} + +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal [Default::default()]: BoundedTable { + bounded_iter_with_tombstone_next_by_entry(bounded_builder!()), + bounded_iter_with_tombstone_next_by_with_tombstone_entry(bounded_builder!()), + bounded_iter_next(bounded_builder!()), + bounded_range_next(bounded_builder!()), + bounded_iter_prev(bounded_builder!()), + bounded_range_prev(bounded_builder!()), + bounded_iter_with_tombstone_prev_by_entry(bounded_builder!()), + bounded_iter_with_tombstone_prev_by_with_tombstone_entry(bounded_builder!()), +}); + +fn make_int_key(i: usize) -> String { + ::std::format!("{:05}", i) +} + +fn make_value(i: usize) -> String { + ::std::format!("v{:05}", i) +} + +macro_rules! 
iter_with_tombstone_mvcc { + ($wal:ident) => {{ + $wal.insert(1, b"a", b"a1").unwrap(); + $wal.insert(3, b"a", b"a2").unwrap(); + $wal.insert(1, b"c", b"c1").unwrap(); + $wal.insert(3, b"c", b"c2").unwrap(); + + let mut iter = $wal.iter_all(0); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 0); + + let mut iter = $wal.iter_all(1); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut iter = $wal.iter_all(2); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut iter = $wal.iter_all(3); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 4); + + let upper_bound = $wal.upper_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(upper_bound.value(), b"a1"); + + let upper_bound = $wal + .upper_bound_with_tombstone(1, Bound::Included(b"b")) + .unwrap(); + assert_eq!(upper_bound.value().unwrap(), b"a1"); + + let upper_bound = $wal.upper_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(upper_bound.value(), b"a1"); + + let upper_bound = $wal + .upper_bound_with_tombstone(1, Bound::Included(b"b")) + .unwrap(); + assert_eq!(upper_bound.value().unwrap(), b"a1"); + + let lower_bound = $wal.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(lower_bound.value(), b"c1"); + + let lower_bound = $wal + .lower_bound_with_tombstone(1, Bound::Included(b"b")) + .unwrap(); + assert_eq!(lower_bound.value().unwrap(), b"c1"); + + let lower_bound = $wal.lower_bound(1, Bound::Included(b"b")).unwrap(); + assert_eq!(lower_bound.value(), b"c1"); + + let lower_bound = $wal + .lower_bound_with_tombstone(1, Bound::Included(b"b")) + .unwrap(); + assert_eq!(lower_bound.value().unwrap(), b"c1"); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_mvcc(wal: &mut OrderWal) { + iter_with_tombstone_mvcc!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_mvcc(wal: &mut OrderWal) { + iter_with_tombstone_mvcc!(wal); +} + +macro_rules! iter_next { + ($wal:ident) => {{ + const N: usize = 100; + + for i in (0..N).rev() { + $wal + .insert(0, make_int_key(i).as_bytes(), make_value(i).as_bytes()) + .unwrap(); + } + + let iter = $wal.iter_all(0); + + let mut i = 0; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i).as_bytes()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value().unwrap(), make_value(i).as_bytes()); + assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, N); + + let iter = $wal.iter(0); + let mut i = 0; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i).as_bytes()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value(), make_value(i).as_bytes()); + assert_eq!(ent.raw_value(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, N); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_next(wal: &mut OrderWal) { + iter_next!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_next(wal: &mut OrderWal) { + iter_next!(wal); +} + +macro_rules! 
iter_with_tombstone_next_by_entry { + ($wal:ident) => {{ + const N: usize = 100; + + for i in (0..N).rev() { + $wal + .insert(0, make_int_key(i).as_bytes(), make_value(i).as_bytes()) + .unwrap(); + } + + let mut ent = $wal.first(0).clone(); + #[cfg(feature = "std")] + std::println!("{ent:?}"); + let mut i = 0; + while let Some(ref mut entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_bytes()); + assert_eq!(entry.value(), make_value(i).as_bytes()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); + + let mut ent = $wal.iter(0).next().clone(); + #[cfg(feature = "std")] + std::println!("{ent:?}"); + + let mut i = 0; + while let Some(ref mut entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_bytes()); + assert_eq!(entry.value(), make_value(i).as_bytes()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_next_by_entry(wal: &mut OrderWal) { + iter_with_tombstone_next_by_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_next_by_entry(wal: &mut OrderWal) { + iter_with_tombstone_next_by_entry!(wal); +} + +macro_rules! iter_with_tombstone_next_by_with_tombstone_entry { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + let k = make_int_key(i); + let v = make_value(i); + $wal.insert(0, k.as_bytes(), v.as_bytes()).unwrap(); + $wal.remove(1, k.as_bytes()).unwrap(); + } + + let mut ent = $wal.first(0).clone(); + let mut i = 0; + while let Some(ref mut entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_bytes()); + assert_eq!(entry.value(), make_value(i).as_bytes()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); + + let mut ent = $wal.first_with_tombstone(1).clone(); + #[cfg(feature = "std")] + std::println!("{ent:?}"); + let mut i = 0; + while let Some(ref mut entry) = ent { + if i % 2 == 1 { + assert_eq!(entry.version(), 0); + assert_eq!(entry.key(), make_int_key(i / 2).as_bytes()); + assert_eq!(entry.value().unwrap(), make_value(i / 2).as_bytes()); + } else { + assert_eq!(entry.version(), 1); + assert_eq!(entry.key(), make_int_key(i / 2).as_bytes()); + assert!(entry.value().is_none()); + } + + ent = entry.next(); + i += 1; + } + assert_eq!(i, N * 2); + let ent = $wal.first(1); + assert!(ent.is_none()); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_next_by_with_tombstone_entry(wal: &mut OrderWal) { + iter_with_tombstone_next_by_with_tombstone_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_next_by_with_tombstone_entry(wal: &mut OrderWal) { + iter_with_tombstone_next_by_with_tombstone_entry!(wal); +} + +macro_rules! 
range_next { + ($wal:ident) => {{ + const N: usize = 100; + + for i in (0..N).rev() { + $wal + .insert(0, make_int_key(i).as_bytes(), make_value(i).as_bytes()) + .unwrap(); + } + + let upper = make_int_key(50); + let mut i = 0; + let mut iter = $wal.range(0, ..=upper.as_bytes()); + for ent in &mut iter { + assert_eq!(ent.key(), make_int_key(i).as_bytes()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value(), make_value(i).as_bytes()); + assert_eq!(ent.raw_value(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, 51); + + let mut i = 0; + let mut iter = $wal.range_all(0, ..=upper.as_bytes()); + for ent in &mut iter { + assert_eq!(ent.key(), make_int_key(i).as_bytes()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value().unwrap(), make_value(i).as_bytes()); + assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, 51); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_range_next(wal: &mut OrderWal) { + range_next!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_range_next(wal: &mut OrderWal) { + range_next!(wal); +} + +macro_rules! iter_prev { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + $wal + .insert(0, make_int_key(i).as_bytes(), make_value(i).as_bytes()) + .unwrap(); + } + + let iter = $wal.iter_all(0).rev(); + let mut i = N; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i - 1).as_bytes()); + assert_eq!(ent.value().unwrap(), make_value(i - 1).as_bytes()); + i -= 1; + } + + assert_eq!(i, 0); + + let iter = $wal.iter(0).rev(); + let mut i = N; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i - 1).as_bytes()); + assert_eq!(ent.value(), make_value(i - 1).as_bytes()); + i -= 1; + } + + assert_eq!(i, 0); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_prev(wal: &mut OrderWal) { + iter_prev!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_prev(wal: &mut OrderWal) { + iter_prev!(wal); +} + +macro_rules! iter_with_tombstone_prev_by_entry { + ($wal:ident) => { + const N: usize = 100; + + for i in 0..N { + $wal + .insert(0, make_int_key(i).as_bytes(), make_value(i).as_bytes()) + .unwrap(); + } + + let mut ent = $wal.last(0); + + let mut i = 0; + while let Some(ref mut entry) = ent { + i += 1; + assert_eq!(entry.key(), make_int_key(N - i).as_bytes()); + assert_eq!(entry.value(), make_value(N - i).as_bytes()); + ent = entry.prev(); + } + assert_eq!(i, N); + }; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_prev_by_entry(wal: &mut OrderWal) { + iter_with_tombstone_prev_by_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_prev_by_entry(wal: &mut OrderWal) { + iter_with_tombstone_prev_by_entry!(wal); +} + +macro_rules! 
iter_with_tombstone_prev_by_with_tombstone_entry { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + let k = make_int_key(i); + let v = make_value(i); + $wal.insert(0, k.as_bytes(), v.as_bytes()).unwrap(); + $wal.remove(1, k.as_bytes()).unwrap(); + } + + let mut ent = $wal.last(0); + let mut i = 0; + while let Some(ref mut entry) = ent { + i += 1; + assert_eq!(entry.key(), make_int_key(N - i).as_bytes()); + assert_eq!(entry.value(), make_value(N - i).as_bytes()); + ent = entry.prev(); + } + assert_eq!(i, N); + + let mut ent = $wal.last_with_tombstone(1); + let mut i = 0; + while let Some(ref mut entry) = ent { + if i % 2 == 0 { + assert_eq!(entry.version(), 0); + assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_bytes()); + assert_eq!(entry.value().unwrap(), make_value(N - 1 - i / 2).as_bytes()); + } else { + assert_eq!(entry.version(), 1); + assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_bytes()); + assert!(entry.value().is_none()); + } + + ent = entry.prev(); + i += 1; + } + + assert_eq!(i, N * 2); + let ent = $wal.last(1); + assert!(ent.is_none()); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_prev_by_with_tombstone_entry(wal: &mut OrderWal) { + iter_with_tombstone_prev_by_with_tombstone_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_prev_by_with_tombstone_entry(wal: &mut OrderWal) { + iter_with_tombstone_prev_by_with_tombstone_entry!(wal); +} + +macro_rules! range_prev { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + $wal + .insert(0, make_int_key(i).as_bytes(), make_value(i).as_bytes()) + .unwrap(); + } + + let lower = make_int_key(50); + let it = $wal.range(0, lower.as_bytes()..).rev(); + let mut i = N - 1; + + for ent in it { + assert_eq!(ent.key(), make_int_key(i).as_bytes()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value(), make_value(i).as_bytes()); + assert_eq!(ent.raw_value(), make_value(i).as_bytes()); + assert_eq!(ent.version(), 0); + i -= 1; + } + + assert_eq!(i, 49); + + let it = $wal.range_all(0, lower.as_bytes()..).rev(); + let mut i = N - 1; + + for ent in it { + assert_eq!(ent.key(), make_int_key(i).as_bytes()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value().unwrap(), make_value(i).as_bytes()); + assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); + assert_eq!(ent.version(), 0); + i -= 1; + } + + assert_eq!(i, 49); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_range_prev(wal: &mut OrderWal) { + range_prev!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_range_prev(wal: &mut OrderWal) { + range_prev!(wal); +} diff --git a/src/swmr/tests/generic.rs b/src/swmr/tests/generic.rs new file mode 100644 index 00000000..d2f9aef9 --- /dev/null +++ b/src/swmr/tests/generic.rs @@ -0,0 +1,13 @@ +use super::*; + +#[cfg(all(test, any(test_generic_insert, all_orderwal_tests)))] +mod insert; + +#[cfg(all(test, any(test_generic_iters, all_orderwal_tests)))] +mod iters; + +#[cfg(all(test, any(test_generic_get, all_orderwal_tests)))] +mod get; + +#[cfg(all(test, any(test_generic_constructor, all_orderwal_tests)))] +mod constructor; diff --git a/src/swmr/tests/generic/constructor.rs b/src/swmr/tests/generic/constructor.rs new file mode 100644 index 00000000..3e45ea01 --- /dev/null +++ b/src/swmr/tests/generic/constructor.rs @@ -0,0 +1,75 @@ +use crate::generic::{GenericMemtable, OrderWal, Reader, Writer}; + +#[cfg(feature = "bounded")] +use crate::generic::BoundedTable; + +#[cfg(feature = 
"unbounded")] +use crate::generic::UnboundedTable; + +use super::*; + +fn zero_reserved(wal: &mut OrderWal) +where + M: GenericMemtable + 'static, + M::Error: std::fmt::Debug, +{ + unsafe { + assert_eq!(wal.reserved_slice(), b""); + assert_eq!(wal.reserved_slice_mut(), b""); + + let wal = wal.reader(); + assert_eq!(wal.reserved_slice(), b""); + } +} + +fn reserved(wal: &mut OrderWal) +where + M: GenericMemtable + 'static, + M::Error: std::fmt::Debug, +{ + unsafe { + let buf = wal.reserved_slice_mut(); + buf.copy_from_slice(b"al8n"); + assert_eq!(wal.reserved_slice(), b"al8n"); + assert_eq!(wal.reserved_slice_mut(), b"al8n"); + + let wal = wal.reader(); + assert_eq!(wal.reserved_slice(), b"al8n"); + } +} + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + "unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { + zero_reserved, + } +); + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + "unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { + reserved({ + crate::Builder::new() + .with_capacity(MB) + .with_reserved(4) + }), + } +); + +#[cfg(feature = "bounded")] +expand_unit_tests!( + "bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { + zero_reserved, + } +); + +#[cfg(feature = "bounded")] +expand_unit_tests!( + "bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { + reserved({ + crate::Builder::new() + .with_capacity(MB) + .with_reserved(4) + }), + } +); diff --git a/src/swmr/tests/multiple_version_get.rs b/src/swmr/tests/generic/get.rs similarity index 68% rename from src/swmr/tests/multiple_version_get.rs rename to src/swmr/tests/generic/get.rs index 60fe7199..e4e37531 100644 --- a/src/swmr/tests/multiple_version_get.rs +++ b/src/swmr/tests/generic/get.rs @@ -1,20 +1,28 @@ +use dbutils::{ + buffer::VacantBuffer, + equivalentor::{TypeRefComparator, TypeRefQueryComparator}, + state::{Active, MaybeTombstone}, + types::{MaybeStructured, Str, Type}, +}; + use core::ops::Bound; use crate::{ - memtable::{ - alternative::{MultipleVersionTable, TableOptions}, - MultipleVersionMemtable, VersionedMemtableEntry, - }, + generic::{GenericMemtable, OrderWal, Reader, Writer}, + memtable::{Entry, MutableMemtable, RawEntry}, types::{KeyBuilder, ValueBuilder}, }; -use dbutils::types::MaybeStructured; -use multiple_version::{Reader, Writer}; -use skl::VacantBuffer; + +#[cfg(feature = "bounded")] +use crate::generic::BoundedTable; + +#[cfg(feature = "unbounded")] +use crate::generic::UnboundedTable; use super::*; -#[cfg(feature = "std")] -expand_unit_tests!("linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { mvcc, gt, ge, @@ -22,7 +30,8 @@ expand_unit_tests!("linked": MultipleVersionOrderWalAlternativeTable [ lt, }); -expand_unit_tests!("arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: MultipleVersionTable<_, _> { +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { mvcc, gt, ge, @@ -30,28 +39,34 @@ expand_unit_tests!("arena": MultipleVersionOrderWalAlternativeTable [T lt, }); -#[cfg(feature = "std")] -expand_unit_tests!("linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { insert, - insert_with_value_builder, - 
insert_with_key_builder, - insert_with_bytes, - insert_with_builders, + unbounded_insert_with_value_builder, + unbounded_insert_with_key_builder, + unbounded_insert_with_bytes, + unbounded_insert_with_builders, }); -expand_unit_tests!("arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: MultipleVersionTable<_, _> { +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { insert, - insert_with_value_builder, - insert_with_key_builder, - insert_with_bytes, - insert_with_builders, + bounded_insert_with_value_builder, + bounded_insert_with_key_builder, + bounded_insert_with_bytes, + bounded_insert_with_builders, }); -fn mvcc(wal: &mut multiple_version::OrderWal) +fn mvcc(wal: &mut OrderWal) where - M: MultipleVersionMemtable + 'static, + M: GenericMemtable + MutableMemtable + 'static, M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, + for<'a> M::Comparator: TypeRefComparator<'a, str> + TypeRefQueryComparator<'a, str, str>, + for<'a> M::Entry<'a, Active>: + Entry<'a, Key = Str<'a>, Value = Str<'a>> + RawEntry<'a, RawValue = &'a [u8]> + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = Str<'a>, Value = Option>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, { wal.insert(1, "a", "a1").unwrap(); wal.insert(3, "a", "a2").unwrap(); @@ -124,11 +139,16 @@ where assert!(wal.get(5, "d").is_none()); } -fn gt(wal: &mut multiple_version::OrderWal) +fn gt(wal: &mut OrderWal) where - M: MultipleVersionMemtable + 'static, + M: GenericMemtable + MutableMemtable + 'static, M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, + for<'a> M::Comparator: TypeRefComparator<'a, str> + TypeRefQueryComparator<'a, str, str>, + for<'a> M::Entry<'a, Active>: + Entry<'a, Key = Str<'a>, Value = Str<'a>> + RawEntry<'a, RawValue = &'a [u8]> + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = Str<'a>, Value = Option>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, { wal.insert(1, "a", "a1").unwrap(); wal.insert(3, "a", "a2").unwrap(); @@ -230,11 +250,16 @@ where assert!(wal.lower_bound(6, Bound::Excluded("c")).is_none()); } -fn ge(wal: &mut multiple_version::OrderWal) +fn ge(wal: &mut OrderWal) where - M: MultipleVersionMemtable + 'static, + M: GenericMemtable + MutableMemtable + 'static, M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, + for<'a> M::Comparator: TypeRefComparator<'a, str> + TypeRefQueryComparator<'a, str, str>, + for<'a> M::Entry<'a, Active>: + Entry<'a, Key = Str<'a>, Value = Str<'a>> + RawEntry<'a, RawValue = &'a [u8]> + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = Str<'a>, Value = Option>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, { wal.insert(1, "a", "a1").unwrap(); wal.insert(3, "a", "a2").unwrap(); @@ -336,11 +361,16 @@ where assert!(wal.lower_bound(4, Bound::Included("d")).is_none()); } -fn le(wal: &mut multiple_version::OrderWal) +fn le(wal: &mut OrderWal) where - M: MultipleVersionMemtable + 'static, + M: GenericMemtable + MutableMemtable + 'static, M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, + for<'a> M::Comparator: TypeRefComparator<'a, str> + TypeRefQueryComparator<'a, str, str>, + for<'a> M::Entry<'a, Active>: + Entry<'a, Key = Str<'a>, Value = Str<'a>> + RawEntry<'a, RawValue = &'a 
[u8]> + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = Str<'a>, Value = Option>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, { wal.insert(1, "a", "a1").unwrap(); wal.insert(3, "a", "a2").unwrap(); @@ -464,11 +494,16 @@ where assert_eq!(ent.version(), 3); } -fn lt(wal: &mut multiple_version::OrderWal) +fn lt(wal: &mut OrderWal) where - M: MultipleVersionMemtable + 'static, + M: GenericMemtable + MutableMemtable + 'static, M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, + for<'a> M::Comparator: TypeRefComparator<'a, str> + TypeRefQueryComparator<'a, str, str>, + for<'a> M::Entry<'a, Active>: + Entry<'a, Key = Str<'a>, Value = Str<'a>> + RawEntry<'a, RawValue = &'a [u8]> + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: Entry<'a, Key = Str<'a>, Value = Option>> + + RawEntry<'a, RawValue = Option<&'a [u8]>> + + std::fmt::Debug, { wal.insert(1, "a", "a1").unwrap(); wal.insert(3, "a", "a2").unwrap(); @@ -567,11 +602,14 @@ where } #[allow(clippy::needless_borrows_for_generic_args)] -fn insert(wal: &mut OrderWal) +fn insert(wal: &mut OrderWal) where - M: MultipleVersionMemtable + 'static, + M: GenericMemtable + MutableMemtable + 'static, M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, + for<'a> M::Comparator: TypeRefComparator<'a, Person> + TypeRefQueryComparator<'a, Person, Person>, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = Str<'a>> + RawEntry<'a> + std::fmt::Debug, + for<'a> M::Entry<'a, MaybeTombstone>: + Entry<'a, Value = Option>> + RawEntry<'a> + std::fmt::Debug, { let people = (0..100) .map(|_| { @@ -589,136 +627,150 @@ where } } -fn insert_with_value_builder(wal: &mut OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - let people = (0..100) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal - .insert_with_value_builder( - 0, - &p, - ValueBuilder::once(v.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(v.as_bytes()).map(|_| v.len()) - }), - ) - .unwrap(); - (p, v) - }) - .collect::>(); +macro_rules! 
insert_with_value_builder { + ($wal:ident) => {{ + let people = (0..100) + .map(|_| { + let p = Person::random(); + let v = std::format!("My name is {}", p.name); + $wal + .insert_with_value_builder( + 0, + &p, + ValueBuilder::once(v.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(v.as_bytes()).map(|_| v.len()) + }), + ) + .unwrap(); + (p, v) + }) + .collect::>(); - for (p, _) in &people { - assert!(wal.contains_key(0, p)); - assert!(wal.contains_key(0, &p.as_ref())); - } + for (p, _) in &people { + assert!($wal.contains_key(0, p)); + assert!($wal.contains_key(0, &p.as_ref())); + } + }}; } -fn insert_with_key_builder(wal: &mut OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - let people = (0..100) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal - .insert_with_key_builder( - 0, - KeyBuilder::once(p.encoded_len(), |buf| p.encode_to_buffer(buf)), - &v, - ) - .unwrap(); - (p, v) - }) - .collect::>(); +#[cfg(feature = "bounded")] +fn bounded_insert_with_value_builder(wal: &mut OrderWal>) { + insert_with_value_builder!(wal); +} - for (p, pv) in &people { - assert!(wal.contains_key(0, p)); - assert_eq!(wal.get(0, p).unwrap().value(), pv); - } +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_value_builder(wal: &mut OrderWal>) { + insert_with_value_builder!(wal); } -fn insert_with_bytes(wal: &mut OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - let people = (0..100) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - unsafe { - wal - .insert( +macro_rules! 
insert_with_key_builder { + ($wal:ident) => {{ + let people = (0..100) + .map(|_| { + let p = Person::random(); + let v = std::format!("My name is {}", p.name); + $wal + .insert_with_key_builder( 0, - MaybeStructured::from_slice(p.to_vec().as_slice()), - MaybeStructured::from_slice(v.as_bytes()), + KeyBuilder::once(p.encoded_len(), |buf| p.encode_to_buffer(buf)), + &v, ) .unwrap(); - } - (p, v) - }) - .collect::>(); + (p, v) + }) + .collect::>(); - for (p, pv) in &people { - assert!(wal.contains_key(0, p)); - assert!(wal.contains_key(0, &p.as_ref())); - assert_eq!(wal.get(0, p).unwrap().value(), pv); - } + for (p, pv) in &people { + assert!($wal.contains_key(0, p)); + assert_eq!($wal.get(0, p).unwrap().value(), pv); + } + }}; } -fn insert_with_builders(wal: &mut OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - let people = (0..1) - .map(|_| { - let p = Person::random(); - let pvec = p.to_vec(); - let v = std::format!("My name is {}", p.name); - wal - .insert_with_builders( - 0, - KeyBuilder::new(pvec.len(), |buf: &mut VacantBuffer<'_>| { - p.encode_to_buffer(buf) - }), - ValueBuilder::new(v.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(v.as_bytes()).map(|_| v.len()) - }), - ) - .unwrap(); - (p, pvec, v) - }) - .collect::>(); +#[cfg(feature = "bounded")] +fn bounded_insert_with_key_builder(wal: &mut OrderWal>) { + insert_with_key_builder!(wal); +} - for (p, pvec, pv) in &people { - assert!(wal.contains_key(0, p)); - assert!(wal.contains_key_versioned(0, p)); - assert_eq!(wal.get(0, p).unwrap().value(), pv); - assert_eq!(wal.get_versioned(0, p).unwrap().value().unwrap(), pv); - - unsafe { - assert!(wal.contains_key_by_bytes(0, pvec)); - assert!(wal.contains_key_versioned_by_bytes(0, pvec)); - assert_eq!(wal.get_by_bytes(0, pvec.as_ref()).unwrap().value(), pv); - assert_eq!( - wal - .get_versioned_by_bytes(0, pvec) - .unwrap() - .value() - .unwrap(), - pv - ); +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_key_builder(wal: &mut OrderWal>) { + insert_with_key_builder!(wal); +} + +macro_rules! insert_with_bytes { + ($wal:ident) => {{ + let people = (0..100) + .map(|_| { + let p = Person::random(); + let v = std::format!("My name is {}", p.name); + unsafe { + $wal + .insert( + 0, + MaybeStructured::from_slice(p.to_vec().as_slice()), + MaybeStructured::from_slice(v.as_bytes()), + ) + .unwrap(); + } + (p, v) + }) + .collect::>(); + + for (p, pv) in &people { + assert!($wal.contains_key(0, p)); + assert!($wal.contains_key(0, &p.as_ref())); + assert_eq!($wal.get(0, p).unwrap().value(), pv); } - } + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_insert_with_bytes(wal: &mut OrderWal>) { + insert_with_bytes!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_bytes(wal: &mut OrderWal>) { + insert_with_bytes!(wal); +} + +macro_rules! 
insert_with_builders { + ($wal:ident) => {{ + let people = (0..1) + .map(|_| { + let p = Person::random(); + let pvec = p.to_vec(); + let v = std::format!("My name is {}", p.name); + $wal + .insert_with_builders( + 0, + KeyBuilder::new(pvec.len(), |buf: &mut VacantBuffer<'_>| { + p.encode_to_buffer(buf) + }), + ValueBuilder::new(v.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(v.as_bytes()).map(|_| v.len()) + }), + ) + .unwrap(); + (p, pvec, v) + }) + .collect::>(); + + for (p, _, pv) in &people { + assert!($wal.contains_key(0, p)); + assert!($wal.contains_key_with_tombstone(0, p)); + assert_eq!($wal.get(0, p).unwrap().value(), pv); + assert_eq!($wal.get_with_tombstone(0, p).unwrap().value().unwrap(), pv); + } + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_insert_with_builders(wal: &mut OrderWal>) { + insert_with_builders!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_insert_with_builders(wal: &mut OrderWal>) { + insert_with_builders!(wal); } diff --git a/src/swmr/tests/generic/insert.rs b/src/swmr/tests/generic/insert.rs new file mode 100644 index 00000000..73fd9851 --- /dev/null +++ b/src/swmr/tests/generic/insert.rs @@ -0,0 +1,467 @@ +use dbutils::{ + buffer::VacantBuffer, + equivalentor::{TypeRefComparator, TypeRefQueryComparator}, + state::Active, + types::{MaybeStructured, Type}, +}; + +#[cfg(feature = "std")] +use std::thread::spawn; + +use crate::{ + batch::BatchEntry, + generic::{GenericMemtable, OrderWal, OrderWalReader, Reader, Writer}, + memtable::{Entry, MutableMemtable}, + types::{KeyBuilder, ValueBuilder}, + Builder, +}; + +#[cfg(feature = "bounded")] +use crate::generic::BoundedTable; + +#[cfg(feature = "unbounded")] +use crate::generic::UnboundedTable; + +use super::*; + +#[cfg(feature = "std")] +fn concurrent_basic(mut w: OrderWal) +where + M: GenericMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Key = u32, Value = [u8; 4]>, + for<'a> M::Comparator: TypeRefComparator<'a, u32> + TypeRefQueryComparator<'a, u32, u32>, +{ + let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); + + let handles = readers.into_iter().map(|(i, reader)| { + spawn(move || loop { + if let Some(p) = reader.get(1, &i) { + assert_eq!(p.key(), i); + assert_eq!(p.value(), i.to_le_bytes()); + break; + } + }) + }); + + spawn(move || { + for i in 0..100u32 { + #[allow(clippy::needless_borrows_for_generic_args)] + w.insert(1, &i, &i.to_le_bytes()).unwrap(); + } + }); + + for handle in handles { + handle.join().unwrap(); + } +} + +#[cfg(feature = "std")] +fn concurrent_one_key(mut w: OrderWal) +where + M: GenericMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Key = u32, Value = [u8; 4]>, + for<'a> M::Comparator: TypeRefComparator<'a, u32> + TypeRefQueryComparator<'a, u32, u32>, +{ + let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); + let handles = readers.into_iter().map(|(_, reader)| { + spawn(move || loop { + if let Some(p) = reader.get(1, &1) { + assert_eq!(p.key(), 1); + assert_eq!(p.value(), 1u32.to_le_bytes()); + break; + } + }) + }); + + w.insert(1, &1, &1u32.to_le_bytes()).unwrap(); + + for handle in handles { + handle.join().unwrap(); + } +} + +fn apply(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: GenericMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = ::Ref<'a>>, + for<'a> M::Comparator: TypeRefComparator<'a, 
Person> + TypeRefQueryComparator<'a, Person, Person>, +{ + const N: u32 = 5; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + + for (person, val) in output.iter() { + batch.push(BatchEntry::insert( + 1, + MaybeStructured::from(person), + MaybeStructured::from(val), + )); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1, &"rp1".to_string()).unwrap(); + wal.apply(&mut batch).unwrap(); + let rp2 = Person::random(); + wal.insert(1, &rp2, &"rp2".to_string()).unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + (rp1, output, rp2) +} + +fn apply_with_key_builder(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: GenericMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = ::Ref<'a>>, + for<'a> M::Comparator: TypeRefComparator<'a, Person> + TypeRefQueryComparator<'a, Person, Person>, +{ + const N: u32 = 5; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + + for (person, val) in output.iter() { + batch.push(BatchEntry::insert( + 1, + KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { + buf.set_len(person.encoded_len()); + person.encode(buf) + }), + MaybeStructured::from(val), + )); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1, &"rp1".to_string()).unwrap(); + wal.apply(&mut batch).unwrap(); + let rp2 = Person::random(); + wal.insert(1, &rp2, &"rp2".to_string()).unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + (rp1, output, rp2) +} + +fn apply_with_value_builder(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: GenericMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = ::Ref<'a>>, + for<'a> M::Comparator: TypeRefComparator<'a, Person> + TypeRefQueryComparator<'a, Person, Person>, +{ + const N: u32 = 5; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + + for (person, val) in output.iter() { + batch.push(BatchEntry::insert( + 1, + MaybeStructured::from(person), + ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(val.as_bytes()).map(|_| val.len()) + }), + )); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1, &"rp1".to_string()).unwrap(); + wal.apply(&mut batch).unwrap(); + let rp2 = Person::random(); + 
wal.insert(1, &rp2, &"rp2".to_string()).unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + (rp1, output, rp2) +} + +fn apply_with_builders(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) +where + M: GenericMemtable + MutableMemtable + Send + 'static, + M::Error: core::fmt::Debug, + for<'a> M::Entry<'a, Active>: Entry<'a, Value = ::Ref<'a>>, + for<'a> M::Comparator: TypeRefComparator<'a, Person> + TypeRefQueryComparator<'a, Person, Person>, +{ + const N: u32 = 1; + + let mut batch = vec![]; + let output = (0..N) + .map(|i| { + ( + { + let mut p = Person::random(); + p.id = i as u64; + p + }, + std::format!("My id is {i}"), + ) + .clone() + }) + .collect::>(); + + for (person, val) in output.iter() { + batch.push(BatchEntry::insert( + 1, + KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { + buf.set_len(person.encoded_len()); + person.encode(buf) + }), + ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { + buf.put_slice(val.as_bytes()).map(|_| val.len()) + }), + )); + } + + let rp1 = Person::random(); + wal.insert(1, &rp1, &"rp1".to_string()).unwrap(); + wal.apply(&mut batch).unwrap(); + let rp2 = Person::random(); + wal.insert(1, &rp2, &"rp2".to_string()).unwrap(); + + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + let wal = wal.reader(); + for (p, val) in output.iter() { + assert_eq!(wal.get(1, p).unwrap().value(), val); + } + + assert_eq!(wal.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(wal.get(1, &rp2).unwrap().value(), "rp2"); + + (rp1, output, rp2) +} + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + move "unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { + concurrent_basic |p, _res| { + let wal = unsafe { Builder::new().map::>, _>(p).unwrap() }; + + for i in 0..100u32 { + assert!(wal.contains_key(1, &i)); + } + }, + concurrent_one_key |p, _res| { + let wal = unsafe { Builder::new().map::>, _>(p).unwrap() }; + assert!(wal.contains_key(1, &1)); + }, + } +); + +#[cfg(feature = "unbounded")] +expand_unit_tests!( + move "unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { + apply |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + }, + apply_with_key_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + }, + apply_with_value_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + 
assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + }, + apply_with_builders |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + } + } +); + +#[cfg(all(feature = "bounded", feature = "std"))] +expand_unit_tests!( + move "bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { + concurrent_basic |p, _res| { + let wal = unsafe { Builder::new().map::>, _>(p).unwrap() }; + + for i in 0..100u32 { + assert!(wal.contains_key(1, &i)); + } + }, + concurrent_one_key |p, _res| { + let wal = unsafe { Builder::new().map::>, _>(p).unwrap() }; + assert!(wal.contains_key(1, &1)); + }, + } +); + +#[cfg(feature = "bounded")] +expand_unit_tests!( + move "bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { + apply |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + }, + apply_with_key_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + }, + apply_with_value_builder |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + }, + apply_with_builders |p, (rp1, data, rp2)| { + let map = unsafe { + Builder::new() + .map::>, _>(&p) + .unwrap() + }; + + for (p, val) in data { + assert_eq!(map.get(1, &p).unwrap().value(), &val); + } + assert_eq!(map.get(1, &rp1).unwrap().value(), "rp1"); + assert_eq!(map.get(1, &rp2).unwrap().value(), "rp2"); + } + } +); diff --git a/src/swmr/tests/generic/iters.rs b/src/swmr/tests/generic/iters.rs new file mode 100644 index 00000000..a7470dd1 --- /dev/null +++ b/src/swmr/tests/generic/iters.rs @@ -0,0 +1,519 @@ +use core::ops::Bound; + +use crate::{ + generic::{OrderWal, Reader, Writer}, + memtable::{Entry, RawEntry as _}, +}; + +#[cfg(feature = "bounded")] +use crate::generic::BoundedTable; + +#[cfg(feature = "unbounded")] +use crate::generic::UnboundedTable; + +use super::*; + +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { + unbounded_iter_with_tombstone_mvcc, +}); + +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { + bounded_iter_with_tombstone_mvcc, +}); + +#[cfg(feature = "unbounded")] +expand_unit_tests!("unbounded": OrderWal> [Default::default()]: UnboundedTable<_, _> { + unbounded_iter_with_tombstone_next_by_entry, + unbounded_iter_with_tombstone_next_by_with_tombstone_entry, + unbounded_iter_next, + unbounded_range_next, + unbounded_iter_prev, + unbounded_range_prev, + unbounded_iter_with_tombstone_prev_by_entry, + unbounded_iter_with_tombstone_prev_by_with_tombstone_entry, +}); + +macro_rules! 
bounded_builder { + () => {{ + crate::Builder::new() + .with_memtable_options( + crate::memtable::bounded::TableOptions::new() + .with_capacity(1024 * 1024) + .into(), + ) + .with_capacity(8 * 1024) + }}; +} + +#[cfg(feature = "bounded")] +expand_unit_tests!("bounded": OrderWal> [Default::default()]: BoundedTable<_, _> { + bounded_iter_with_tombstone_next_by_entry(bounded_builder!()), + bounded_iter_with_tombstone_next_by_with_tombstone_entry(bounded_builder!()), + bounded_iter_next(bounded_builder!()), + bounded_range_next(bounded_builder!()), + bounded_iter_prev(bounded_builder!()), + bounded_range_prev(bounded_builder!()), + bounded_iter_with_tombstone_prev_by_entry(bounded_builder!()), + bounded_iter_with_tombstone_prev_by_with_tombstone_entry(bounded_builder!()), +}); + +fn make_int_key(i: usize) -> String { + ::std::format!("{:05}", i) +} + +fn make_value(i: usize) -> String { + ::std::format!("v{:05}", i) +} + +macro_rules! iter_with_tombstone_mvcc { + ($wal:ident) => {{ + $wal.insert(1, "a", "a1").unwrap(); + $wal.insert(3, "a", "a2").unwrap(); + $wal.insert(1, "c", "c1").unwrap(); + $wal.insert(3, "c", "c2").unwrap(); + + let mut iter = $wal.iter_all(0); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 0); + + let mut iter = $wal.iter_all(1); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut iter = $wal.iter_all(2); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 2); + + let mut iter = $wal.iter_all(3); + let mut num = 0; + while iter.next().is_some() { + num += 1; + } + assert_eq!(num, 4); + + let upper_bound = $wal.upper_bound(1, Bound::Included("b")).unwrap(); + assert_eq!(upper_bound.value(), "a1"); + + let upper_bound = $wal + .upper_bound_with_tombstone(1, Bound::Included("b")) + .unwrap(); + assert_eq!(upper_bound.value().unwrap(), "a1"); + + let upper_bound = $wal.upper_bound(1, Bound::Included("b")).unwrap(); + assert_eq!(upper_bound.value(), "a1"); + + let upper_bound = $wal + .upper_bound_with_tombstone(1, Bound::Included("b")) + .unwrap(); + assert_eq!(upper_bound.value().unwrap(), "a1"); + + let lower_bound = $wal.lower_bound(1, Bound::Included("b")).unwrap(); + assert_eq!(lower_bound.value(), "c1"); + + let lower_bound = $wal + .lower_bound_with_tombstone(1, Bound::Included("b")) + .unwrap(); + assert_eq!(lower_bound.value().unwrap(), "c1"); + + let lower_bound = $wal.lower_bound(1, Bound::Included("b")).unwrap(); + assert_eq!(lower_bound.value(), "c1"); + + let lower_bound = $wal + .lower_bound_with_tombstone(1, Bound::Included("b")) + .unwrap(); + assert_eq!(lower_bound.value().unwrap(), "c1"); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_mvcc(wal: &mut OrderWal>) { + iter_with_tombstone_mvcc!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_mvcc(wal: &mut OrderWal>) { + iter_with_tombstone_mvcc!(wal); +} + +macro_rules! 
iter_next { + ($wal:ident) => {{ + const N: usize = 100; + + for i in (0..N).rev() { + $wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); + } + + let iter = $wal.iter_all(0); + + let mut i = 0; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i).as_str()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value().unwrap(), make_value(i).as_str()); + assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, N); + + let iter = $wal.iter(0); + let mut i = 0; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i).as_str()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value(), make_value(i).as_str()); + assert_eq!(ent.raw_value(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, N); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_next(wal: &mut OrderWal>) { + iter_next!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_next(wal: &mut OrderWal>) { + iter_next!(wal); +} + +macro_rules! iter_with_tombstone_next_by_entry { + ($wal:ident) => {{ + const N: usize = 100; + + for i in (0..N).rev() { + $wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); + } + + let mut ent = $wal.first(0).clone(); + #[cfg(feature = "std")] + std::println!("{ent:?}"); + let mut i = 0; + while let Some(ref mut entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_str()); + assert_eq!(entry.value(), make_value(i).as_str()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); + + let mut ent = $wal.iter(0).next().clone(); + #[cfg(feature = "std")] + std::println!("{ent:?}"); + + let mut i = 0; + while let Some(ref mut entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_str()); + assert_eq!(entry.value(), make_value(i).as_str()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_next_by_entry(wal: &mut OrderWal>) { + iter_with_tombstone_next_by_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_next_by_entry(wal: &mut OrderWal>) { + iter_with_tombstone_next_by_entry!(wal); +} + +macro_rules! 
iter_with_tombstone_next_by_with_tombstone_entry { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + let k = make_int_key(i); + let v = make_value(i); + $wal.insert(0, &k, &v).unwrap(); + $wal.remove(1, &k).unwrap(); + } + + let mut ent = $wal.first(0).clone(); + let mut i = 0; + while let Some(ref mut entry) = ent { + assert_eq!(entry.key(), make_int_key(i).as_str()); + assert_eq!(entry.value(), make_value(i).as_str()); + ent = entry.next(); + i += 1; + } + assert_eq!(i, N); + + let mut ent = $wal.first_with_tombstone(1).clone(); + #[cfg(feature = "std")] + std::println!("{ent:?}"); + let mut i = 0; + while let Some(ref mut entry) = ent { + if i % 2 == 1 { + assert_eq!(entry.version(), 0); + assert_eq!(entry.key(), make_int_key(i / 2).as_str()); + assert_eq!(entry.value().unwrap(), make_value(i / 2).as_str()); + } else { + assert_eq!(entry.version(), 1); + assert_eq!(entry.key(), make_int_key(i / 2).as_str()); + assert!(entry.value().is_none()); + } + + ent = entry.next(); + i += 1; + } + assert_eq!(i, N * 2); + let ent = $wal.first(1); + assert!(ent.is_none()); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_next_by_with_tombstone_entry( + wal: &mut OrderWal>, +) { + iter_with_tombstone_next_by_with_tombstone_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_next_by_with_tombstone_entry( + wal: &mut OrderWal>, +) { + iter_with_tombstone_next_by_with_tombstone_entry!(wal); +} + +macro_rules! range_next { + ($wal:ident) => {{ + const N: usize = 100; + + for i in (0..N).rev() { + $wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); + } + + let upper = make_int_key(50); + let mut i = 0; + let mut iter = $wal.range(0, ..=upper.as_str()); + for ent in &mut iter { + assert_eq!(ent.key(), make_int_key(i).as_str()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value(), make_value(i).as_str()); + assert_eq!(ent.raw_value(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, 51); + + let mut i = 0; + let mut iter = $wal.range_all(0, ..=upper.as_str()); + for ent in &mut iter { + assert_eq!(ent.key(), make_int_key(i).as_str()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value().unwrap(), make_value(i).as_str()); + assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); + i += 1; + } + + assert_eq!(i, 51); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_range_next(wal: &mut OrderWal>) { + range_next!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_range_next(wal: &mut OrderWal>) { + range_next!(wal); +} + +macro_rules! iter_prev { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + $wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); + } + + let iter = $wal.iter_all(0).rev(); + let mut i = N; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i - 1).as_str()); + assert_eq!(ent.value().unwrap(), make_value(i - 1).as_str()); + i -= 1; + } + + assert_eq!(i, 0); + + let iter = $wal.iter(0).rev(); + let mut i = N; + for ent in iter { + assert_eq!(ent.key(), make_int_key(i - 1).as_str()); + assert_eq!(ent.value(), make_value(i - 1).as_str()); + i -= 1; + } + + assert_eq!(i, 0); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_prev(wal: &mut OrderWal>) { + iter_prev!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_prev(wal: &mut OrderWal>) { + iter_prev!(wal); +} + +macro_rules! 
iter_with_tombstone_prev_by_entry { + ($wal:ident) => { + const N: usize = 100; + + for i in 0..N { + $wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); + } + + let mut ent = $wal.last(0); + + let mut i = 0; + while let Some(ref mut entry) = ent { + i += 1; + assert_eq!(entry.key(), make_int_key(N - i).as_str()); + assert_eq!(entry.value(), make_value(N - i).as_str()); + ent = entry.prev(); + } + assert_eq!(i, N); + }; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_prev_by_entry(wal: &mut OrderWal>) { + iter_with_tombstone_prev_by_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_prev_by_entry(wal: &mut OrderWal>) { + iter_with_tombstone_prev_by_entry!(wal); +} + +macro_rules! iter_with_tombstone_prev_by_with_tombstone_entry { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + let k = make_int_key(i); + let v = make_value(i); + $wal.insert(0, &k, &v).unwrap(); + $wal.remove(1, &k).unwrap(); + } + + let mut ent = $wal.last(0); + let mut i = 0; + while let Some(ref mut entry) = ent { + i += 1; + assert_eq!(entry.key(), make_int_key(N - i).as_str()); + assert_eq!(entry.value(), make_value(N - i).as_str()); + ent = entry.prev(); + } + assert_eq!(i, N); + + let mut ent = $wal.last_with_tombstone(1); + let mut i = 0; + while let Some(ref mut entry) = ent { + if i % 2 == 0 { + assert_eq!(entry.version(), 0); + assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_str()); + assert_eq!(entry.value().unwrap(), make_value(N - 1 - i / 2).as_str()); + } else { + assert_eq!(entry.version(), 1); + assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_str()); + assert!(entry.value().is_none()); + } + + ent = entry.prev(); + i += 1; + } + + assert_eq!(i, N * 2); + let ent = $wal.last(1); + assert!(ent.is_none()); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_iter_with_tombstone_prev_by_with_tombstone_entry( + wal: &mut OrderWal>, +) { + iter_with_tombstone_prev_by_with_tombstone_entry!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_iter_with_tombstone_prev_by_with_tombstone_entry( + wal: &mut OrderWal>, +) { + iter_with_tombstone_prev_by_with_tombstone_entry!(wal); +} + +macro_rules! 
range_prev { + ($wal:ident) => {{ + const N: usize = 100; + + for i in 0..N { + $wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); + } + + let lower = make_int_key(50); + let it = $wal.range(0, lower.as_str()..).rev(); + let mut i = N - 1; + + for ent in it { + assert_eq!(ent.key(), make_int_key(i).as_str()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value(), make_value(i).as_str()); + assert_eq!(ent.raw_value(), make_value(i).as_bytes()); + assert_eq!(ent.version(), 0); + i -= 1; + } + + assert_eq!(i, 49); + + let it = $wal.range_all(0, lower.as_str()..).rev(); + let mut i = N - 1; + + for ent in it { + assert_eq!(ent.key(), make_int_key(i).as_str()); + assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); + assert_eq!(ent.value().unwrap(), make_value(i).as_str()); + assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); + assert_eq!(ent.version(), 0); + i -= 1; + } + + assert_eq!(i, 49); + }}; +} + +#[cfg(feature = "bounded")] +fn bounded_range_prev(wal: &mut OrderWal>) { + range_prev!(wal); +} + +#[cfg(feature = "unbounded")] +fn unbounded_range_prev(wal: &mut OrderWal>) { + range_prev!(wal); +} diff --git a/src/swmr/tests/get.rs b/src/swmr/tests/get.rs deleted file mode 100644 index 360f8900..00000000 --- a/src/swmr/tests/get.rs +++ /dev/null @@ -1,254 +0,0 @@ -use base::OrderWal; - -use dbutils::{buffer::VacantBuffer, types::MaybeStructured}; - -use std::collections::BTreeMap; - -use crate::{ - memtable::{alternative::TableOptions, Memtable, MemtableEntry}, - swmr::base::{Reader, Writer}, - types::{KeyBuilder, ValueBuilder}, -}; - -use super::*; - -fn first(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let people = (0..10) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal.insert(&p, &v).unwrap(); - - (p, v) - }) - .collect::>(); - - let ent = wal.first().unwrap(); - let (p, v) = people.first_key_value().unwrap(); - assert!(ent.key().equivalent(p)); - assert_eq!(ent.value(), v); - - let wal = wal.reader(); - let ent = wal.first().unwrap(); - let (p, v) = people.first_key_value().unwrap(); - assert!(ent.key().equivalent(p)); - assert_eq!(ent.value(), v); -} - -fn last(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let people = (0..10) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal.insert(&p, &v).unwrap(); - - (p, v) - }) - .collect::>(); - - let ent = wal.last().unwrap(); - let (p, v) = people.last_key_value().unwrap(); - assert!(ent.key().equivalent(p)); - assert_eq!(ent.value(), v); - - let wal = wal.reader(); - let ent = wal.last().unwrap(); - assert!(ent.key().equivalent(p)); - assert_eq!(ent.value(), v); -} - -#[allow(clippy::needless_borrows_for_generic_args)] -fn insert(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let people = (0..100) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal.insert(&p, &v).unwrap(); - (p, v) - }) - .collect::>(); - - assert_eq!(wal.len(), 100); - - for (p, pv) in &people { - assert!(wal.contains_key(p)); - - assert_eq!(wal.get(p).unwrap().value(), pv); - } - - for (p, _) in &people { - assert!(wal.contains_key(p)); - } -} - -fn insert_with_value_builder(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> 
M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let people = (0..100) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal - .insert_with_value_builder( - &p, - ValueBuilder::new(v.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(v.as_bytes()).map(|_| v.len()) - }), - ) - .unwrap(); - (p, v) - }) - .collect::>(); - - assert_eq!(wal.len(), 100); - - for (p, _) in &people { - assert!(wal.contains_key(p)); - assert!(wal.contains_key(&p.as_ref())); - } -} - -#[allow(clippy::needless_borrows_for_generic_args)] -fn insert_with_key_builder(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let people = (0..100) - .map(|_| { - let p = Person::random(); - let pvec = p.to_vec(); - let v = std::format!("My name is {}", p.name); - unsafe { - wal - .insert_with_key_builder( - KeyBuilder::once(p.encoded_len(), |buf| p.encode_to_buffer(buf)), - &v, - ) - .unwrap(); - } - (p, v) - }) - .collect::>(); - - assert_eq!(wal.len(), 100); - - for (p, pv) in &people { - assert!(wal.contains_key(p)); - assert_eq!(wal.get(p).unwrap().value(), pv); - } - - for (p, _) in &people { - assert!(wal.contains_key(p)); - } -} - -fn insert_with_bytes(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let people = (0..100) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - unsafe { - wal - .insert( - MaybeStructured::from_slice(p.to_vec().as_slice()), - MaybeStructured::from_slice(v.as_bytes()), - ) - .unwrap(); - } - (p, v) - }) - .collect::>(); - - assert_eq!(wal.len(), 100); - - for (p, pv) in &people { - assert!(wal.contains_key(p)); - assert!(wal.contains_key(&p.as_ref())); - assert_eq!(wal.get(p).unwrap().value(), pv); - } -} - -fn insert_with_builders(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a> + std::fmt::Debug, - M::Error: std::fmt::Debug, -{ - let people = (0..1) - .map(|_| { - let p = Person::random(); - let pvec = p.to_vec(); - let v = std::format!("My name is {}", p.name); - wal - .insert_with_builders( - KeyBuilder::new(pvec.len(), |buf: &mut VacantBuffer<'_>| { - p.encode_to_buffer(buf) - }), - ValueBuilder::new(v.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(v.as_bytes()).map(|_| v.len()) - }), - ) - .unwrap(); - (p, pvec, v) - }) - .collect::>(); - - assert_eq!(wal.len(), 1); - - for (p, pvec, pv) in &people { - assert!(wal.contains_key(p)); - unsafe { - assert_eq!(wal.get_by_bytes(pvec.as_ref()).unwrap().value(), pv); - } - } - - for (p, _, _) in &people { - assert!(wal.contains_key(p)); - } -} - -#[cfg(feature = "std")] -expand_unit_tests!("linked": OrderWalAlternativeTable [TableOptions::Linked]: crate::memtable::alternative::Table<_, _> { - first, - last, - insert, - insert_with_value_builder, - insert_with_key_builder, - insert_with_bytes, - insert_with_builders, -}); - -expand_unit_tests!("arena": OrderWalAlternativeTable [TableOptions::Arena(Default::default())]: crate::memtable::alternative::Table<_, _> { - first, - last, - insert, - insert_with_value_builder, - insert_with_key_builder, - insert_with_bytes, - insert_with_builders, -}); diff --git a/src/swmr/tests/insert.rs b/src/swmr/tests/insert.rs deleted file mode 100644 index 6f147b04..00000000 --- a/src/swmr/tests/insert.rs +++ /dev/null @@ -1,451 +0,0 @@ -use base::{Reader, Writer}; -use dbutils::{buffer::VacantBuffer, 
types::MaybeStructured}; - -use crate::{ - batch::BatchEntry, - memtable::{ - alternative::{Table, TableOptions}, - Memtable, MemtableEntry, - }, - types::{KeyBuilder, ValueBuilder}, - Builder, -}; - -use super::*; - -#[cfg(feature = "std")] -fn concurrent_basic(mut w: OrderWal) -where - M: Memtable + Send + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); - - let handles = readers.into_iter().map(|(i, reader)| { - spawn(move || loop { - if let Some(p) = reader.get(&i) { - assert_eq!(p.key(), &i); - assert_eq!(p.value(), &i.to_le_bytes()); - break; - } - }) - }); - - spawn(move || { - for i in 0..100u32 { - #[allow(clippy::needless_borrows_for_generic_args)] - w.insert(&i, &i.to_le_bytes()).unwrap(); - } - }); - - for handle in handles { - handle.join().unwrap(); - } -} - -#[cfg(feature = "std")] -fn concurrent_one_key(mut w: OrderWal) -where - M: Memtable + Send + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); - let handles = readers.into_iter().map(|(_, reader)| { - spawn(move || loop { - if let Some(p) = reader.get(&1) { - assert_eq!(p.key(), &1); - assert_eq!(p.value(), &1u32.to_le_bytes()); - break; - } - }) - }); - - w.insert(&1, &1u32.to_le_bytes()).unwrap(); - - for handle in handles { - handle.join().unwrap(); - } -} - -fn insert_batch(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) -where - M: Memtable + Send + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::new( - MaybeStructured::from(person), - MaybeStructured::from(val), - )); - } - - let rp1 = Person::random(); - wal.insert(&rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(&rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -fn insert_batch_with_key_builder( - mut wal: OrderWal, -) -> (Person, Vec<(Person, String)>, Person) -where - M: Memtable + Send + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::new( - KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { - buf.set_len(person.encoded_len()); - person.encode(buf) - }), - MaybeStructured::from(val), - )); - } - - let rp1 = Person::random(); - wal.insert(&rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch_with_key_builder(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(&rp2, &"rp2".to_string()).unwrap(); 
- - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -fn insert_batch_with_value_builder( - mut wal: OrderWal, -) -> (Person, Vec<(Person, String)>, Person) -where - M: Memtable + Send + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::new( - person.into(), - ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(val.as_bytes()).map(|_| val.len()) - }), - )); - } - - let rp1 = Person::random(); - wal.insert(&rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch_with_value_builder(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(&rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -fn insert_batch_with_builders( - mut wal: OrderWal, -) -> (Person, Vec<(Person, String)>, Person) -where - M: Memtable + Send + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::new( - KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { - buf.set_len(person.encoded_len()); - person.encode(buf) - }), - ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(val.as_bytes()).map(|_| val.len()) - }), - )); - } - - let rp1 = Person::random(); - wal.insert(&rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch_with_builders(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(&rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(p).unwrap().value(), val); - } - - assert_eq!(wal.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(&rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -#[cfg(feature = "std")] -expand_unit_tests!( - move "linked": OrderWalAlternativeTable [TableOptions::Linked]: Table<_, _> { - concurrent_basic |p, _res| { - let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; - - for i in 0..100u32 { - assert!(wal.contains_key(&i)); - } - }, - concurrent_one_key |p, _res| { - let wal = unsafe { Builder::new().map::, 
_>(p).unwrap() }; - assert!(wal.contains_key(&1)); - }, - } -); - -#[cfg(feature = "std")] -expand_unit_tests!( - move "linked": OrderWalAlternativeTable [TableOptions::Linked]: Table<_, _> { - insert_batch |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_key_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_value_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_builders |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - } - } -); - -#[cfg(feature = "std")] -expand_unit_tests!( - move "arena": OrderWalAlternativeTable [TableOptions::Arena(Default::default())]: Table<_, _> { - concurrent_basic |p, _res| { - let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; - - for i in 0..100u32 { - assert!(wal.contains_key(&i)); - } - }, - concurrent_one_key |p, _res| { - let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; - assert!(wal.contains_key(&1)); - }, - } -); - -expand_unit_tests!( - move "arena": OrderWalAlternativeTable [TableOptions::Arena(Default::default())]: Table<_, _> { - insert_batch |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_key_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_value_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_builders |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(&p).unwrap().value(), &val); - } - assert_eq!(map.get(&rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(&rp2).unwrap().value(), "rp2"); - } - } -); diff --git a/src/swmr/tests/iters.rs b/src/swmr/tests/iters.rs deleted file mode 100644 index 6fac3743..00000000 --- a/src/swmr/tests/iters.rs +++ /dev/null @@ -1,399 +0,0 @@ -use core::ops::Bound; -use std::collections::BTreeMap; - -use base::{OrderWal, 
Reader, Writer}; - -use crate::memtable::{ - alternative::{Table, TableOptions}, - Memtable, MemtableEntry, -}; - -use super::*; - -fn iter(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let mut people = (0..100) - .map(|_| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal.insert(&p, &v).unwrap(); - (p, v) - }) - .collect::>(); - - people.sort_by(|a, b| a.0.cmp(&b.0)); - - let mut iter = wal.iter(); - - for (pwal, pvec) in people.iter().zip(iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } - - let mut rev_iter = wal.iter().rev(); - - for (pwal, pvec) in people.iter().rev().zip(rev_iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } - - let mut iter = wal.keys(); - - for (pwal, pvec) in people.iter().zip(iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - } - - let mut rev_iter = wal.keys().rev(); - - for (pwal, pvec) in people.iter().rev().zip(rev_iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - } - - let mut iter = wal.values(); - - for (pwal, pvec) in people.iter().zip(iter.by_ref()) { - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } - - let mut rev_iter = wal.values().rev(); - - for (pwal, pvec) in people.iter().rev().zip(rev_iter.by_ref()) { - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } - - let wal = wal.reader(); - let mut iter = wal.iter(); - - for (pwal, pvec) in people.iter().zip(iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } -} - -fn bounds(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - for i in 0..100u32 { - wal.insert(&i, &i).unwrap(); - } - - let upper50 = wal.upper_bound(Bound::Included(&50u32)).unwrap(); - assert_eq!(upper50.value(), &50u32); - let upper51 = wal.upper_bound(Bound::Excluded(&51u32)).unwrap(); - assert_eq!(upper51.value(), &50u32); - - let upper50 = unsafe { - wal - .upper_bound_by_bytes(Bound::Included(50u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(upper50.value(), &50u32); - let upper51 = unsafe { - wal - .upper_bound_by_bytes(Bound::Excluded(51u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(upper51.value(), &50u32); - - let upper101 = wal.upper_bound(Bound::Included(&101u32)).unwrap(); - assert_eq!(upper101.value(), &99u32); - let upper101 = unsafe { - wal - .upper_bound_by_bytes(Bound::Included(101u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(upper101.value(), &99u32); - - let upper_unbounded = wal.upper_bound::(Bound::Unbounded).unwrap(); - assert_eq!(upper_unbounded.value(), &99u32); - let upper_unbounded = unsafe { wal.upper_bound_by_bytes(Bound::Unbounded).unwrap() }; - assert_eq!(upper_unbounded.value(), &99u32); - - let lower50 = wal.lower_bound(Bound::Included(&50u32)).unwrap(); - assert_eq!(lower50.value(), &50u32); - let lower50 = unsafe { - wal - 
.lower_bound_by_bytes(Bound::Included(50u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(lower50.value(), &50u32); - - let lower51 = wal.lower_bound(Bound::Excluded(&51u32)).unwrap(); - assert_eq!(lower51.value(), &52u32); - let lower51 = unsafe { - wal - .lower_bound_by_bytes(Bound::Excluded(51u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(lower51.value(), &52u32); - - let lower0 = wal.lower_bound(Bound::Excluded(&0u32)).unwrap(); - assert_eq!(lower0.value(), &1u32); - let lower0 = unsafe { - wal - .lower_bound_by_bytes(Bound::Excluded(0u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(lower0.value(), &1u32); - - let lower_unbounded = wal.lower_bound::(Bound::Unbounded).unwrap(); - assert_eq!(lower_unbounded.value(), &0u32); - let lower_unbounded = unsafe { wal.lower_bound_by_bytes(Bound::Unbounded).unwrap() }; - assert_eq!(lower_unbounded.value(), &0u32); - - let wal = wal.reader(); - let upper50 = wal.upper_bound(Bound::Included(&50u32)).unwrap(); - assert_eq!(upper50.value(), &50u32); - let upper50 = unsafe { - wal - .upper_bound_by_bytes(Bound::Included(50u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(upper50.value(), &50u32); - - let upper51 = wal.upper_bound(Bound::Excluded(&51u32)).unwrap(); - assert_eq!(upper51.value(), &50u32); - let upper51 = unsafe { - wal - .upper_bound_by_bytes(Bound::Excluded(51u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(upper51.value(), &50u32); - - let upper101 = wal.upper_bound(Bound::Included(&101u32)).unwrap(); - assert_eq!(upper101.value(), &99u32); - let upper101 = unsafe { - wal - .upper_bound_by_bytes(Bound::Included(101u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(upper101.value(), &99u32); - - let upper_unbounded = wal.upper_bound::(Bound::Unbounded).unwrap(); - assert_eq!(upper_unbounded.value(), &99u32); - let upper_unbounded = unsafe { wal.upper_bound_by_bytes(Bound::Unbounded).unwrap() }; - assert_eq!(upper_unbounded.value(), &99u32); - - let lower50 = wal.lower_bound(Bound::Included(&50u32)).unwrap(); - assert_eq!(lower50.value(), &50u32); - let lower50 = unsafe { - wal - .lower_bound_by_bytes(Bound::Included(50u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(lower50.value(), &50u32); - - let lower51 = wal.lower_bound(Bound::Excluded(&51u32)).unwrap(); - assert_eq!(lower51.value(), &52u32); - let lower51 = unsafe { - wal - .lower_bound_by_bytes(Bound::Excluded(51u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(lower51.value(), &52u32); - - let lower0 = wal.lower_bound(Bound::Excluded(&0u32)).unwrap(); - assert_eq!(lower0.value(), &1u32); - let lower0 = unsafe { - wal - .lower_bound_by_bytes(Bound::Excluded(0u32.to_le_bytes().as_ref())) - .unwrap() - }; - assert_eq!(lower0.value(), &1u32); - - let lower_unbounded = wal.lower_bound::(Bound::Unbounded).unwrap(); - assert_eq!(lower_unbounded.value(), &0u32); - let lower_unbounded = unsafe { wal.lower_bound_by_bytes(Bound::Unbounded).unwrap() }; - assert_eq!(lower_unbounded.value(), &0u32); -} - -fn range(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let mut mid = Person::random(); - let people = (0..100) - .map(|idx| { - let p = Person::random(); - let v = std::format!("My name is {}", p.name); - wal.insert(&p, &v).unwrap(); - - if idx == 500 { - mid = p.clone(); - } - (p, v) - }) - .collect::>(); - - let mut iter = wal.range::(&mid..); - - for (pwal, pvec) in people.range(&mid..).zip(iter.by_ref()) { - 
assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } - - assert!(iter.next().is_none()); - - let mut iter = wal.range_keys::(&mid..); - for (pwal, pvec) in people.range(&mid..).zip(iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.clone().key())); - } - - assert!(iter.next().is_none()); - - let mut rev_iter = wal.range_keys::(&mid..).rev(); - - for (pwal, pvec) in people.range(&mid..).rev().zip(rev_iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - } - - let mut iter = wal.range_values::(&mid..); - for (pwal, pvec) in people.range(&mid..).zip(iter.by_ref()) { - assert_eq!(&pwal.1, pvec.clone().value()); - } - - assert!(iter.next().is_none()); - - let mut rev_iter = wal.range_values::(&mid..).rev(); - - for (pwal, pvec) in people.range(&mid..).rev().zip(rev_iter.by_ref()) { - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } - - let wal = wal.reader(); - let mut iter = wal.range::(&mid..); - - for (pwal, pvec) in people.range(&mid..).zip(iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } - - let mut rev_iter = wal.range::(&mid..).rev(); - - for (pwal, pvec) in people.range(&mid..).rev().zip(rev_iter.by_ref()) { - assert!(pwal.0.equivalent(pvec.key())); - assert!(pwal.0.to_vec().eq(pvec.raw_key())); - assert_eq!(&pwal.1, pvec.value()); - assert_eq!(pwal.1.as_bytes(), pvec.raw_value()); - } -} - -fn entry_iter(wal: &mut OrderWal) -where - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a> + std::fmt::Debug, - M::Error: std::fmt::Debug, -{ - for i in 0..100u32 { - wal.insert(&i, &i).unwrap(); - } - - let mut curr = wal.first(); - #[cfg(feature = "std")] - std::println!("{:?}", curr); - let mut cursor = 0; - while let Some(mut ent) = curr { - assert_eq!(ent.key(), &cursor); - assert_eq!(ent.value(), &cursor); - cursor += 1; - curr = ent.next(); - } - - let curr = wal.last(); - - let mut curr = curr.clone(); - let mut cursor = 100; - while let Some(mut ent) = curr { - cursor -= 1; - assert_eq!(ent.key(), &cursor); - assert_eq!(ent.value(), &cursor); - curr = ent.prev(); - } - - let mut curr = wal.keys().next(); - #[cfg(feature = "std")] - std::println!("{:?}", curr); - let mut cursor = 0; - while let Some(mut ent) = curr { - assert_eq!(ent.key(), &cursor); - cursor += 1; - curr = ent.next(); - } - - let curr = wal.keys().next_back(); - - let mut curr = curr.clone(); - let mut cursor = 100; - while let Some(mut ent) = curr { - cursor -= 1; - assert_eq!(ent.key(), &cursor); - curr = ent.prev(); - } - - let mut curr = wal.values().next(); - #[cfg(feature = "std")] - std::println!("{:?}", curr); - let mut cursor = 0; - while let Some(mut ent) = curr { - assert_eq!(ent.value(), &cursor); - cursor += 1; - curr = ent.next(); - } - - let curr = wal.values().next_back(); - - let mut curr = curr.clone(); - let mut cursor = 100; - while let Some(mut ent) = curr { - cursor -= 1; - assert_eq!(ent.value(), &cursor); - curr = ent.prev(); - } -} - -#[cfg(feature = "std")] -expand_unit_tests!("linked": OrderWalAlternativeTable [TableOptions::Linked]: Table<_, _> { - bounds, - entry_iter, -}); - -expand_unit_tests!("arena": OrderWalAlternativeTable [TableOptions::Arena(Default::default())]: Table<_, _> { - bounds, - entry_iter, -}); - 
-#[cfg(feature = "std")] -expand_unit_tests!("linked": OrderWalAlternativeTable [TableOptions::Linked]: Table<_, _> { - range, - iter, -}); - -expand_unit_tests!("arena": OrderWalAlternativeTable [TableOptions::Arena(Default::default())]: Table<_, _> { - range, - iter, -}); diff --git a/src/swmr/tests/multiple_version_constructor.rs b/src/swmr/tests/multiple_version_constructor.rs deleted file mode 100644 index 009a359d..00000000 --- a/src/swmr/tests/multiple_version_constructor.rs +++ /dev/null @@ -1,113 +0,0 @@ -use multiple_version::{OrderWal, Reader, Writer}; -use skl::KeySize; - -use crate::memtable::{ - alternative::{MultipleVersionTable, TableOptions}, - MultipleVersionMemtable, VersionedMemtableEntry, -}; - -use super::*; - -fn zero_reserved(wal: &mut OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - unsafe { - assert_eq!(wal.reserved_slice(), b""); - assert_eq!(wal.reserved_slice_mut(), b""); - - let wal = wal.reader(); - assert_eq!(wal.reserved_slice(), b""); - } -} - -fn reserved(wal: &mut OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - unsafe { - let buf = wal.reserved_slice_mut(); - buf.copy_from_slice(b"al8n"); - assert_eq!(wal.reserved_slice(), b"al8n"); - assert_eq!(wal.reserved_slice_mut(), b"al8n"); - - let wal = wal.reader(); - assert_eq!(wal.reserved_slice(), b"al8n"); - } -} - -#[cfg(feature = "std")] -expand_unit_tests!( - "linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { - zero_reserved, - } -); - -#[cfg(feature = "std")] -expand_unit_tests!( - "linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { - reserved({ - crate::Builder::new() - .with_capacity(MB) - .with_reserved(4) - }), - } -); - -expand_unit_tests!( - "arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: MultipleVersionTable<_, _> { - zero_reserved, - } -); - -expand_unit_tests!( - "arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: MultipleVersionTable<_, _> { - reserved({ - crate::Builder::new() - .with_capacity(MB) - .with_reserved(4) - }), - } -); - -#[test] -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -#[cfg_attr(miri, ignore)] -fn reopen_wrong_kind() { - use crate::Builder; - - let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("test_reopen_wrong_kind"); - let wal = unsafe { - Builder::new() - .with_capacity(MB) - .with_maximum_key_size(KeySize::with(10)) - .with_maximum_value_size(10) - .with_create_new(true) - .with_read(true) - .with_write(true) - .map_mut::, _>(path.as_path()) - .unwrap() - }; - - assert!(!wal.read_only()); - assert_eq!(wal.capacity(), MB); - assert!(wal.remaining() < MB); - assert_eq!(wal.maximum_key_size(), 10); - assert_eq!(wal.maximum_value_size(), 10); - assert_eq!(wal.path().unwrap().as_path(), path.as_path()); - assert_eq!(wal.options().maximum_key_size(), 10); - - let err = unsafe { - Builder::new() - .with_capacity(MB) - .with_read(true) - .map_mut::, _>(path.as_path()) - .unwrap_err() - }; - assert!(matches!(err, crate::error::Error::KindMismatch { .. 
})); -} diff --git a/src/swmr/tests/multiple_version_insert.rs b/src/swmr/tests/multiple_version_insert.rs deleted file mode 100644 index 8d295f2d..00000000 --- a/src/swmr/tests/multiple_version_insert.rs +++ /dev/null @@ -1,549 +0,0 @@ -use dbutils::{buffer::VacantBuffer, types::MaybeStructured}; -use multiple_version::{Reader, Writer}; - -use crate::{ - batch::BatchEntry, - memtable::{ - alternative::{MultipleVersionTable, TableOptions}, - MultipleVersionMemtable, VersionedMemtableEntry, - }, - types::{KeyBuilder, ValueBuilder}, - Builder, -}; - -use super::*; - -#[cfg(feature = "std")] -fn concurrent_basic(mut w: OrderWal) -where - M: MultipleVersionMemtable + Send + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); - - let handles = readers.into_iter().map(|(i, reader)| { - spawn(move || loop { - if let Some(p) = reader.get(0, &i) { - assert_eq!(p.key(), &i); - assert_eq!(p.value(), &i.to_le_bytes()); - break; - } - }) - }); - - spawn(move || { - for i in 0..100u32 { - #[allow(clippy::needless_borrows_for_generic_args)] - w.insert(0, &i, &i.to_le_bytes()).unwrap(); - } - }); - - for handle in handles { - handle.join().unwrap(); - } -} - -#[cfg(feature = "std")] -fn concurrent_one_key(mut w: OrderWal) -where - M: MultipleVersionMemtable + Send + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - let readers = (0..100u32).map(|i| (i, w.reader())).collect::>(); - let handles = readers.into_iter().map(|(_, reader)| { - spawn(move || loop { - if let Some(p) = reader.get(0, &1) { - assert_eq!(p.key(), &1); - assert_eq!(p.value(), &1u32.to_le_bytes()); - break; - } - }) - }); - - w.insert(0, &1, &1u32.to_le_bytes()).unwrap(); - - for handle in handles { - handle.join().unwrap(); - } -} - -fn insert_batch(mut wal: OrderWal) -> (Person, Vec<(Person, String)>, Person) -where - M: MultipleVersionMemtable + Send + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::with_version( - 0, - MaybeStructured::from(person), - MaybeStructured::from(val), - )); - } - - let rp1 = Person::random(); - wal.insert(0, &rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(0, &rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -fn insert_batch_with_key_builder( - mut wal: OrderWal, -) -> (Person, Vec<(Person, String)>, Person) -where - M: MultipleVersionMemtable + Send + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - 
) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::with_version( - 0, - KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { - buf.set_len(person.encoded_len()); - person.encode(buf) - }), - MaybeStructured::from(val), - )); - } - - let rp1 = Person::random(); - wal.insert(0, &rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch_with_key_builder(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(0, &rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -fn insert_batch_with_value_builder( - mut wal: OrderWal, -) -> (Person, Vec<(Person, String)>, Person) -where - M: MultipleVersionMemtable + Send + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::with_version( - 0, - person.into(), - ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(val.as_bytes()).map(|_| val.len()) - }), - )); - } - - let rp1 = Person::random(); - wal.insert(0, &rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch_with_value_builder(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(0, &rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -fn insert_batch_with_builders( - mut wal: OrderWal, -) -> (Person, Vec<(Person, String)>, Person) -where - M: MultipleVersionMemtable + Send + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::with_version( - 0, - KeyBuilder::new(person.encoded_len(), |buf: &mut VacantBuffer<'_>| { - buf.set_len(person.encoded_len()); - person.encode(buf) - }), - ValueBuilder::new(val.len(), |buf: &mut VacantBuffer<'_>| { - buf.put_slice(val.as_bytes()).map(|_| val.len()) - }), - )); - } - - let rp1 = Person::random(); - wal.insert(0, &rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch_with_builders(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(0, &rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - 
assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -fn insert_batch_with_tombstone( - mut wal: OrderWal, -) -> (Person, Vec<(Person, String)>, Person) -where - M: MultipleVersionMemtable + Send + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - M::Error: std::fmt::Debug, -{ - const N: u32 = 5; - - let mut batch = vec![]; - let output = (0..N) - .map(|i| { - ( - { - let mut p = Person::random(); - p.id = i as u64; - p - }, - std::format!("My id is {i}"), - ) - .clone() - }) - .collect::>(); - - for (person, val) in output.iter() { - batch.push(BatchEntry::with_version( - 0, - MaybeStructured::from(person), - MaybeStructured::from(val), - )); - - batch.push(BatchEntry::tombstone_with_version( - 1, - MaybeStructured::from(person), - )); - } - - let rp1 = Person::random(); - wal.insert(0, &rp1, &"rp1".to_string()).unwrap(); - wal.insert_batch(&mut batch).unwrap(); - let rp2 = Person::random(); - wal.insert(0, &rp2, &"rp2".to_string()).unwrap(); - - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - for (p, _) in output.iter() { - assert!(wal.get(1, p).is_none()); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - let wal = wal.reader(); - for (p, val) in output.iter() { - assert_eq!(wal.get(0, p).unwrap().value(), val); - } - - assert_eq!(wal.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(wal.get(0, &rp2).unwrap().value(), "rp2"); - - (rp1, output, rp2) -} - -#[cfg(feature = "std")] -expand_unit_tests!( - move "linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { - concurrent_basic |p, _res| { - let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; - - for i in 0..100u32 { - assert!(wal.contains_key(0, &i)); - } - }, - concurrent_one_key |p, _res| { - let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; - assert!(wal.contains_key(0, &1)); - }, - } -); - -#[cfg(feature = "std")] -expand_unit_tests!( - move "linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { - insert_batch |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_tombstone |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - assert!(map.get(1, &p).is_none()); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_key_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::>::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_value_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - 
assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_builders |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - } - } -); - -#[cfg(feature = "std")] -expand_unit_tests!( - move "arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: MultipleVersionTable<_, _> { - concurrent_basic |p, _res| { - let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; - - for i in 0..100u32 { - assert!(wal.contains_key(0, &i)); - } - }, - concurrent_one_key |p, _res| { - let wal = unsafe { Builder::new().map::, _>(p).unwrap() }; - assert!(wal.contains_key(0, &1)); - }, - } -); - -expand_unit_tests!( - move "arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: MultipleVersionTable<_, _> { - insert_batch |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_tombstone |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - assert!(map.get(1, &p).is_none()); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_key_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_value_builder |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - }, - insert_batch_with_builders |p, (rp1, data, rp2)| { - let map = unsafe { - Builder::new() - .map::, _>(&p) - .unwrap() - }; - - for (p, val) in data { - assert_eq!(map.get(0, &p).unwrap().value(), &val); - } - assert_eq!(map.get(0, &rp1).unwrap().value(), "rp1"); - assert_eq!(map.get(0, &rp2).unwrap().value(), "rp2"); - } - } -); diff --git a/src/swmr/tests/multiple_version_iters.rs b/src/swmr/tests/multiple_version_iters.rs deleted file mode 100644 index 5b4a0743..00000000 --- a/src/swmr/tests/multiple_version_iters.rs +++ /dev/null @@ -1,556 +0,0 @@ -use core::ops::Bound; - -use crate::memtable::{ - alternative::{MultipleVersionTable, TableOptions}, - MultipleVersionMemtable, VersionedMemtableEntry, -}; -use multiple_version::{Reader, Writer}; - -use super::*; - -#[cfg(feature = "std")] -expand_unit_tests!("linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { - iter_all_versions_mvcc, -}); - -expand_unit_tests!("arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: 
MultipleVersionTable<_, _> { - iter_all_versions_mvcc, -}); - -#[cfg(feature = "std")] -expand_unit_tests!("linked": MultipleVersionOrderWalAlternativeTable [TableOptions::Linked]: MultipleVersionTable<_, _> { - iter_next, - iter_all_versions_next_by_entry, - iter_all_versions_next_by_versioned_entry, - range_next, - iter_prev, - range_prev, - iter_all_versions_prev_by_entry, - iter_all_versions_prev_by_versioned_entry, -}); - -macro_rules! arena_builder { - () => {{ - crate::Builder::new() - .with_memtable_options( - crate::memtable::arena::TableOptions::new() - .with_capacity(1024 * 1024) - .into(), - ) - .with_capacity(8 * 1024) - }}; -} - -expand_unit_tests!("arena": MultipleVersionOrderWalAlternativeTable [TableOptions::Arena(Default::default())]: MultipleVersionTable<_, _> { - iter_next(arena_builder!()), - iter_all_versions_next_by_entry(arena_builder!()), - iter_all_versions_next_by_versioned_entry(arena_builder!()), - range_next(arena_builder!()), - iter_prev(arena_builder!()), - range_prev(arena_builder!()), - iter_all_versions_prev_by_entry(arena_builder!()), - iter_all_versions_prev_by_versioned_entry(arena_builder!()), -}); - -fn make_int_key(i: usize) -> String { - ::std::format!("{:05}", i) -} - -fn make_value(i: usize) -> String { - ::std::format!("v{:05}", i) -} - -fn iter_all_versions_mvcc(wal: &mut multiple_version::OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - wal.insert(1, "a", "a1").unwrap(); - wal.insert(3, "a", "a2").unwrap(); - wal.insert(1, "c", "c1").unwrap(); - wal.insert(3, "c", "c2").unwrap(); - - let mut iter = wal.iter_all_versions(0); - let mut num = 0; - while iter.next().is_some() { - num += 1; - } - assert_eq!(num, 0); - - let mut iter = wal.iter_all_versions(1); - let mut num = 0; - while iter.next().is_some() { - num += 1; - } - assert_eq!(num, 2); - - let mut iter = wal.iter_all_versions(2); - let mut num = 0; - while iter.next().is_some() { - num += 1; - } - assert_eq!(num, 2); - - let mut iter = wal.iter_all_versions(3); - let mut num = 0; - while iter.next().is_some() { - num += 1; - } - assert_eq!(num, 4); - - let upper_bound = wal.upper_bound(1, Bound::Included("b")).unwrap(); - assert_eq!(upper_bound.value(), "a1"); - - let upper_bound = wal.upper_bound_versioned(1, Bound::Included("b")).unwrap(); - assert_eq!(upper_bound.value().unwrap(), "a1"); - - let upper_bound = unsafe { wal.upper_bound_by_bytes(1, Bound::Included(b"b")).unwrap() }; - assert_eq!(upper_bound.value(), "a1"); - - let upper_bound = unsafe { - wal - .upper_bound_versioned_by_bytes(1, Bound::Included(b"b")) - .unwrap() - }; - assert_eq!(upper_bound.value().unwrap(), "a1"); - - let lower_bound = wal.lower_bound(1, Bound::Included("b")).unwrap(); - assert_eq!(lower_bound.value(), "c1"); - - let lower_bound = wal.lower_bound_versioned(1, Bound::Included("b")).unwrap(); - assert_eq!(lower_bound.value().unwrap(), "c1"); - - let lower_bound = unsafe { wal.lower_bound_by_bytes(1, Bound::Included(b"b")).unwrap() }; - assert_eq!(lower_bound.value(), "c1"); - - let lower_bound = unsafe { - wal - .lower_bound_versioned_by_bytes(1, Bound::Included(b"b")) - .unwrap() - }; - assert_eq!(lower_bound.value().unwrap(), "c1"); -} - -fn iter_next(wal: &mut multiple_version::OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - const N: usize = 100; - - for i in (0..N).rev() { - 
wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); - } - - let iter = wal.iter_all_versions(0); - - let mut i = 0; - for ent in iter { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - assert_eq!(ent.value().unwrap(), make_value(i).as_str()); - assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); - i += 1; - } - - assert_eq!(i, N); - - let iter = wal.iter(0); - let mut i = 0; - for ent in iter { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - assert_eq!(ent.value(), make_value(i).as_str()); - assert_eq!(ent.raw_value(), make_value(i).as_bytes()); - i += 1; - } - - assert_eq!(i, N); - - let iter = wal.values(0); - - let mut i = 0; - for ent in iter { - assert_eq!(ent.value(), make_value(i).as_str()); - assert_eq!(ent.raw_value(), make_value(i).as_bytes()); - i += 1; - } - - assert_eq!(i, N); - - let iter = wal.keys(0); - let mut i = 0; - for ent in iter { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - i += 1; - } - - assert_eq!(i, N); -} - -fn iter_all_versions_next_by_entry(wal: &mut multiple_version::OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - const N: usize = 100; - - for i in (0..N).rev() { - wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); - } - - let mut ent = wal.first(0).clone(); - #[cfg(feature = "std")] - std::println!("{ent:?}"); - let mut i = 0; - while let Some(ref mut entry) = ent { - assert_eq!(entry.key(), make_int_key(i).as_str()); - assert_eq!(entry.value(), make_value(i).as_str()); - ent = entry.next(); - i += 1; - } - assert_eq!(i, N); - - let mut ent = wal.keys(0).next().clone(); - #[cfg(feature = "std")] - std::println!("{ent:?}"); - - let mut i = 0; - while let Some(ref mut entry) = ent { - assert_eq!(entry.key(), make_int_key(i).as_str()); - ent = entry.next(); - i += 1; - } - assert_eq!(i, N); - - let mut ent = wal.values(0).next().clone(); - #[cfg(feature = "std")] - std::println!("{ent:?}"); - - let mut i = 0; - while let Some(ref mut entry) = ent { - assert_eq!(entry.value(), make_value(i).as_str()); - ent = entry.next(); - i += 1; - } - assert_eq!(i, N); -} - -fn iter_all_versions_next_by_versioned_entry( - wal: &mut multiple_version::OrderWal, -) where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, - for<'a> M::VersionedItem<'a>: std::fmt::Debug, -{ - const N: usize = 100; - - for i in 0..N { - let k = make_int_key(i); - let v = make_value(i); - wal.insert(0, &k, &v).unwrap(); - wal.remove(1, &k).unwrap(); - } - - let mut ent = wal.first(0).clone(); - let mut i = 0; - while let Some(ref mut entry) = ent { - assert_eq!(entry.key(), make_int_key(i).as_str()); - assert_eq!(entry.value(), make_value(i).as_str()); - ent = entry.next(); - i += 1; - } - assert_eq!(i, N); - - let mut ent = wal.first_versioned(1).clone(); - #[cfg(feature = "std")] - std::println!("{ent:?}"); - let mut i = 0; - while let Some(ref mut entry) = ent { - if i % 2 == 1 { - assert_eq!(entry.version(), 0); - assert_eq!(entry.key(), make_int_key(i / 2).as_str()); - assert_eq!(entry.value().unwrap(), make_value(i / 2).as_str()); - } else { - assert_eq!(entry.version(), 1); - assert_eq!(entry.key(), make_int_key(i / 2).as_str()); - assert!(entry.value().is_none()); - } - - ent = 
entry.next(); - i += 1; - } - assert_eq!(i, N * 2); - let ent = wal.first(1); - assert!(ent.is_none()); -} - -fn range_next(wal: &mut multiple_version::OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - const N: usize = 100; - - for i in (0..N).rev() { - wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); - } - - let upper = make_int_key(50); - let mut i = 0; - let mut iter = wal.range(0, ..=upper.as_str()); - for ent in &mut iter { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - assert_eq!(ent.value(), make_value(i).as_str()); - assert_eq!(ent.raw_value(), make_value(i).as_bytes()); - i += 1; - } - - assert_eq!(i, 51); - - let mut i = 0; - let mut iter = wal.range_all_versions(0, ..=upper.as_str()); - for ent in &mut iter { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - assert_eq!(ent.value().unwrap(), make_value(i).as_str()); - assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); - i += 1; - } - - assert_eq!(i, 51); - - let mut i = 0; - let mut iter = wal.range_keys(0, ..=upper.as_str()); - for ent in &mut iter { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - i += 1; - } - - assert_eq!(i, 51); - - let mut i = 0; - let mut iter = wal.range_values(0, ..=upper.as_str()); - for ent in &mut iter { - assert_eq!(ent.value(), make_value(i).as_str()); - assert_eq!(ent.raw_value(), make_value(i).as_bytes()); - i += 1; - } - assert_eq!(i, 51); -} - -fn iter_prev(wal: &mut multiple_version::OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - const N: usize = 100; - - for i in 0..N { - wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); - } - - let iter = wal.iter_all_versions(0).rev(); - let mut i = N; - for ent in iter { - assert_eq!(ent.key(), make_int_key(i - 1).as_str()); - assert_eq!(ent.value().unwrap(), make_value(i - 1).as_str()); - i -= 1; - } - - assert_eq!(i, 0); - - let iter = wal.iter(0).rev(); - let mut i = N; - for ent in iter { - assert_eq!(ent.key(), make_int_key(i - 1).as_str()); - assert_eq!(ent.value(), make_value(i - 1).as_str()); - i -= 1; - } - - assert_eq!(i, 0); - - let iter = wal.values(0).rev(); - let mut i = N; - for ent in iter { - assert_eq!(ent.value(), make_value(i - 1).as_str()); - i -= 1; - } - - assert_eq!(i, 0); - - let iter = wal.keys(0).rev(); - let mut i = N; - for ent in iter { - assert_eq!(ent.key(), make_int_key(i - 1).as_str()); - i -= 1; - } - - assert_eq!(i, 0); -} - -fn iter_all_versions_prev_by_entry(wal: &mut multiple_version::OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - const N: usize = 100; - - for i in 0..N { - wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); - } - - let mut ent = wal.last(0); - - let mut i = 0; - while let Some(ref mut entry) = ent { - i += 1; - assert_eq!(entry.key(), make_int_key(N - i).as_str()); - assert_eq!(entry.value(), make_value(N - i).as_str()); - ent = entry.prev(); - } - assert_eq!(i, N); - - let mut ent = wal.values(0).next_back(); - - let mut i = 0; - while let Some(ref mut entry) = ent { - i += 1; - assert_eq!(entry.value(), make_value(N - i).as_str()); - ent = entry.prev(); 
- } - - assert_eq!(i, N); - - let mut ent = wal.keys(0).next_back(); - - let mut i = 0; - while let Some(ref mut entry) = ent { - i += 1; - assert_eq!(entry.key(), make_int_key(N - i).as_str()); - ent = entry.prev(); - } - - assert_eq!(i, N); -} - -fn iter_all_versions_prev_by_versioned_entry( - wal: &mut multiple_version::OrderWal, -) where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, - for<'a> M::VersionedItem<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - const N: usize = 100; - - for i in 0..N { - let k = make_int_key(i); - let v = make_value(i); - wal.insert(0, &k, &v).unwrap(); - wal.remove(1, &k).unwrap(); - } - - let mut ent = wal.last(0); - let mut i = 0; - while let Some(ref mut entry) = ent { - i += 1; - assert_eq!(entry.key(), make_int_key(N - i).as_str()); - assert_eq!(entry.value(), make_value(N - i).as_str()); - ent = entry.prev(); - } - assert_eq!(i, N); - - let mut ent = wal.last_versioned(1); - let mut i = 0; - while let Some(ref mut entry) = ent { - if i % 2 == 0 { - assert_eq!(entry.version(), 0); - assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_str()); - assert_eq!(entry.value().unwrap(), make_value(N - 1 - i / 2).as_str()); - } else { - assert_eq!(entry.version(), 1); - assert_eq!(entry.key(), make_int_key(N - 1 - i / 2).as_str()); - assert!(entry.value().is_none()); - } - - ent = entry.prev(); - i += 1; - } - - assert_eq!(i, N * 2); - let ent = wal.last(1); - assert!(ent.is_none()); -} - -fn range_prev(wal: &mut multiple_version::OrderWal) -where - M: MultipleVersionMemtable + 'static, - M::Error: std::fmt::Debug, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a> + std::fmt::Debug, -{ - const N: usize = 100; - - for i in 0..N { - wal.insert(0, &make_int_key(i), &make_value(i)).unwrap(); - } - - let lower = make_int_key(50); - let it = wal.range(0, lower.as_str()..).rev(); - let mut i = N - 1; - - for ent in it { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - assert_eq!(ent.value(), make_value(i).as_str()); - assert_eq!(ent.raw_value(), make_value(i).as_bytes()); - assert_eq!(ent.version(), 0); - i -= 1; - } - - assert_eq!(i, 49); - - let it = wal.range_all_versions(0, lower.as_str()..).rev(); - let mut i = N - 1; - - for ent in it { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - assert_eq!(ent.value().unwrap(), make_value(i).as_str()); - assert_eq!(ent.raw_value().unwrap(), make_value(i).as_bytes()); - assert_eq!(ent.version(), 0); - i -= 1; - } - - assert_eq!(i, 49); - - let mut i = N - 1; - let mut iter = wal.range_keys(0, lower.as_str()..).rev(); - for ent in &mut iter { - assert_eq!(ent.key(), make_int_key(i).as_str()); - assert_eq!(ent.raw_key(), make_int_key(i).as_bytes()); - assert_eq!(ent.version(), 0); - i -= 1; - } - assert_eq!(i, 49); - - let mut i = N - 1; - let mut iter = wal.range_values(0, lower.as_str()..).rev(); - for ent in &mut iter { - assert_eq!(ent.value(), make_value(i).as_str()); - assert_eq!(ent.raw_value(), make_value(i).as_bytes()); - assert_eq!(ent.version(), 0); - i -= 1; - } - assert_eq!(i, 49); -} diff --git a/src/swmr/wal.rs b/src/swmr/wal.rs index 5c5ce8cb..87cb3322 100644 --- a/src/swmr/wal.rs +++ b/src/swmr/wal.rs @@ -1,26 +1,14 @@ -use core::marker::PhantomData; - +use crate::{memtable::Memtable, Options}; use rarena_allocator::sync::Arena; -use crate::{memtable::BaseTable, sealed::Wal, Options}; - 
-pub struct OrderCore -where - K: ?Sized, - V: ?Sized, -{ +pub struct OrderCore { pub(super) arena: Arena, pub(super) map: M, pub(super) opts: Options, pub(super) cks: S, - pub(super) _m: PhantomData<(fn() -> K, fn() -> V)>, } -impl core::fmt::Debug for OrderCore -where - K: ?Sized, - V: ?Sized, -{ +impl core::fmt::Debug for OrderCore { #[inline] fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_struct("OrderCore") @@ -30,48 +18,17 @@ where } } -impl Wal for OrderCore +impl OrderCore where - K: ?Sized, - V: ?Sized, - M: BaseTable, + M: Memtable, { - type Allocator = Arena; - type Memtable = M; - - #[inline] - fn memtable(&self) -> &Self::Memtable { - &self.map - } - - #[inline] - fn memtable_mut(&mut self) -> &mut Self::Memtable { - &mut self.map - } - #[inline] - fn construct(arena: Self::Allocator, set: Self::Memtable, opts: Options, checksumer: S) -> Self { + pub fn construct(arena: Arena, set: M, opts: Options, checksumer: S) -> Self { Self { arena, map: set, opts, cks: checksumer, - _m: PhantomData, } } - - #[inline] - fn options(&self) -> &Options { - &self.opts - } - - #[inline] - fn allocator(&self) -> &Self::Allocator { - &self.arena - } - - #[inline] - fn hasher(&self) -> &S { - &self.cks - } } diff --git a/src/swmr/writer.rs b/src/swmr/writer.rs index e1fe54b0..f6cb5722 100644 --- a/src/swmr/writer.rs +++ b/src/swmr/writer.rs @@ -1,84 +1,94 @@ -use crate::{ - memtable::{BaseTable, Memtable, MemtableEntry, MultipleVersionMemtable, VersionedMemtableEntry}, - sealed::{Constructable, WithVersion}, -}; -use dbutils::{checksum::Crc32, types::Type}; +use super::{reader::OrderWalReader, wal::OrderCore}; +use crate::{log::Log, memtable::Memtable}; +use dbutils::checksum::Crc32; use rarena_allocator::sync::Arena; +use triomphe::Arc; + #[cfg(all(feature = "memmap", not(target_family = "wasm")))] use rarena_allocator::Allocator; -use std::sync::Arc; - -use super::{reader::OrderWalReader, wal::OrderCore}; - /// A ordered write-ahead log implementation for concurrent thread environments. 
-pub struct OrderWal { - pub(super) core: Arc>, +pub struct OrderWal { + pub(crate) core: Arc>, } -impl core::fmt::Debug for OrderWal -where - K: ?Sized, - V: ?Sized, -{ +impl core::fmt::Debug for OrderWal { #[inline] fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { f.debug_tuple("OrderWal").field(&self.core).finish() } } -unsafe impl Send for OrderWal {} -unsafe impl Sync for OrderWal {} +unsafe impl Send for OrderWal {} +unsafe impl Sync for OrderWal {} -impl OrderWal { +impl OrderWal { #[inline] - pub(super) const fn construct(core: Arc>) -> Self { + pub(super) const fn from_core(core: Arc>) -> Self { Self { core } } } -impl Constructable for OrderWal +impl Log for OrderWal where - K: ?Sized + 'static, - V: ?Sized + 'static, S: 'static, - M: BaseTable + 'static, + M: Memtable + 'static, { type Allocator = Arena; - type Wal = OrderCore; type Memtable = M; type Checksumer = S; - type Reader = OrderWalReader; + type Reader = OrderWalReader; #[inline] - fn as_wal(&self) -> &Self::Wal { - &self.core + fn allocator<'a>(&'a self) -> &'a Self::Allocator + where + Self::Allocator: 'a, + { + &self.core.arena } #[inline] - fn from_core(core: Self::Wal) -> Self { + fn construct( + arena: Self::Allocator, + base: Self::Memtable, + opts: crate::Options, + checksumer: Self::Checksumer, + ) -> Self { Self { - core: Arc::new(core), + core: Arc::new(OrderCore::construct(arena, base, opts, checksumer)), } } + + #[inline] + fn options(&self) -> &crate::Options { + &self.core.opts + } + + #[inline] + fn memtable(&self) -> &Self::Memtable { + &self.core.map + } + + #[inline] + fn hasher(&self) -> &Self::Checksumer { + &self.core.cks + } } -impl OrderWal +impl OrderWal where - K: ?Sized + 'static, - V: ?Sized + 'static, S: 'static, - M: BaseTable + 'static, + M: Memtable + 'static, { /// Returns the path of the WAL if it is backed by a file. 
/// /// ## Example /// /// ```rust - /// use orderwal::{base::OrderWal, Builder}; + /// use orderwal::{generic::{OrderWal, BoundedTable}, Builder}; /// /// // A in-memory WAL - /// let wal = Builder::new().with_capacity(100).alloc::>().unwrap(); + /// let wal = Builder::new().with_capacity(100).alloc::>>().unwrap(); /// /// assert!(wal.path_buf().is_none()); /// ``` @@ -86,36 +96,6 @@ where #[cfg_attr(docsrs, doc(cfg(all(feature = "std", not(target_family = "wasm")))))] #[inline] pub fn path_buf(&self) -> Option<&std::sync::Arc> { - self.as_wal().arena.path() - } -} - -impl crate::wal::base::Writer for OrderWal -where - K: ?Sized + Type + Ord + 'static, - V: ?Sized + Type + 'static, - M: Memtable + 'static, - for<'a> M::Item<'a>: MemtableEntry<'a>, - S: 'static, -{ - #[inline] - fn reader(&self) -> Self::Reader { - OrderWalReader::new(self.core.clone()) - } -} - -impl crate::wal::multiple_version::Writer for OrderWal -where - K: ?Sized + Type + Ord + 'static, - V: ?Sized + Type + 'static, - M: MultipleVersionMemtable + 'static, - for<'a> M::Item<'a>: VersionedMemtableEntry<'a>, - for<'a> M::VersionedItem<'a>: WithVersion, - for<'a> M::Item<'a>: WithVersion, - S: 'static, -{ - #[inline] - fn reader(&self) -> Self::Reader { - OrderWalReader::new(self.core.clone()) + self.core.arena.path() } } diff --git a/src/types.rs b/src/types.rs index 7747b507..46070dba 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,42 +1,86 @@ -use dbutils::leb128::encoded_u64_varint_len; +use core::{ + marker::PhantomData, + mem, + ops::{Bound, RangeBounds}, +}; + +use dbutils::error::InsufficientBuffer; +use ref_cast::RefCast as _; +use sealed::Pointee; + +use crate::utils::split_lengths; + +use super::{CHECKSUM_SIZE, RECORD_FLAG_SIZE, VERSION_SIZE}; + pub use dbutils::{ - buffer::{BufWriter, BufWriterOnce, VacantBuffer}, - types::*, + buffer::{BufWriter, VacantBuffer}, + types::{Type, TypeRef}, }; -use crate::{utils::merge_lengths, CHECKSUM_SIZE, RECORD_FLAG_SIZE, VERSION_SIZE}; +mod mode; +mod raw; +pub(crate) use mode::sealed; +pub use mode::{Dynamic, Generic, Mode}; +pub(crate) use raw::*; -pub(crate) mod base; -pub(crate) mod multiple_version; +#[doc(hidden)] +#[derive(ref_cast::RefCast)] +#[repr(transparent)] +pub struct Query(pub(crate) Q); + +pub(crate) struct QueryRange { + r: R, + _m: PhantomData, +} + +impl From for QueryRange +where + R: RangeBounds, + Q: ?Sized, +{ + #[inline] + fn from(r: R) -> Self { + Self { r, _m: PhantomData } + } +} -const ENTRY_FLAGS_SIZE: usize = core::mem::size_of::(); +impl core::ops::RangeBounds> for QueryRange +where + R: RangeBounds, + Q: ?Sized, +{ + #[inline] + fn start_bound(&self) -> Bound<&Query> { + self.r.start_bound().map(Query::ref_cast) + } -/// The kind of the Write-Ahead Log. -/// -/// Currently, there are two kinds of Write-Ahead Log: -/// 1. Plain: The Write-Ahead Log is plain, which means it does not support multiple versions. -/// 2. MultipleVersion: The Write-Ahead Log supports multiple versions. -#[derive(Debug, PartialEq, Eq)] -#[repr(u8)] -#[non_exhaustive] -pub enum Kind { - /// The Write-Ahead Log is plain, which means it does not support multiple versions. - Plain = 0, - /// The Write-Ahead Log supports multiple versions. 
- MultipleVersion = 1, + #[inline] + fn end_bound(&self) -> Bound<&Query> { + self.r.end_bound().map(Query::ref_cast) + } } -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -impl TryFrom for Kind { - type Error = crate::error::UnknownKind; +#[doc(hidden)] +#[derive(ref_cast::RefCast)] +#[repr(transparent)] +pub struct RefQuery { + pub(crate) query: Q, +} +impl RefQuery { #[inline] - fn try_from(value: u8) -> Result { - Ok(match value { - 0 => Self::Plain, - 1 => Self::MultipleVersion, - _ => return Err(crate::error::UnknownKind(value)), - }) + pub const fn new(query: Q) -> Self { + Self { query } + } +} + +bitflags::bitflags! { + /// The flags for each atomic write. + pub(super) struct Flags: u8 { + /// First bit: 1 indicates committed, 0 indicates uncommitted + const COMMITTED = 0b00000001; + /// Second bit: 1 indicates batching, 0 indicates single entry + const BATCHING = 0b00000010; } } @@ -44,12 +88,22 @@ bitflags::bitflags! { /// The flags for each entry. #[derive(Debug, Copy, Clone)] pub struct EntryFlags: u8 { - /// First bit: 1 indicates removed - const REMOVED = 0b00000001; - /// Second bit: 1 indicates the key is pointer - const POINTER = 0b00000010; - /// Third bit: 1 indicates the entry contains a version - const VERSIONED = 0b00000100; + /// First bit: 1 indicates the entry is inserted within a batch + const BATCHING = 0b00000001; + /// Second bit: 1 indicates the key is pointer, the real key is stored in the offset contained in the RecordPointer. + const KEY_POINTER = 0b00000010; + /// Third bit: 1 indicates the value is pointer, the real value is stored in the offset contained in the ValuePointer. + const VALUE_POINTER = 0b00000100; + /// Fourth bit: 1 indicates the entry is a tombstone + const REMOVED = 0b00001000; + /// Fifth bit: 1 indicates the entry contains a version + const RANGE_DELETION = 0b00010000; + /// Sixth bit: 1 indicates the entry is range deletion + /// + /// [Reference link](https://github.com/cockroachdb/pebble/blob/master/docs/rocksdb.md#range-deletions) + const RANGE_SET = 0b00100000; + /// Seventh bit: 1 indicates the entry is range set + const RANGE_UNSET = 0b01000000; } } @@ -64,70 +118,85 @@ pub(crate) struct EncodedEntryMeta { pub(crate) entry_size: u32, pub(crate) klen: usize, pub(crate) vlen: usize, - pub(crate) versioned: bool, - batch: bool, + pub(crate) batch: bool, } impl EncodedEntryMeta { #[inline] - pub(crate) const fn new(key_len: usize, value_len: usize, versioned: bool) -> Self { - // Cast to u32 is safe, because we already checked those values before calling this function. - - let len = merge_lengths(key_len as u32, value_len as u32); - let len_size = encoded_u64_varint_len(len); - let version_size = if versioned { VERSION_SIZE } else { 0 }; - let elen = RECORD_FLAG_SIZE as u32 - + len_size as u32 - + ENTRY_FLAGS_SIZE as u32 - + version_size as u32 - + key_len as u32 - + value_len as u32 - + CHECKSUM_SIZE as u32; - + pub(crate) const fn placeholder() -> Self { Self { - packed_kvlen_size: len_size, - batch: false, - packed_kvlen: len, - entry_size: elen, - klen: key_len, - vlen: value_len, - versioned, + packed_kvlen_size: 0, + packed_kvlen: 0, + entry_size: 0, + klen: 0, + vlen: 0, + batch: true, } } #[inline] - pub(crate) const fn batch(key_len: usize, value_len: usize, versioned: bool) -> Self { - // Cast to u32 is safe, because we already checked those values before calling this function. 
+ pub(crate) const fn entry_flag_offset(&self) -> usize { + if self.batch { + return 0; + } - let len = merge_lengths(key_len as u32, value_len as u32); - let len_size = encoded_u64_varint_len(len); - let version_size = if versioned { VERSION_SIZE } else { 0 }; - let elen = len_size as u32 - + EntryFlags::SIZE as u32 - + version_size as u32 - + key_len as u32 - + value_len as u32; + RECORD_FLAG_SIZE + } - Self { - packed_kvlen_size: len_size, - packed_kvlen: len, - entry_size: elen, - klen: key_len, - vlen: value_len, - versioned, - batch: true, + #[inline] + pub(crate) const fn version_offset(&self) -> usize { + self.entry_flag_offset() + EntryFlags::SIZE + } + + #[inline] + pub(crate) const fn key_offset(&self) -> usize { + self.version_offset() + VERSION_SIZE + self.packed_kvlen_size + } + + #[inline] + pub(crate) const fn value_offset(&self) -> usize { + self.key_offset() + self.klen + } + + #[inline] + pub(crate) const fn checksum_offset(&self) -> usize { + if self.batch { + self.value_offset() + self.vlen + } else { + self.entry_size as usize - CHECKSUM_SIZE } } +} +#[derive(Debug)] +pub(crate) struct EncodedRangeEntryMeta { + pub(crate) packed_kvlen_size: usize, + pub(crate) packed_kvlen: u64, + pub(crate) entry_size: u32, + pub(crate) range_key_len: u64, + pub(crate) range_key_len_size: usize, + pub(crate) total_range_key_size: usize, + /// Include Bound marker byte + pub(crate) start_key_len: usize, + /// Include Bound marker byte + pub(crate) end_key_len: usize, + pub(crate) vlen: usize, + pub(crate) batch: bool, +} + +impl EncodedRangeEntryMeta { #[inline] - pub(crate) const fn batch_zero(versioned: bool) -> Self { + pub(crate) const fn placeholder() -> Self { Self { packed_kvlen_size: 0, packed_kvlen: 0, entry_size: 0, - klen: 0, + range_key_len: 0, + range_key_len_size: 0, + total_range_key_size: 0, + start_key_len: 0, + end_key_len: 0, vlen: 0, - versioned, batch: true, } } @@ -135,29 +204,35 @@ impl EncodedEntryMeta { #[inline] pub(crate) const fn entry_flag_offset(&self) -> usize { if self.batch { - return self.packed_kvlen_size; + return 0; } - RECORD_FLAG_SIZE + self.packed_kvlen_size + RECORD_FLAG_SIZE } #[inline] pub(crate) const fn version_offset(&self) -> usize { - self.entry_flag_offset() + ENTRY_FLAGS_SIZE + self.entry_flag_offset() + EntryFlags::SIZE } #[inline] - pub(crate) const fn key_offset(&self) -> usize { - if self.versioned { - self.version_offset() + VERSION_SIZE - } else { - self.version_offset() - } + pub(crate) const fn start_key_offset(&self) -> usize { + self.range_key_offset() + self.range_key_len_size + } + + #[inline] + pub(crate) const fn end_key_offset(&self) -> usize { + self.start_key_offset() + self.start_key_len + } + + #[inline] + pub(crate) const fn range_key_offset(&self) -> usize { + self.version_offset() + VERSION_SIZE + self.packed_kvlen_size } #[inline] pub(crate) const fn value_offset(&self) -> usize { - self.key_offset() + self.klen + self.range_key_offset() + self.total_range_key_size } #[inline] @@ -197,3 +272,218 @@ dbutils::builder!( ); builder_ext!(ValueBuilder, KeyBuilder,); + +const U32_SIZE: usize = mem::size_of::(); + +/// The pointer to a record in the WAL. +#[derive(Debug, Clone, Copy)] +pub struct RecordPointer { + offset: u32, + len: u32, +} + +impl RecordPointer { + const SIZE: usize = mem::size_of::(); + + #[inline] + pub(crate) fn new(offset: u32, len: u32) -> Self { + Self { offset, len } + } + + /// Returns the offset of the record. 
+ #[inline] + pub const fn offset(&self) -> usize { + self.offset as usize + } + + /// Returns the size of the record. + #[inline] + pub const fn size(&self) -> usize { + self.len as usize + } +} + +impl Type for RecordPointer { + type Ref<'a> = Self; + + type Error = InsufficientBuffer; + + #[inline] + fn encoded_len(&self) -> usize { + Self::SIZE + } + + #[inline] + fn encode_to_buffer(&self, buf: &mut VacantBuffer<'_>) -> Result { + buf + .put_u32_le(self.offset) + .and_then(|_| buf.put_u32_le(self.len)) + .map(|_| Self::SIZE) + } +} + +impl<'a> TypeRef<'a> for RecordPointer { + #[inline] + unsafe fn from_slice(src: &'a [u8]) -> Self { + let offset = u32::from_le_bytes(src[..U32_SIZE].try_into().unwrap()); + let len = u32::from_le_bytes(src[U32_SIZE..Self::SIZE].try_into().unwrap()); + Self { offset, len } + } +} + +/// A pointer points to a byte slice in the WAL. +pub struct Pointer { + offset: u32, + len: u32, +} + +impl Pointer { + /// The encoded size of the pointer. + pub const SIZE: usize = U32_SIZE * 2; + + #[inline] + pub(crate) const fn new(offset: u32, len: u32) -> Self { + Self { offset, len } + } + + /// Returns the offset to the underlying file of the pointer. + #[inline] + pub const fn offset(&self) -> usize { + self.offset as usize + } + + /// Returns the size of the byte slice of the pointer. + #[inline] + pub const fn size(&self) -> usize { + self.len as usize + } + + /// # Panics + /// Panics if the length of the slice is less than 8. + #[inline] + pub(crate) const fn from_slice(src: &[u8]) -> Self { + let offset = u32::from_le_bytes([src[0], src[1], src[2], src[3]]); + let len = u32::from_le_bytes([src[4], src[5], src[6], src[7]]); + Self { offset, len } + } +} + +/// A marker trait for the entry, which may have a value. +pub trait WithValue: BulkOperation {} + +/// The range operation. 
+pub trait BulkOperation: range_operation::Sealed {} + +impl BulkOperation for T {} + +mod range_operation { + use core::ops::Bound; + + use super::{ + sealed::RangeComparator, RawRangeRemoveRef, RawRangeUpdateRef, RecordPointer, Remove, Update, + }; + + pub trait Sealed: Send + Sync + 'static { + type Output<'a>; + + fn fetch<'a, C, RC>(cmp: &RC, rp: &RecordPointer) -> Self::Output<'a> + where + RC: RangeComparator; + + fn fmt( + output: &Self::Output<'_>, + wrapper_name: &'static str, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result; + + fn start_bound<'a>(output: &Self::Output<'a>) -> Bound<&'a [u8]>; + + fn end_bound<'a>(output: &Self::Output<'a>) -> Bound<&'a [u8]>; + + fn value<'a>(output: &Self::Output<'a>) -> Option<&'a [u8]> + where + Self: super::WithValue; + } + + impl Sealed for Update { + type Output<'a> = RawRangeUpdateRef<'a>; + + #[inline] + fn fetch<'a, C, RC>(cmp: &RC, rp: &RecordPointer) -> Self::Output<'a> + where + RC: RangeComparator, + { + cmp.fetch_range_update(rp) + } + + #[inline] + fn fmt( + output: &Self::Output<'_>, + wrapper_name: &'static str, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + output.write_fmt(wrapper_name, f) + } + + #[inline] + fn start_bound<'a>(output: &Self::Output<'a>) -> Bound<&'a [u8]> { + output.start_bound() + } + + #[inline] + fn end_bound<'a>(output: &Self::Output<'a>) -> Bound<&'a [u8]> { + output.end_bound() + } + + #[inline] + fn value<'a>(output: &Self::Output<'a>) -> Option<&'a [u8]> { + output.value() + } + } + + impl Sealed for Remove { + type Output<'a> = RawRangeRemoveRef<'a>; + + #[inline] + fn fetch<'a, C, RC>(cmp: &RC, rp: &RecordPointer) -> Self::Output<'a> + where + RC: crate::types::sealed::RangeComparator, + { + cmp.fetch_range_deletion(rp) + } + + #[inline] + fn fmt( + output: &Self::Output<'_>, + wrapper_name: &'static str, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + output.write_fmt(wrapper_name, f) + } + + #[inline] + fn start_bound<'a>(output: &Self::Output<'a>) -> Bound<&'a [u8]> { + output.start_bound() + } + + #[inline] + fn end_bound<'a>(output: &Self::Output<'a>) -> Bound<&'a [u8]> { + output.end_bound() + } + + #[inline] + fn value<'a>(_: &Self::Output<'a>) -> Option<&'a [u8]> { + None + } + } +} + +/// The range update operation. +#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)] +pub struct Update; + +impl WithValue for Update {} + +/// The range remove operation. +#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)] +pub struct Remove; diff --git a/src/types/base.rs b/src/types/base.rs deleted file mode 100644 index 1449d3ee..00000000 --- a/src/types/base.rs +++ /dev/null @@ -1,296 +0,0 @@ -use dbutils::types::{KeyRef, Type}; -use skl::LazyRef; - -use crate::{memtable::MemtableEntry, sealed::WithoutVersion}; - -/// The reference to an entry in the generic WALs. 
-pub struct Entry<'a, E> -where - E: MemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - ent: E, - key: LazyRef<'a, E::Key>, - value: LazyRef<'a, E::Value>, -} - -impl<'a, E> core::fmt::Debug for Entry<'a, E> -where - E: MemtableEntry<'a> + core::fmt::Debug, - E::Key: Type, - E::Value: Type, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("Entry") - .field("key", &self.key()) - .field("value", &self.value()) - .finish() - } -} - -impl<'a, E> Clone for Entry<'a, E> -where - E: MemtableEntry<'a> + Clone, - E::Key: Type, - E::Value: Type, -{ - #[inline] - fn clone(&self) -> Self { - Self { - ent: self.ent.clone(), - key: self.key.clone(), - value: self.value.clone(), - } - } -} - -impl<'a, E> Entry<'a, E> -where - E: MemtableEntry<'a> + WithoutVersion, - E::Key: Type, - E::Value: Type, -{ - #[inline] - pub(crate) fn new(ent: E) -> Self { - let raw_key = ent.key().as_slice(); - let raw_value = ent.value().as_slice(); - unsafe { - Self { - key: LazyRef::from_raw(raw_key), - value: LazyRef::from_raw(raw_value), - ent, - } - } - } -} - -impl<'a, E> Entry<'a, E> -where - E: MemtableEntry<'a> + WithoutVersion, - E::Key: Type + Ord, - ::Ref<'a>: KeyRef<'a, E::Key>, - E::Value: Type, -{ - /// Returns the next entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - #[allow(clippy::should_implement_trait)] - pub fn next(&mut self) -> Option { - self.ent.next().map(Self::new) - } - - /// Returns the previous entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - pub fn prev(&mut self) -> Option { - self.ent.prev().map(Self::new) - } -} - -impl<'a, E> Entry<'a, E> -where - E: MemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - /// Returns the key of the entry. - #[inline] - pub fn key(&self) -> &::Ref<'a> { - self.key.get() - } - - /// Returns the raw key of the entry. - #[inline] - pub fn raw_key(&self) -> &[u8] { - self.key.raw().expect("Entry's raw key cannot be None") - } - - /// Returns the value of the entry. - #[inline] - pub fn value(&self) -> &::Ref<'a> { - self.value.get() - } - - /// Returns the raw value of the entry. - #[inline] - pub fn raw_value(&self) -> &[u8] { - self.value.raw().expect("Entry's raw value cannot be None") - } -} - -/// The reference to a key of the entry in the generic WALs. -pub struct Key<'a, E> -where - E: MemtableEntry<'a>, - E::Key: Type, -{ - ent: E, - key: LazyRef<'a, E::Key>, -} - -impl<'a, E> core::fmt::Debug for Key<'a, E> -where - E: MemtableEntry<'a> + core::fmt::Debug, - E::Key: Type, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("Key").field("key", &self.key()).finish() - } -} - -impl<'a, E> Clone for Key<'a, E> -where - E: MemtableEntry<'a> + Clone, - E::Key: Type, -{ - #[inline] - fn clone(&self) -> Self { - Self { - ent: self.ent.clone(), - key: self.key.clone(), - } - } -} - -impl<'a, E> Key<'a, E> -where - E::Key: Type + Ord, - ::Ref<'a>: KeyRef<'a, E::Key>, - E: MemtableEntry<'a>, -{ - /// Returns the next entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - #[allow(clippy::should_implement_trait)] - pub fn next(&mut self) -> Option { - self.ent.next().map(Self::new) - } - - /// Returns the previous entry in the generic WALs. - /// - /// This does not move the cursor. 
- #[inline] - pub fn prev(&mut self) -> Option { - self.ent.prev().map(Self::new) - } -} - -impl<'a, E> Key<'a, E> -where - E::Key: Type, - E: MemtableEntry<'a>, -{ - /// Returns the key of the entry. - #[inline] - pub fn key(&self) -> &::Ref<'a> { - self.key.get() - } - - /// Returns the raw key of the entry. - #[inline] - pub fn raw_key(&self) -> &[u8] { - self.key.raw().expect("Key's raw key cannot be None") - } - - #[inline] - pub(crate) fn new(ent: E) -> Self { - let raw_key = ent.key().as_slice(); - unsafe { - Self { - key: LazyRef::from_raw(raw_key), - ent, - } - } - } -} - -/// The reference to a value of the entry in the generic WALs. -pub struct Value<'a, E> -where - E::Value: Type, - E: MemtableEntry<'a>, -{ - ent: E, - raw_key: &'a [u8], - value: LazyRef<'a, E::Value>, -} - -impl<'a, E> core::fmt::Debug for Value<'a, E> -where - E: MemtableEntry<'a> + core::fmt::Debug, - E::Value: Type, - ::Ref<'a>: core::fmt::Debug, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("Value") - .field("value", &self.value()) - .finish() - } -} - -impl<'a, E> Clone for Value<'a, E> -where - E: MemtableEntry<'a> + Clone, - E::Value: Type, -{ - #[inline] - fn clone(&self) -> Self { - Self { - ent: self.ent.clone(), - raw_key: self.raw_key, - value: self.value.clone(), - } - } -} - -impl<'a, E> Value<'a, E> -where - E: MemtableEntry<'a>, - E::Value: Type, -{ - #[inline] - pub(crate) fn new(ent: E) -> Self { - let raw_key = ent.key().as_slice(); - let raw_value = ent.value().as_slice(); - unsafe { - Self { - raw_key, - value: LazyRef::from_raw(raw_value), - ent, - } - } - } - - /// Returns the next entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - #[allow(clippy::should_implement_trait)] - pub fn next(&mut self) -> Option { - self.ent.next().map(Self::new) - } - - /// Returns the previous entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - pub fn prev(&mut self) -> Option { - self.ent.prev().map(Self::new) - } - - /// Returns the value of the entry. - #[inline] - pub fn value(&self) -> &::Ref<'a> { - self.value.get() - } - - /// Returns the raw value of the entry. - #[inline] - pub fn raw_value(&self) -> &[u8] { - self.value.raw().expect("Value's raw value cannot be None") - } -} diff --git a/src/types/mode.rs b/src/types/mode.rs new file mode 100644 index 00000000..eeaa3386 --- /dev/null +++ b/src/types/mode.rs @@ -0,0 +1,133 @@ +#![allow(dead_code)] + +/// The type mode of the memtable, either dynamic key-value or generic structured key-value. 
+pub trait Mode: sealed::Sealed {} + +#[doc(hidden)] +#[derive(Copy, Clone)] +pub struct Dynamic; + +#[doc(hidden)] +pub struct Generic(core::marker::PhantomData<(fn() -> K, fn() -> V)>); + +impl Clone for Generic +where + K: ?Sized, + V: ?Sized, +{ + fn clone(&self) -> Self { + *self + } +} + +impl Copy for Generic +where + K: ?Sized, + V: ?Sized, +{ +} + +pub(crate) mod sealed { + use dbutils::types::{LazyRef, Type}; + + use super::{ + super::{RawEntryRef, RawRangeRemoveRef, RawRangeUpdateRef, RecordPointer}, + Dynamic, Generic, Mode, + }; + + pub trait ComparatorConstructor: Sized { + fn new(ptr: *const u8, cmp: triomphe::Arc) -> Self; + } + + pub trait PointComparator: ComparatorConstructor { + fn fetch_entry<'a>(&self, kp: &RecordPointer) -> RawEntryRef<'a>; + } + + pub trait RangeComparator: ComparatorConstructor { + fn fetch_range_update<'a>(&self, kp: &RecordPointer) -> RawRangeUpdateRef<'a>; + + fn fetch_range_deletion<'a>(&self, kp: &RecordPointer) -> RawRangeRemoveRef<'a>; + } + + pub trait Pointee<'a> { + type Input; + type Output: Copy + core::fmt::Debug; + + fn from_input(input: Self::Input) -> Self; + + fn input(&self) -> Self::Input; + + fn output(&self) -> Self::Output; + } + + impl<'a> Pointee<'a> for &'a [u8] { + type Input = Self; + type Output = Self; + + #[inline] + fn from_input(input: Self::Input) -> Self { + input + } + + #[inline] + fn input(&self) -> Self::Input { + self + } + + #[inline] + fn output(&self) -> Self::Output { + self + } + } + + impl<'a, T> Pointee<'a> for LazyRef<'a, T> + where + T: Type + ?Sized, + { + type Input = &'a [u8]; + type Output = T::Ref<'a>; + + #[inline] + fn from_input(input: Self::Input) -> Self { + unsafe { LazyRef::from_raw(input) } + } + + #[inline] + fn input(&self) -> Self::Input { + self.raw().unwrap() + } + + #[inline] + fn output(&self) -> Self::Output { + *self.get() + } + } + + pub trait Sealed: Copy { + type Key<'a>: Pointee<'a>; + type Value<'a>: Pointee<'a>; + + type Comparator: ComparatorConstructor; + type RangeComparator: ComparatorConstructor; + } + + impl Mode for T {} + + impl Sealed for Dynamic { + type Key<'a> = &'a [u8]; + type Value<'a> = &'a [u8]; + type Comparator = crate::memtable::dynamic::MemtableComparator; + type RangeComparator = crate::memtable::dynamic::MemtableRangeComparator; + } + + impl Sealed for Generic + where + K: Type + ?Sized, + V: Type + ?Sized, + { + type Key<'a> = LazyRef<'a, K>; + type Value<'a> = LazyRef<'a, V>; + type Comparator = crate::memtable::generic::MemtableComparator; + type RangeComparator = crate::memtable::generic::MemtableRangeComparator; + } +} diff --git a/src/types/multiple_version.rs b/src/types/multiple_version.rs deleted file mode 100644 index 3f98e760..00000000 --- a/src/types/multiple_version.rs +++ /dev/null @@ -1,525 +0,0 @@ -use dbutils::types::{KeyRef, Type}; -use skl::LazyRef; - -use crate::memtable::VersionedMemtableEntry; - -/// The reference to an entry in the generic WALs. 
-pub struct Entry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - ent: E, - key: LazyRef<'a, E::Key>, - value: LazyRef<'a, E::Value>, - version: u64, - query_version: u64, -} - -impl<'a, E> core::fmt::Debug for Entry<'a, E> -where - E: VersionedMemtableEntry<'a> + core::fmt::Debug, - E::Key: Type, - E::Value: Type, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("Entry") - .field("key", &self.key()) - .field("value", &self.value()) - .field("version", &self.version) - .finish() - } -} - -impl<'a, E> Clone for Entry<'a, E> -where - E: VersionedMemtableEntry<'a> + Clone, - E::Key: Type, - E::Value: Type, -{ - #[inline] - fn clone(&self) -> Self { - Self { - ent: self.ent.clone(), - key: self.key.clone(), - value: self.value.clone(), - version: self.version, - query_version: self.query_version, - } - } -} - -impl<'a, E> Entry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - #[inline] - pub(crate) fn with_version(ent: E, query_version: u64) -> Self { - let version = ent.version(); - let raw_key = ent.key().as_slice(); - let raw_value = ent - .value() - .expect("value must be present on Entry") - .as_slice(); - unsafe { - Self { - key: LazyRef::from_raw(raw_key), - value: LazyRef::from_raw(raw_value), - version, - query_version, - ent, - } - } - } -} - -impl<'a, E> Entry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type + Ord, - ::Ref<'a>: KeyRef<'a, E::Key>, - E::Value: Type, -{ - /// Returns the next entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - #[allow(clippy::should_implement_trait)] - pub fn next(&mut self) -> Option { - self - .ent - .next() - .map(|ent| Self::with_version(ent, self.query_version)) - } - - /// Returns the previous entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - pub fn prev(&mut self) -> Option { - self - .ent - .prev() - .map(|ent| Self::with_version(ent, self.query_version)) - } -} - -impl<'a, E> Entry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - /// Returns the version of the entry. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } - - /// Returns the key of the entry. - #[inline] - pub fn key(&self) -> &::Ref<'a> { - self.key.get() - } - - /// Returns the raw key of the entry. - #[inline] - pub fn raw_key(&self) -> &'a [u8] { - self.key.raw().expect("Entry's raw key cannot be None") - } - - /// Returns the value of the entry. - #[inline] - pub fn value(&self) -> &::Ref<'a> { - self.value.get() - } - - /// Returns the raw value of the entry. - #[inline] - pub fn raw_value(&self) -> &'a [u8] { - self.value.raw().expect("Entry's raw value cannot be None") - } -} - -/// The reference to a key of the entry in the generic WALs. 
-pub struct Key<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, -{ - ent: E, - key: LazyRef<'a, E::Key>, - version: u64, - query_version: u64, -} - -impl<'a, E> core::fmt::Debug for Key<'a, E> -where - E: VersionedMemtableEntry<'a> + core::fmt::Debug, - E::Key: Type, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("Key") - .field("key", &self.key()) - .field("version", &self.version) - .finish() - } -} - -impl<'a, E> Clone for Key<'a, E> -where - E: VersionedMemtableEntry<'a> + Clone, - E::Key: Type, -{ - #[inline] - fn clone(&self) -> Self { - Self { - ent: self.ent.clone(), - key: self.key.clone(), - version: self.version, - query_version: self.query_version, - } - } -} - -impl<'a, E> Key<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, -{ - #[inline] - pub(crate) fn with_version(ent: E, query_version: u64) -> Self { - let raw_key = ent.key().as_slice(); - let version = ent.version(); - Self { - key: unsafe { LazyRef::from_raw(raw_key) }, - version, - query_version, - ent, - } - } -} - -impl<'a, E> Key<'a, E> -where - E::Key: Type + Ord, - ::Ref<'a>: KeyRef<'a, E::Key>, - E: VersionedMemtableEntry<'a>, -{ - /// Returns the next entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - #[allow(clippy::should_implement_trait)] - pub fn next(&mut self) -> Option { - self - .ent - .next() - .map(|ent| Self::with_version(ent, self.query_version)) - } - - /// Returns the previous entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - pub fn prev(&mut self) -> Option { - self - .ent - .prev() - .map(|ent| Self::with_version(ent, self.query_version)) - } -} - -impl<'a, E> Key<'a, E> -where - E::Key: Type, - E: VersionedMemtableEntry<'a>, -{ - /// Returns the version of the entry. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } - - /// Returns the key of the entry. - #[inline] - pub fn key(&self) -> &::Ref<'a> { - self.key.get() - } - - /// Returns the raw key of the entry. - #[inline] - pub fn raw_key(&self) -> &'a [u8] { - self.key.raw().expect("Key's raw key cannot be None") - } -} - -/// The reference to a value of the entry in the generic WALs. 
-pub struct Value<'a, E> -where - E::Value: Type, - E: VersionedMemtableEntry<'a>, -{ - ent: E, - raw_key: &'a [u8], - value: LazyRef<'a, E::Value>, - version: u64, - query_version: u64, -} - -impl<'a, E> core::fmt::Debug for Value<'a, E> -where - E: VersionedMemtableEntry<'a> + core::fmt::Debug, - E::Value: Type, - ::Ref<'a>: core::fmt::Debug, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("Value") - .field("value", &self.value()) - .field("version", &self.version) - .finish() - } -} - -impl<'a, E> Clone for Value<'a, E> -where - E: VersionedMemtableEntry<'a> + Clone, - E::Value: Type, -{ - #[inline] - fn clone(&self) -> Self { - Self { - ent: self.ent.clone(), - raw_key: self.raw_key, - value: self.value.clone(), - version: self.version, - query_version: self.query_version, - } - } -} - -impl<'a, E> Value<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Value: Type, -{ - #[inline] - pub(crate) fn with_version(ent: E, query_version: u64) -> Self { - let raw_key = ent.key().as_slice(); - let raw_value = ent - .value() - .expect("value must be present on Value") - .as_slice(); - let version = ent.version(); - Self { - raw_key, - value: unsafe { LazyRef::from_raw(raw_value) }, - version, - query_version, - ent, - } - } -} - -impl<'a, E> Value<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Value: Type, -{ - /// Returns the next entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - #[allow(clippy::should_implement_trait)] - pub fn next(&mut self) -> Option { - self - .ent - .next() - .map(|ent| Self::with_version(ent, self.query_version)) - } - - /// Returns the previous entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - pub fn prev(&mut self) -> Option { - self - .ent - .prev() - .map(|ent| Self::with_version(ent, self.query_version)) - } -} - -impl<'a, E> Value<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Value: Type, -{ - /// Returns the version of the entry. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } - - /// Returns the value of the entry. - #[inline] - pub fn value(&self) -> &::Ref<'a> { - self.value.get() - } - - /// Returns the raw value of the entry. - #[inline] - pub fn raw_value(&self) -> &'a [u8] { - self.value.raw().expect("Value's raw value cannot be None") - } -} - -/// The reference to an entry in the generic WALs. 
-pub struct VersionedEntry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - ent: E, - key: LazyRef<'a, E::Key>, - value: Option>, - version: u64, - query_version: u64, -} - -impl<'a, E> core::fmt::Debug for VersionedEntry<'a, E> -where - E: VersionedMemtableEntry<'a> + core::fmt::Debug, - E::Key: Type, - E::Value: Type, -{ - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("VersionedEntry") - .field("key", &self.key()) - .field("value", &self.value()) - .field("version", &self.version) - .finish() - } -} - -impl<'a, E> Clone for VersionedEntry<'a, E> -where - E: VersionedMemtableEntry<'a> + Clone, - E::Key: Type, - E::Value: Type, -{ - #[inline] - fn clone(&self) -> Self { - Self { - ent: self.ent.clone(), - key: self.key.clone(), - value: self.value.clone(), - version: self.version, - query_version: self.query_version, - } - } -} - -impl<'a, E> VersionedEntry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - #[inline] - pub(crate) fn with_version(ent: E, query_version: u64) -> Self { - let raw_key = ent.key().as_slice(); - let raw_value = ent.value().map(|v| v.as_slice()); - let version = ent.version(); - unsafe { - Self { - key: LazyRef::from_raw(raw_key), - value: raw_value.map(|v| LazyRef::from_raw(v)), - version, - query_version, - ent, - } - } - } -} - -impl<'a, E> VersionedEntry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Ord + Type, - for<'b> ::Ref<'b>: KeyRef<'b, E::Key>, - E::Value: Type, -{ - /// Returns the next entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - #[allow(clippy::should_implement_trait)] - pub fn next(&mut self) -> Option { - self - .ent - .next() - .map(|ent| Self::with_version(ent, self.query_version)) - } - - /// Returns the previous entry in the generic WALs. - /// - /// This does not move the cursor. - #[inline] - pub fn prev(&mut self) -> Option { - self - .ent - .prev() - .map(|ent| Self::with_version(ent, self.query_version)) - } -} - -impl<'a, E> VersionedEntry<'a, E> -where - E: VersionedMemtableEntry<'a>, - E::Key: Type, - E::Value: Type, -{ - /// Returns the version of the entry. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } - - /// Returns the key of the entry. - #[inline] - pub fn key(&self) -> &::Ref<'a> { - self.key.get() - } - - /// Returns the raw key of the entry. - #[inline] - pub fn raw_key(&self) -> &'a [u8] { - self - .key - .raw() - .expect("VersionedEntry's raw key cannot be None") - } - - /// Returns the value of the entry. - #[inline] - pub fn value(&self) -> Option<&::Ref<'a>> { - self.value.as_deref() - } - - /// Returns the raw value of the entry. 
- #[inline] - pub fn raw_value(&self) -> Option<&'a [u8]> { - match self.value.as_ref() { - None => None, - Some(v) => Some( - v.raw() - .expect("VersionedEntry's raw value cannot be None if value exists"), - ), - } - } -} diff --git a/src/types/raw.rs b/src/types/raw.rs new file mode 100644 index 00000000..e2a7bcfb --- /dev/null +++ b/src/types/raw.rs @@ -0,0 +1,525 @@ +#![allow(dead_code)] + +use core::{ops::Bound, slice}; + +use dbutils::leb128::decode_u64_varint; + +use crate::VERSION_SIZE; + +use super::{split_lengths, EntryFlags, Pointee, Pointer, RecordPointer}; + +#[derive(Clone, Copy)] +pub struct RawEntryRef<'a> { + flag: EntryFlags, + key: &'a [u8], + value: Option<&'a [u8]>, + version: u64, +} + +impl RawEntryRef<'_> { + #[inline] + pub(crate) fn write_fmt( + &self, + wrapper_name: &'static str, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + f.debug_struct(wrapper_name) + .field("flags", &self.flag) + .field("key", &self.key.output()) + .field("value", &self.value.as_ref().map(|v| v.output())) + .field("version", &self.version) + .finish() + } +} + +impl<'a> RawEntryRef<'a> { + #[inline] + pub const fn key(&self) -> &'a [u8] { + self.key + } + + #[inline] + pub const fn value(&self) -> Option<&'a [u8]> { + self.value + } + + #[inline] + pub const fn version(&self) -> u64 { + self.version + } +} + +#[derive(Clone, Copy)] +pub struct RawRangeUpdateRef<'a> { + flag: EntryFlags, + start_bound: Bound<&'a [u8]>, + end_bound: Bound<&'a [u8]>, + value: Option<&'a [u8]>, + version: u64, +} + +impl RawRangeUpdateRef<'_> { + #[inline] + pub(crate) fn write_fmt( + &self, + wrapper_name: &'static str, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + f.debug_struct(wrapper_name) + .field("flags", &self.flag) + .field("start_bound", &self.start_bound()) + .field("end_bound", &self.end_bound()) + .field("value", &self.value.as_ref().map(|v| v.output())) + .field("version", &self.version) + .finish() + } +} + +impl<'a> RawRangeUpdateRef<'a> { + #[inline] + pub const fn start_bound(&self) -> Bound<&'a [u8]> { + match &self.start_bound { + Bound::Unbounded => Bound::Unbounded, + Bound::Included(k) => Bound::Included(k), + Bound::Excluded(k) => Bound::Excluded(k), + } + } + + #[inline] + pub const fn end_bound(&self) -> Bound<&'a [u8]> { + match &self.end_bound { + Bound::Unbounded => Bound::Unbounded, + Bound::Included(k) => Bound::Included(k), + Bound::Excluded(k) => Bound::Excluded(k), + } + } + + #[inline] + pub const fn value(&self) -> Option<&'a [u8]> { + self.value + } + + #[inline] + pub const fn version(&self) -> u64 { + self.version + } +} + +#[derive(Clone, Copy)] +pub struct RawRangeRemoveRef<'a> { + flag: EntryFlags, + start_bound: Bound<&'a [u8]>, + end_bound: Bound<&'a [u8]>, + version: u64, +} + +impl RawRangeRemoveRef<'_> { + #[inline] + pub(crate) fn write_fmt( + &self, + wrapper_name: &'static str, + f: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + f.debug_struct(wrapper_name) + .field("flags", &self.flag) + .field("start_bound", &self.start_bound()) + .field("end_bound", &self.end_bound()) + .field("version", &self.version) + .finish() + } +} + +impl<'a> RawRangeRemoveRef<'a> { + #[inline] + pub const fn start_bound(&self) -> Bound<&'a [u8]> { + match &self.start_bound { + Bound::Unbounded => Bound::Unbounded, + Bound::Included(k) => Bound::Included(k), + Bound::Excluded(k) => Bound::Excluded(k), + } + } + + #[inline] + pub const fn end_bound(&self) -> Bound<&'a [u8]> { + match &self.end_bound { + Bound::Unbounded => 
Bound::Unbounded, + Bound::Included(k) => Bound::Included(k), + Bound::Excluded(k) => Bound::Excluded(k), + } + } + + #[inline] + pub const fn version(&self) -> u64 { + self.version + } +} + +pub(crate) struct BoundedKey { + bound: Bound<()>, + pointer: bool, +} + +impl BoundedKey { + #[inline] + pub const fn new(bound: Bound<()>, pointer: bool) -> Self { + Self { bound, pointer } + } + + /// Decode a `u8` into a `BoundedKey`. + #[inline] + pub const fn decode(src: u8) -> Self { + let bound_bits = src & 0b11; // Extract the first 2 bits for `Bound` + let pointer_bit = (src & 0b100) != 0; // Extract the 3rd bit for `pointer` + + let bound = match bound_bits { + 0b00 => Bound::Unbounded, + 0b01 => Bound::Included(()), + 0b10 => Bound::Excluded(()), + _ => panic!("Invalid bound encoding"), + }; + + Self { + bound, + pointer: pointer_bit, + } + } + + /// Encode the `BoundedKey` into a `u8`. + #[inline] + pub const fn encode(&self) -> u8 { + let bound_bits = match self.bound { + Bound::Unbounded => 0b00, + Bound::Included(()) => 0b01, + Bound::Excluded(()) => 0b10, + }; + + let pointer_bit = if self.pointer { 0b100 } else { 0 }; + + bound_bits | pointer_bit + } + + #[inline] + pub const fn encoded_size() -> usize { + 1 + } + + #[inline] + pub const fn pointer(&self) -> bool { + self.pointer + } + + #[inline] + pub const fn bound(&self) -> Bound<()> { + self.bound + } +} + +/// # Safety +/// - `data_ptr` must be a valid pointer to the data. +/// - `kp` must be pointing to key which is stored in the data_ptr. +#[inline] +pub(crate) unsafe fn fetch_raw_key<'a>(data_ptr: *const u8, kp: &RecordPointer) -> (u64, &'a [u8]) { + let entry_buf = slice::from_raw_parts(data_ptr.add(kp.offset()), kp.size()); + let flag = EntryFlags::from_bits_retain(entry_buf[0]); + debug_assert!( + !(flag.contains(EntryFlags::RANGE_SET) + | flag.contains(EntryFlags::RANGE_DELETION) + | flag.contains(EntryFlags::RANGE_UNSET)), + "unexpected range key" + ); + + let (mut cursor, version) = { + let version = u64::from_le_bytes( + entry_buf[EntryFlags::SIZE..EntryFlags::SIZE + VERSION_SIZE] + .try_into() + .unwrap(), + ); + (1 + VERSION_SIZE, version) + }; + + let (readed, kvlen) = decode_u64_varint(&entry_buf[cursor..]).expect(""); + cursor += readed; + let (klen, _) = split_lengths(kvlen); + let k = &entry_buf[cursor..cursor + klen as usize]; + + if !flag.contains(EntryFlags::KEY_POINTER) { + return (version, k); + } + + let pointer = Pointer::from_slice(k); + let k = slice::from_raw_parts( + data_ptr.add(pointer.offset() as usize), + pointer.size() as usize, + ); + (version, k) +} + +#[inline] +pub(crate) unsafe fn fetch_entry<'a>(data_ptr: *const u8, p: &RecordPointer) -> RawEntryRef<'a> { + let entry_buf = slice::from_raw_parts(data_ptr.add(p.offset()), p.size()); + let flag = EntryFlags::from_bits_retain(entry_buf[0]); + + debug_assert!( + !(flag.contains(EntryFlags::RANGE_SET) + | flag.contains(EntryFlags::RANGE_DELETION) + | flag.contains(EntryFlags::RANGE_UNSET)), + "unexpected range entry" + ); + + let (mut cursor, version) = { + let version = u64::from_le_bytes( + entry_buf[EntryFlags::SIZE..EntryFlags::SIZE + VERSION_SIZE] + .try_into() + .unwrap(), + ); + (EntryFlags::SIZE + VERSION_SIZE, version) + }; + + let (readed, kvlen) = decode_u64_varint(&entry_buf[cursor..]).expect(""); + cursor += readed; + let (klen, vlen) = split_lengths(kvlen); + let k = if !flag.contains(EntryFlags::KEY_POINTER) { + &entry_buf[cursor..cursor + klen as usize] + } else { + let pointer = Pointer::from_slice(&entry_buf[cursor..cursor + 
klen as usize]); + slice::from_raw_parts( + data_ptr.add(pointer.offset() as usize), + pointer.size() as usize, + ) + }; + cursor += klen as usize; + + let v = if flag.contains(EntryFlags::REMOVED) { + None + } else { + let v = &entry_buf[cursor..cursor + vlen as usize]; + if flag.contains(EntryFlags::VALUE_POINTER) { + let pointer = Pointer::from_slice(v); + Some(slice::from_raw_parts( + data_ptr.add(pointer.offset() as usize), + pointer.size() as usize, + )) + } else { + Some(v) + } + }; + + RawEntryRef { + flag, + key: k, + value: v, + version, + } +} + +/// # Safety +/// - `data_ptr` must be a valid pointer to the data. +/// - `kp` must be pointing to value which is stored in the data_ptr. +#[inline] +pub(crate) unsafe fn fetch_raw_range_key_start_bound<'a>( + data_ptr: *const u8, + kp: &RecordPointer, +) -> Bound<&'a [u8]> { + let entry_buf = slice::from_raw_parts(data_ptr.add(kp.offset()), kp.size()); + let flag = EntryFlags::from_bits_retain(entry_buf[0]); + + debug_assert!( + flag.contains(EntryFlags::RANGE_SET) + | flag.contains(EntryFlags::RANGE_DELETION) + | flag.contains(EntryFlags::RANGE_UNSET), + "unexpected point key" + ); + + let mut cursor = EntryFlags::SIZE + VERSION_SIZE; + + let (readed, kvlen) = + decode_u64_varint(&entry_buf[cursor..]).expect("kvlen should be decoded without error"); + cursor += readed; + let (klen, _) = split_lengths(kvlen); + + let mut range_key_buf = &entry_buf[cursor..cursor + klen as usize]; + + let (readed, range_key_len) = + decode_u64_varint(range_key_buf).expect("range key len should be decoded without error"); + range_key_buf = &range_key_buf[readed..]; + let (start_key_len, _) = split_lengths(range_key_len); + let start_key_buf = &range_key_buf[..start_key_len as usize]; + + let start_bound = BoundedKey::decode(start_key_buf[0]); + let raw_start_key = &start_key_buf[1..]; + let start_key = if start_bound.pointer() { + let pointer = Pointer::from_slice(raw_start_key); + let key = slice::from_raw_parts( + data_ptr.add(pointer.offset() as usize), + pointer.size() as usize, + ); + key + } else { + raw_start_key + }; + start_bound.bound().map(|_| start_key) +} + +struct FetchRangeKey<'a> { + flag: EntryFlags, + start_bound: Bound<&'a [u8]>, + end_bound: Bound<&'a [u8]>, + version: u64, + value: Option, +} + +/// # Safety +/// - `data_ptr` must be a valid pointer to the data. +/// - `kp` must be pointing to value which is stored in the data_ptr. 
+#[inline] +unsafe fn fetch_raw_range_key_helper<'a>( + data_ptr: *const u8, + kp: &RecordPointer, + f: impl FnOnce(&EntryFlags), +) -> FetchRangeKey<'a> { + let entry_buf = slice::from_raw_parts(data_ptr.add(kp.offset()), kp.size()); + let flag = EntryFlags::from_bits_retain(entry_buf[0]); + + #[cfg(debug_assertions)] + f(&flag); + + let (mut cursor, version) = { + let version = u64::from_le_bytes( + entry_buf[EntryFlags::SIZE..EntryFlags::SIZE + VERSION_SIZE] + .try_into() + .unwrap(), + ); + (EntryFlags::SIZE + VERSION_SIZE, version) + }; + + let (readed, kvlen) = + decode_u64_varint(&entry_buf[cursor..]).expect("kvlen should be decoded without error"); + cursor += readed; + let (klen, vlen) = split_lengths(kvlen); + + let mut range_key_buf = &entry_buf[cursor..cursor + klen as usize]; + cursor += klen as usize; + + let (readed, range_key_len) = + decode_u64_varint(range_key_buf).expect("range key len should be decoded without error"); + range_key_buf = &range_key_buf[readed..]; + let (start_key_len, end_key_len) = split_lengths(range_key_len); + let start_key_buf = &range_key_buf[..start_key_len as usize]; + let end_key_buf = + &range_key_buf[start_key_len as usize..start_key_len as usize + end_key_len as usize]; + + let start_bound = BoundedKey::decode(start_key_buf[0]); + let raw_start_key = &start_key_buf[1..]; + let start_key = if start_bound.pointer() { + let pointer = Pointer::from_slice(raw_start_key); + let key = slice::from_raw_parts( + data_ptr.add(pointer.offset() as usize), + pointer.size() as usize, + ); + key + } else { + raw_start_key + }; + let start_bound = start_bound.bound().map(|_| start_key); + + let end_bound = BoundedKey::decode(end_key_buf[0]); + let raw_end_key = &end_key_buf[1..]; + let end_key = if end_bound.pointer() { + let pointer = Pointer::from_slice(raw_end_key); + let key = slice::from_raw_parts( + data_ptr.add(pointer.offset() as usize), + pointer.size() as usize, + ); + key + } else { + raw_end_key + }; + let end_bound = end_bound.bound().map(|_| end_key); + + let value = if flag.contains(EntryFlags::RANGE_SET) { + Some(Pointer::new(kp.offset + cursor as u32, vlen)) + } else { + None + }; + + FetchRangeKey { + flag, + start_bound, + end_bound, + value, + version, + } +} + +/// # Safety +/// - `data_ptr` must be a valid pointer to the data. +/// - `kp` must be pointing to value which is stored in the data_ptr. +#[inline] +pub(crate) unsafe fn fetch_raw_range_deletion_entry<'a>( + data_ptr: *const u8, + kp: &RecordPointer, +) -> RawRangeRemoveRef<'a> { + let FetchRangeKey { + flag, + version, + start_bound, + end_bound, + .. + } = fetch_raw_range_key_helper(data_ptr, kp, |flag| { + debug_assert!( + flag.contains(EntryFlags::RANGE_DELETION), + "expected range deletion entry" + ) + }); + + RawRangeRemoveRef { + flag, + start_bound, + end_bound, + version, + } +} + +/// # Safety +/// - `data_ptr` must be a valid pointer to the data. +/// - `kp` must be pointing to value which is stored in the data_ptr. +#[inline] +pub(crate) unsafe fn fetch_raw_range_update_entry<'a>( + data_ptr: *const u8, + kp: &RecordPointer, +) -> RawRangeUpdateRef<'a> { + let FetchRangeKey { + flag, + version, + start_bound, + end_bound, + value, + .. 
+ } = fetch_raw_range_key_helper(data_ptr, kp, |flag| { + debug_assert!( + flag.contains(EntryFlags::RANGE_DELETION), + "expected range deletion entry" + ) + }); + + let value = value.map(|pointer| { + let v = slice::from_raw_parts(data_ptr.add(pointer.offset()), pointer.size()); + if !flag.contains(EntryFlags::VALUE_POINTER) { + let pointer = Pointer::from_slice(v); + slice::from_raw_parts( + data_ptr.add(pointer.offset() as usize), + pointer.size() as usize, + ) + } else { + v + } + }); + + RawRangeUpdateRef { + flag, + start_bound, + end_bound, + value, + version, + } +} diff --git a/src/utils.rs b/src/utils.rs index 80efd282..46a57818 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -14,7 +14,6 @@ pub(crate) const fn merge_lengths(a: u32, b: u32) -> u64 { /// - high 32 bits: the first `u32` /// - low 32 bits: the second `u32` #[inline] -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] pub(crate) const fn split_lengths(len: u64) -> (u32, u32) { ((len >> 32) as u32, len as u32) } diff --git a/src/wal.rs b/src/wal.rs deleted file mode 100644 index 8536da55..00000000 --- a/src/wal.rs +++ /dev/null @@ -1,9 +0,0 @@ -pub(crate) mod base; -pub(crate) mod iter; -pub(crate) mod multiple_version; - -mod query; -pub(crate) use query::*; - -mod pointer; -pub use pointer::*; diff --git a/src/wal/base.rs b/src/wal/base.rs deleted file mode 100644 index 4709478d..00000000 --- a/src/wal/base.rs +++ /dev/null @@ -1,687 +0,0 @@ -use core::ops::{Bound, RangeBounds}; - -use among::Among; -use dbutils::{ - buffer::VacantBuffer, - checksum::BuildChecksumer, - equivalent::Comparable, - types::{KeyRef, MaybeStructured, Type}, -}; -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -use rarena_allocator::Allocator; -use ref_cast::RefCast; -use skl::{either::Either, KeySize}; - -use crate::{ - batch::Batch, - error::Error, - memtable::{BaseTable, Memtable, MemtableEntry}, - sealed::{Constructable, Wal, WalReader}, - types::{base::Entry, BufWriter, KeyBuilder, ValueBuilder}, - Options, -}; - -use super::{Query, QueryRange, Slice}; - -mod iter; -pub use iter::*; - -/// An abstract layer for the immutable write-ahead log. -pub trait Reader: Constructable { - /// Returns the reserved space in the WAL. - /// - /// ## Safety - /// - The writer must ensure that the returned slice is not modified. - /// - This method is not thread-safe, so be careful when using it. - #[inline] - unsafe fn reserved_slice(&self) -> &[u8] { - self.as_wal().reserved_slice() - } - - /// Returns the path of the WAL if it is backed by a file. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - fn path(&self) -> Option<&<::Allocator as Allocator>::Path> { - self.as_wal().path() - } - - /// Returns the number of entries in the WAL. - #[inline] - fn len(&self) -> usize - where - Self::Memtable: Memtable, - ::Key: Type + Ord, - ::Value: Type, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - self.as_wal().len() - } - - /// Returns `true` if the WAL is empty. - #[inline] - fn is_empty(&self) -> bool - where - Self::Memtable: Memtable, - ::Key: Type + Ord, - ::Value: Type, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - self.as_wal().is_empty() - } - - /// Returns the maximum key size allowed in the WAL. - #[inline] - fn maximum_key_size(&self) -> KeySize { - self.as_wal().maximum_key_size() - } - - /// Returns the maximum value size allowed in the WAL. 
- #[inline] - fn maximum_value_size(&self) -> u32 { - self.as_wal().maximum_value_size() - } - - /// Returns the remaining capacity of the WAL. - #[inline] - fn remaining(&self) -> u32 { - self.as_wal().remaining() - } - - /// Returns the capacity of the WAL. - #[inline] - fn capacity(&self) -> u32 { - self.as_wal().capacity() - } - - /// Returns the options used to create this WAL instance. - #[inline] - fn options(&self) -> &Options { - self.as_wal().options() - } - - /// Returns an iterator over the entries in the WAL. - #[inline] - fn iter( - &self, - ) -> Iter< - '_, - <>::Memtable as BaseTable>::Iterator<'_>, - Self::Memtable, - > - where - Self::Memtable: Memtable, - ::Key: Type + Ord, - ::Value: Type, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Iter::new(BaseIter::new(self.as_wal().iter())) - } - - /// Returns an iterator over a subset of entries in the WAL. - #[inline] - fn range<'a, Q, R>( - &'a self, - range: R, - ) -> Range<'a, R, Q, >::Memtable> - where - R: RangeBounds, - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - ::Key: Type + Ord, - ::Value: Type, - { - Range::new(BaseIter::new(self.as_wal().range(QueryRange::new(range)))) - } - - /// Returns an iterator over the keys in the WAL. - #[inline] - fn keys( - &self, - ) -> Keys< - '_, - <>::Memtable as BaseTable>::Iterator<'_>, - Self::Memtable, - > - where - Self::Memtable: Memtable, - ::Key: Type + Ord, - ::Value: Type, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Keys::new(BaseIter::new(self.as_wal().iter())) - } - - /// Returns an iterator over a subset of keys in the WAL. - #[inline] - fn range_keys<'a, Q, R>( - &'a self, - range: R, - ) -> RangeKeys<'a, R, Q, >::Memtable> - where - R: RangeBounds, - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: Memtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> ::Item<'b>: MemtableEntry<'b>, - { - RangeKeys::new(BaseIter::new(WalReader::range( - self.as_wal(), - QueryRange::new(range), - ))) - } - - /// Returns an iterator over the values in the WAL. - #[inline] - fn values( - &self, - ) -> Values< - '_, - <>::Memtable as BaseTable>::Iterator<'_>, - Self::Memtable, - > - where - Self::Memtable: Memtable, - ::Key: Type, - ::Value: Type, - for<'a> ::Item<'a>: MemtableEntry<'a>, - { - Values::new(BaseIter::new(self.as_wal().iter())) - } - - /// Returns an iterator over a subset of values in the WAL. - #[inline] - fn range_values<'a, Q, R>( - &'a self, - range: R, - ) -> RangeValues<'a, R, Q, >::Memtable> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: Memtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> ::Item<'b>: MemtableEntry<'b>, - { - RangeValues::new(BaseIter::new(self.as_wal().range(QueryRange::new(range)))) - } - - /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. - #[inline] - fn first(&self) -> Option::Item<'_>>> - where - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, - ::Key: Ord + Type, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type, - { - self.as_wal().first().map(Entry::new) - } - - /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. 
- #[inline] - fn last(&self) -> Option::Item<'_>>> - where - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, - ::Key: Ord + Type, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type, - { - WalReader::last(self.as_wal()).map(Entry::new) - } - - /// Returns `true` if the key exists in the WAL. - #[inline] - fn contains_key<'a, Q>(&'a self, key: &Q) -> bool - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - ::Key: Ord + Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - ::Value: Type, - { - self.as_wal().contains_key(Query::<_, Q>::ref_cast(key)) - } - - /// Returns `true` if the key exists in the WAL. - /// - /// ## Safety - /// - The given `key` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn contains_key_by_bytes(&self, key: &[u8]) -> bool - where - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - ::Key: Ord + Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - ::Value: Type, - { - self - .as_wal() - .contains_key(Slice::<::Key>::ref_cast(key)) - } - - /// Gets the value associated with the key. - #[inline] - fn get<'a, Q>(&'a self, key: &Q) -> Option::Item<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - ::Key: Ord + Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - ::Value: Type, - { - self - .as_wal() - .get(Query::<_, Q>::ref_cast(key)) - .map(Entry::new) - } - - /// Gets the value associated with the key. - /// - /// ## Safety - /// - The given `key` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn get_by_bytes( - &self, - key: &[u8], - ) -> Option::Item<'_>>> - where - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, - ::Key: Ord + Type, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type, - { - self - .as_wal() - .get(Slice::<::Key>::ref_cast(key)) - .map(Entry::new) - } - - /// Returns a value associated to the highest element whose key is below the given bound. - /// If no such element is found then `None` is returned. - #[inline] - fn upper_bound<'a, Q>( - &'a self, - bound: Bound<&Q>, - ) -> Option::Item<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - ::Key: Ord + Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - ::Value: Type, - { - self - .as_wal() - .upper_bound(bound.map(Query::<_, Q>::ref_cast)) - .map(Entry::new) - } - - /// Returns a value associated to the highest element whose key is below the given bound. - /// If no such element is found then `None` is returned. - /// - /// ## Safety - /// - The given `key` in `Bound` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn upper_bound_by_bytes( - &self, - bound: Bound<&[u8]>, - ) -> Option::Item<'_>>> - where - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, - ::Key: Ord + Type, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type, - { - self - .as_wal() - .upper_bound(bound.map(Slice::<::Key>::ref_cast)) - .map(Entry::new) - } - - /// Returns a value associated to the lowest element whose key is above the given bound. - /// If no such element is found then `None` is returned. 
- #[inline] - fn lower_bound<'a, Q>( - &'a self, - bound: Bound<&Q>, - ) -> Option::Item<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - ::Key: Ord + Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - ::Value: Type, - { - self - .as_wal() - .lower_bound(bound.map(Query::<::Key, Q>::ref_cast)) - .map(Entry::new) - } - - /// Returns a value associated to the lowest element whose key is above the given bound. - /// If no such element is found then `None` is returned. - /// - /// ## Safety - /// - The given `key` in `Bound` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn lower_bound_by_bytes( - &self, - bound: Bound<&[u8]>, - ) -> Option::Item<'_>>> - where - Self::Memtable: Memtable, - for<'b> ::Item<'b>: MemtableEntry<'b>, - ::Key: Ord + Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - ::Value: Type, - { - self - .as_wal() - .lower_bound(bound.map(Slice::<::Key>::ref_cast)) - .map(Entry::new) - } -} - -impl Reader for T -where - T: Constructable, - T::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, -{ -} - -/// An abstract layer for the write-ahead log. -pub trait Writer: Reader -where - Self::Reader: Reader, - Self::Memtable: Memtable, - for<'a> ::Item<'a>: MemtableEntry<'a>, -{ - /// Returns `true` if this WAL instance is read-only. - #[inline] - fn read_only(&self) -> bool { - self.as_wal().read_only() - } - - /// Returns the mutable reference to the reserved slice. - /// - /// ## Safety - /// - The caller must ensure that the there is no others accessing reserved slice for either read or write. - /// - This method is not thread-safe, so be careful when using it. - #[inline] - unsafe fn reserved_slice_mut<'a>(&'a mut self) -> &'a mut [u8] - where - Self::Allocator: 'a, - { - self.as_wal().reserved_slice_mut() - } - - /// Flushes the to disk. - #[inline] - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - fn flush(&self) -> Result<(), Error> { - self.as_wal().flush() - } - - /// Flushes the to disk. - #[inline] - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - fn flush_async(&self) -> Result<(), Error> { - self.as_wal().flush_async() - } - - /// Returns the read-only view for the WAL. - fn reader(&self) -> Self::Reader; - - /// Inserts a key-value pair into the WAL. This method - /// allows the caller to build the key in place. - /// - /// See also [`insert_with_value_builder`](Writer::insert_with_value_builder) and [`insert_with_builders`](Writer::insert_with_builders). - #[inline] - fn insert_with_key_builder<'a, E>( - &'a mut self, - kb: KeyBuilder) -> Result>, - value: impl Into::Value>>, - ) -> Result< - (), - Among::Value as Type>::Error, Error>, - > - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert(None, kb, value.into()) - } - - /// Inserts a key-value pair into the WAL. This method - /// allows the caller to build the value in place. - /// - /// See also [`insert_with_key_builder`](Writer::insert_with_key_builder) and [`insert_with_builders`](Writer::insert_with_builders). 
- #[inline] - fn insert_with_value_builder<'a, E>( - &'a mut self, - key: impl Into::Key>>, - vb: ValueBuilder) -> Result>, - ) -> Result< - (), - Among<<::Key as Type>::Error, E, Error>, - > - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert(None, key.into(), vb) - } - - /// Inserts a key-value pair into the WAL. This method - /// allows the caller to build the key and value in place. - #[inline] - fn insert_with_builders<'a, KE, VE>( - &'a mut self, - kb: KeyBuilder) -> Result>, - vb: ValueBuilder) -> Result>, - ) -> Result<(), Among>> - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert(None, kb, vb) - } - - /// Inserts a key-value pair into the WAL. - #[inline] - fn insert<'a>( - &'a mut self, - key: impl Into::Key>>, - value: impl Into::Value>>, - ) -> Result< - (), - Among< - <::Key as Type>::Error, - <::Value as Type>::Error, - Error, - >, - > - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert(None, key.into(), value.into()) - } - - /// Removes a key-value pair from the WAL. This method - /// allows the caller to build the key in place. - #[inline] - fn remove_with_builder<'a, KE>( - &'a mut self, - kb: KeyBuilder) -> Result>, - ) -> Result<(), Either>> - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().remove(None, kb) - } - - /// Removes a key-value pair from the WAL. - #[inline] - fn remove<'a>( - &'a mut self, - key: impl Into::Key>>, - ) -> Result<(), Either<<::Key as Type>::Error, Error>> - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().remove(None, key.into()) - } - - /// Inserts a batch of key-value pairs into the WAL. - #[inline] - fn insert_batch<'a, B>( - &'a mut self, - batch: &mut B, - ) -> Result< - (), - Among< - <::Key as Type>::Error, - <::Value as Type>::Error, - Error, - >, - > - where - B: Batch< - Self::Memtable, - Key = MaybeStructured<'a, ::Key>, - Value = MaybeStructured<'a, ::Value>, - >, - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert_batch::(batch) - } - - /// Inserts a batch of key-value pairs into the WAL. - #[inline] - fn insert_batch_with_key_builder<'a, B>( - &'a mut self, - batch: &mut B, - ) -> Result< - (), - Among< - ::Error, - <::Value as Type>::Error, - Error, - >, - > - where - B: Batch::Value>>, - B::Key: BufWriter, - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert_batch::(batch) - } - - /// Inserts a batch of key-value pairs into the WAL. 
- #[inline] - fn insert_batch_with_value_builder<'a, B>( - &'a mut self, - batch: &mut B, - ) -> Result< - (), - Among< - <::Key as Type>::Error, - ::Error, - Error, - >, - > - where - B: Batch::Key>>, - B::Value: BufWriter, - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert_batch::(batch) - } - - /// Inserts a batch of key-value pairs into the WAL. - #[inline] - fn insert_batch_with_builders<'a, KB, VB, B>( - &'a mut self, - batch: &mut B, - ) -> Result<(), Among>> - where - B: Batch, - KB: BufWriter, - VB: BufWriter, - Self::Checksumer: BuildChecksumer, - Self::Memtable: BaseTable, - ::Key: Type + Ord + 'static, - <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type + 'static, - { - self.as_wal().insert_batch::(batch) - } -} diff --git a/src/wal/base/iter.rs b/src/wal/base/iter.rs deleted file mode 100644 index 1547e77e..00000000 --- a/src/wal/base/iter.rs +++ /dev/null @@ -1,476 +0,0 @@ -use core::{iter::FusedIterator, marker::PhantomData, ops::RangeBounds}; - -use crate::{ - memtable::{BaseEntry, Memtable, MemtableEntry}, - types::base::{Entry, Key, Value}, - wal::{KeyPointer, ValuePointer}, -}; - -use dbutils::{equivalent::Comparable, types::Type}; - -use super::{Query, QueryRange}; - -/// Iterator over the entries in the WAL. -pub struct BaseIter<'a, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - iter: I, - head: Option<(KeyPointer, ValuePointer)>, - tail: Option<(KeyPointer, ValuePointer)>, - _m: PhantomData<&'a ()>, -} - -impl BaseIter<'_, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: I) -> Self { - Self { - iter, - head: None, - tail: None, - _m: PhantomData, - } - } -} - -impl<'a, I, M> Iterator for BaseIter<'a, I, M> -where - M: Memtable + 'a, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: Iterator>, -{ - type Item = M::Item<'a>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().inspect(|ent| { - self.head = Some((ent.key(), ent.value())); - }) - } -} - -impl<'a, I, M> DoubleEndedIterator for BaseIter<'a, I, M> -where - M: Memtable + 'a, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().inspect(|ent| { - self.tail = Some((ent.key(), ent.value())); - }) - } -} - -impl<'a, I, M> FusedIterator for BaseIter<'a, I, M> -where - M: Memtable + 'a, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// Iterator over the entries in the WAL. 
-pub struct Iter<'a, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - iter: BaseIter<'a, I, M>, -} - -impl<'a, I, M> Iter<'a, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: BaseIter<'a, I, M>) -> Self { - Self { iter } - } -} - -impl<'a, I, M> Iterator for Iter<'a, I, M> -where - M: Memtable + 'a, - for<'b> M::Item<'b>: MemtableEntry<'b>, - M::Key: Type + Ord, - M::Value: Type, - I: Iterator>, -{ - type Item = Entry<'a, M::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(Entry::new) - } -} - -impl<'a, I, M> DoubleEndedIterator for Iter<'a, I, M> -where - M: Memtable + 'a, - for<'b> M::Item<'b>: MemtableEntry<'b>, - M::Key: Type + Ord, - M::Value: Type, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(Entry::new) - } -} - -impl<'a, I, M> FusedIterator for Iter<'a, I, M> -where - M: Memtable + 'a, - for<'b> M::Item<'b>: MemtableEntry<'b>, - M::Key: Type + Ord, - M::Value: Type, - I: FusedIterator>, -{ -} - -/// Iterator over the keys in the WAL. -pub struct Keys<'a, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - iter: BaseIter<'a, I, M>, -} - -impl<'a, I, M> Keys<'a, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: BaseIter<'a, I, M>) -> Self { - Self { iter } - } -} - -impl<'a, I, M> Iterator for Keys<'a, I, M> -where - M: Memtable + 'a, - M::Key: Type, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: Iterator>, -{ - type Item = Key<'a, M::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(Key::new) - } -} - -impl<'a, I, M> DoubleEndedIterator for Keys<'a, I, M> -where - M: Memtable + 'a, - M::Key: Type, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(Key::new) - } -} - -impl<'a, I, M> FusedIterator for Keys<'a, I, M> -where - M: Memtable + 'a, - M::Key: Type, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// Iterator over the values in the WAL. -pub struct Values<'a, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - iter: BaseIter<'a, I, M>, -} - -impl<'a, I, M> Values<'a, I, M> -where - M: Memtable, - for<'b> M::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: BaseIter<'a, I, M>) -> Self { - Self { iter } - } -} - -impl<'a, I, M> Iterator for Values<'a, I, M> -where - M: Memtable + 'a, - M::Value: Type, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: Iterator>, -{ - type Item = Value<'a, M::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(Value::new) - } -} - -impl<'a, I, M> DoubleEndedIterator for Values<'a, I, M> -where - M: Memtable + 'a, - M::Value: Type, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(Value::new) - } -} - -impl<'a, I, M> FusedIterator for Values<'a, I, M> -where - M: Memtable + 'a, - M::Value: Type, - for<'b> M::Item<'b>: MemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// An iterator over a subset of the entries in the WAL. 
-pub struct Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, -} - -impl<'a, R, Q, B> Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - pub(super) fn new( - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - ) -> Self { - Self { iter } - } -} - -impl<'a, R, Q, B> Iterator for Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: Iterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - type Item = Entry<'a, B::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(Entry::new) - } -} - -impl<'a, R, Q, B> DoubleEndedIterator for Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - DoubleEndedIterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(Entry::new) - } -} - -impl<'a, R, Q, B> FusedIterator for Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - FusedIterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ -} - -/// An iterator over the keys in a subset of the entries in the WAL. 
-pub struct RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, -} - -impl<'a, R, Q, B> RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - pub(super) fn new( - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - ) -> Self { - Self { iter } - } -} - -impl<'a, R, Q, B> Iterator for RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: Iterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - type Item = Key<'a, B::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(Key::new) - } -} - -impl<'a, R, Q, B> DoubleEndedIterator for RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - DoubleEndedIterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(Key::new) - } -} - -impl<'a, R, Q, B> FusedIterator for RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - FusedIterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ -} - -/// An iterator over the values in a subset of the entries in the WAL. 
-pub struct RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, -} - -impl<'a, R, Q, B> RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - pub(super) fn new( - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - ) -> Self { - Self { iter } - } -} - -impl<'a, R, Q, B> Iterator for RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: Iterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - type Item = Value<'a, B::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(Value::new) - } -} - -impl<'a, R, Q, B> DoubleEndedIterator for RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - DoubleEndedIterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(Value::new) - } -} - -impl<'a, R, Q, B> FusedIterator for RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: Memtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - FusedIterator>, - for<'b> B::Item<'b>: MemtableEntry<'b>, -{ -} diff --git a/src/wal/iter.rs b/src/wal/iter.rs deleted file mode 100644 index 8b137891..00000000 --- a/src/wal/iter.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/wal/multiple_version.rs b/src/wal/multiple_version.rs deleted file mode 100644 index cabfd0fb..00000000 --- a/src/wal/multiple_version.rs +++ /dev/null @@ -1,1118 +0,0 @@ -use core::ops::{Bound, RangeBounds}; - -use among::Among; -use dbutils::{ - buffer::VacantBuffer, - checksum::BuildChecksumer, - equivalent::Comparable, - types::{KeyRef, MaybeStructured, Type}, -}; -#[cfg(all(feature = "memmap", not(target_family = "wasm")))] -use rarena_allocator::Allocator; -use ref_cast::RefCast; -use skl::{either::Either, KeySize}; - -use crate::{ - batch::Batch, - error::Error, - memtable::{BaseTable, MultipleVersionMemtable, VersionedMemtableEntry}, - sealed::{Constructable, MultipleVersionWalReader, Wal}, - types::{ - multiple_version::{Entry, VersionedEntry}, - BufWriter, KeyBuilder, ValueBuilder, - }, - Options, -}; - -use super::{Query, QueryRange, Slice}; - -mod iter; -pub use iter::*; - -/// An abstract layer for the immutable write-ahead log. -pub trait Reader: Constructable { - /// Returns the reserved space in the WAL. - /// - /// ## Safety - /// - The writer must ensure that the returned slice is not modified. - /// - This method is not thread-safe, so be careful when using it. - #[inline] - unsafe fn reserved_slice(&self) -> &[u8] { - self.as_wal().reserved_slice() - } - - /// Returns the path of the WAL if it is backed by a file. 
- #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - fn path(&self) -> Option<&<::Allocator as Allocator>::Path> { - self.as_wal().path() - } - - /// Returns the maximum key size allowed in the WAL. - #[inline] - fn maximum_key_size(&self) -> KeySize { - self.as_wal().maximum_key_size() - } - - /// Returns the maximum value size allowed in the WAL. - #[inline] - fn maximum_value_size(&self) -> u32 { - self.as_wal().maximum_value_size() - } - - /// Returns the maximum version in the WAL. - #[inline] - fn maximum_version(&self) -> u64 - where - Self::Memtable: MultipleVersionMemtable + 'static, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - Wal::memtable(self.as_wal()).maximum_version() - } - - /// Returns the minimum version in the WAL. - #[inline] - fn minimum_version(&self) -> u64 - where - Self::Memtable: MultipleVersionMemtable + 'static, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - Wal::memtable(self.as_wal()).minimum_version() - } - - /// Returns `true` if the WAL may contain an entry whose version is less or equal to the given version. - #[inline] - fn may_contain_version(&self, version: u64) -> bool - where - Self::Memtable: MultipleVersionMemtable + 'static, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - Wal::memtable(self.as_wal()).may_contain_version(version) - } - - /// Returns the remaining capacity of the WAL. - #[inline] - fn remaining(&self) -> u32 { - self.as_wal().remaining() - } - - /// Returns the capacity of the WAL. - #[inline] - fn capacity(&self) -> u32 { - self.as_wal().capacity() - } - - /// Returns the options used to create this WAL instance. - #[inline] - fn options(&self) -> &Options { - self.as_wal().options() - } - - /// Returns an iterator over the entries in the WAL. - #[inline] - fn iter( - &self, - version: u64, - ) -> Iter< - '_, - <>::Memtable as BaseTable>::Iterator<'_>, - Self::Memtable, - > - where - Self::Memtable: MultipleVersionMemtable + 'static, - ::Key: Type + Ord, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type, - { - Iter::new(BaseIter::new(version, self.as_wal().iter(version))) - } - - /// Returns an iterator over the entries (all versions) in the WAL. - #[inline] - fn iter_all_versions( - &self, - version: u64, - ) -> IterAll< - '_, - <>::Memtable as MultipleVersionMemtable>::IterAll<'_>, - Self::Memtable, - > - where - Self::Memtable: MultipleVersionMemtable + 'static, - ::Key: Type + Ord, - for<'a> <::Key as Type>::Ref<'a>: - KeyRef<'a, ::Key>, - ::Value: Type, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - { - IterAll::new(MultipleVersionBaseIter::new( - version, - self.as_wal().iter_all_versions(version), - )) - } - - /// Returns an iterator over a subset of entries in the WAL. - #[inline] - fn range<'a, Q, R>( - &'a self, - version: u64, - range: R, - ) -> Range<'a, R, Q, >::Memtable> - where - R: RangeBounds, - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - Range::new(BaseIter::new( - version, - self.as_wal().range(version, QueryRange::new(range)), - )) - } - - /// Returns an iterator over a subset of entries (all versions) in the WAL. 
- #[inline] - fn range_all_versions<'a, Q, R>( - &'a self, - version: u64, - range: R, - ) -> RangeAll<'a, R, Q, >::Memtable> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - RangeAll::new(MultipleVersionBaseIter::new( - version, - self - .as_wal() - .range_all_versions(version, QueryRange::new(range)), - )) - } - - /// Returns an iterator over the keys in the WAL. - #[inline] - fn keys( - &self, - version: u64, - ) -> Keys< - '_, - <>::Memtable as BaseTable>::Iterator<'_>, - Self::Memtable, - > - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - Keys::new(BaseIter::new(version, self.as_wal().iter(version))) - } - - /// Returns an iterator over a subset of keys in the WAL. - #[inline] - fn range_keys<'a, Q, R>( - &'a self, - version: u64, - range: R, - ) -> RangeKeys<'a, R, Q, >::Memtable> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - RangeKeys::new(BaseIter::new( - version, - self.as_wal().range(version, QueryRange::new(range)), - )) - } - - /// Returns an iterator over the values in the WAL. - #[inline] - fn values( - &self, - version: u64, - ) -> Values< - '_, - <>::Memtable as BaseTable>::Iterator<'_>, - Self::Memtable, - > - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - Values::new(BaseIter::new(version, self.as_wal().iter(version))) - } - - /// Returns an iterator over a subset of values in the WAL. - #[inline] - fn range_values<'a, Q, R>( - &'a self, - version: u64, - range: R, - ) -> RangeValues<'a, R, Q, >::Memtable> - where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - RangeValues::new(BaseIter::new( - version, - self.as_wal().range(version, QueryRange::new(range)), - )) - } - - /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. - #[inline] - fn first(&self, version: u64) -> Option::Item<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .first(version) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Returns the first key-value pair in the map. The key in this pair is the minimum key in the wal. 
- /// - /// Compared to [`first`](Reader::first), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - #[inline] - fn first_versioned( - &self, - version: u64, - ) -> Option::VersionedItem<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .first_versioned(version) - .map(|ent| VersionedEntry::with_version(ent, version)) - } - - /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. - #[inline] - fn last(&self, version: u64) -> Option::Item<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - MultipleVersionWalReader::last(self.as_wal(), version) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Returns the last key-value pair in the map. The key in this pair is the maximum key in the wal. - /// - /// Compared to [`last`](Reader::last), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - #[inline] - fn last_versioned( - &self, - version: u64, - ) -> Option::VersionedItem<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .last_versioned(version) - .map(|ent| VersionedEntry::with_version(ent, version)) - } - - /// Returns `true` if the key exists in the WAL. - #[inline] - fn contains_key<'a, Q>(&'a self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .contains_key(version, Query::<_, Q>::ref_cast(key)) - } - - /// Returns `true` if the key exists in the WAL. - /// - /// Compared to [`contains_key`](Reader::contains_key), this method returns `true` even if the latest is marked as removed. - #[inline] - fn contains_key_versioned<'a, Q>(&'a self, version: u64, key: &Q) -> bool - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .contains_key_versioned(version, Query::<_, Q>::ref_cast(key)) - } - - /// Returns `true` if the key exists in the WAL. - /// - /// ## Safety - /// - The given `key` must be valid to construct to `K::Ref` without remaining. 
- #[inline] - unsafe fn contains_key_by_bytes(&self, version: u64, key: &[u8]) -> bool - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().contains_key(version, Slice::ref_cast(key)) - } - - /// Returns `true` if the key exists in the WAL. - /// - /// Compared to [`contains_key_by_bytes`](Reader::contains_key_by_bytes), this method returns `true` even if the latest is marked as removed. - /// - /// ## Safety - /// - The given `key` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn contains_key_versioned_by_bytes(&self, version: u64, key: &[u8]) -> bool - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .contains_key_versioned(version, Slice::ref_cast(key)) - } - - /// Gets the value associated with the key. - #[inline] - fn get<'a, Q>( - &'a self, - version: u64, - key: &Q, - ) -> Option::Item<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .get(version, Query::<_, Q>::ref_cast(key)) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Gets the value associated with the key. - /// - /// Compared to [`get`](Reader::get), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - #[inline] - fn get_versioned<'a, Q>( - &'a self, - version: u64, - key: &Q, - ) -> Option::VersionedItem<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .get_versioned(version, Query::<_, Q>::ref_cast(key)) - .map(|ent| VersionedEntry::with_version(ent, version)) - } - - /// Gets the value associated with the key. - /// - /// ## Safety - /// - The given `key` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn get_by_bytes( - &self, - version: u64, - key: &[u8], - ) -> Option::Item<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .get(version, Slice::ref_cast(key)) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Gets the value associated with the key. - /// - /// Compared to [`get_by_bytes`](Reader::get_by_bytes), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - /// - /// ## Safety - /// - The given `key` must be valid to construct to `K::Ref` without remaining. 
- #[inline] - unsafe fn get_versioned_by_bytes( - &self, - version: u64, - key: &[u8], - ) -> Option::VersionedItem<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .get_versioned(version, Slice::ref_cast(key)) - .map(|ent| VersionedEntry::with_version(ent, version)) - } - - /// Returns a value associated to the highest element whose key is below the given bound. - /// If no such element is found then `None` is returned. - #[inline] - fn upper_bound<'a, Q>( - &'a self, - version: u64, - bound: Bound<&Q>, - ) -> Option::Item<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .upper_bound(version, bound.map(Query::ref_cast)) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Returns a value associated to the highest element whose key is below the given bound. - /// - /// Compared to [`upper_bound`](Reader::upper_bound), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - #[inline] - fn upper_bound_versioned<'a, Q>( - &'a self, - version: u64, - bound: Bound<&Q>, - ) -> Option::VersionedItem<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .upper_bound_versioned(version, bound.map(Query::ref_cast)) - .map(|ent| VersionedEntry::with_version(ent, version)) - } - - /// Returns a value associated to the highest element whose key is below the given bound. - /// If no such element is found then `None` is returned. - /// - /// ## Safety - /// - The given `key` in `Bound` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn upper_bound_by_bytes( - &self, - version: u64, - bound: Bound<&[u8]>, - ) -> Option::Item<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .upper_bound(version, bound.map(Slice::ref_cast)) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Returns a value associated to the highest element whose key is below the given bound. - /// If no such element is found then `None` is returned. - /// - /// Compared to [`upper_bound_by_bytes`](Reader::upper_bound_by_bytes), this method returns a versioned item, which means that the returned item - /// - /// ## Safety - /// - The given `key` in `Bound` must be valid to construct to `K::Ref` without remaining. 
- #[inline] - unsafe fn upper_bound_versioned_by_bytes( - &self, - version: u64, - bound: Bound<&[u8]>, - ) -> Option::VersionedItem<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .upper_bound_versioned(version, bound.map(Slice::ref_cast)) - .map(|ent| VersionedEntry::with_version(ent, version)) - } - - /// Returns a value associated to the lowest element whose key is above the given bound. - /// If no such element is found then `None` is returned. - #[inline] - fn lower_bound<'a, Q>( - &'a self, - version: u64, - bound: Bound<&Q>, - ) -> Option::Item<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .lower_bound(version, bound.map(Query::ref_cast)) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Returns a value associated to the lowest element whose key is above the given bound. - /// If no such element is found then `None` is returned. - /// - /// Compared to [`lower_bound`](Reader::lower_bound), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - #[inline] - fn lower_bound_versioned<'a, Q>( - &'a self, - version: u64, - bound: Bound<&Q>, - ) -> Option::VersionedItem<'a>>> - where - Q: ?Sized + Comparable<<::Key as Type>::Ref<'a>>, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .lower_bound_versioned(version, bound.map(Query::ref_cast)) - .map(|ent| VersionedEntry::with_version(ent, version)) - } - - /// Returns a value associated to the lowest element whose key is above the given bound. - /// If no such element is found then `None` is returned. - /// - /// ## Safety - /// - The given `key` in `Bound` must be valid to construct to `K::Ref` without remaining. - #[inline] - unsafe fn lower_bound_by_bytes( - &self, - version: u64, - bound: Bound<&[u8]>, - ) -> Option::Item<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .lower_bound(version, bound.map(Slice::ref_cast)) - .map(|ent| Entry::with_version(ent, version)) - } - - /// Returns a value associated to the lowest element whose key is above the given bound. - /// If no such element is found then `None` is returned. - /// - /// Compared to [`lower_bound_by_bytes`](Reader::lower_bound_by_bytes), this method returns a versioned item, which means that the returned item - /// may already be marked as removed. - /// - /// ## Safety - /// - The given `key` in `Bound` must be valid to construct to `K::Ref` without remaining. 
- #[inline] - unsafe fn lower_bound_versioned_by_bytes( - &self, - version: u64, - bound: Bound<&[u8]>, - ) -> Option::VersionedItem<'_>>> - where - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord, - ::Value: Type, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .lower_bound_versioned( - version, - bound.map(Slice::<::Key>::ref_cast), - ) - .map(|ent| VersionedEntry::with_version(ent, version)) - } -} - -impl Reader for T -where - T: Constructable, - T::Memtable: MultipleVersionMemtable, - for<'a> ::Item<'a>: VersionedMemtableEntry<'a>, - for<'a> ::VersionedItem<'a>: VersionedMemtableEntry<'a>, -{ -} - -/// An abstract layer for the write-ahead log. -pub trait Writer: Reader -where - Self::Reader: Reader, -{ - /// Returns `true` if this WAL instance is read-only. - #[inline] - fn read_only(&self) -> bool { - self.as_wal().read_only() - } - - /// Returns the mutable reference to the reserved slice. - /// - /// ## Safety - /// - The caller must ensure that the there is no others accessing reserved slice for either read or write. - /// - This method is not thread-safe, so be careful when using it. - #[inline] - unsafe fn reserved_slice_mut<'a>(&'a mut self) -> &'a mut [u8] - where - Self::Allocator: 'a, - { - self.as_wal().reserved_slice_mut() - } - - /// Flushes the to disk. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - fn flush(&self) -> Result<(), Error> { - self.as_wal().flush() - } - - /// Flushes the to disk. - #[cfg(all(feature = "memmap", not(target_family = "wasm")))] - #[cfg_attr(docsrs, doc(cfg(all(feature = "memmap", not(target_family = "wasm")))))] - #[inline] - fn flush_async(&self) -> Result<(), Error> { - self.as_wal().flush_async() - } - - /// Returns the read-only view for the WAL. - fn reader(&self) -> Self::Reader; - - /// Inserts a key-value pair into the WAL. This method - /// allows the caller to build the key in place. - /// - /// See also [`insert_with_value_builder`](Writer::insert_with_value_builder) and [`insert_with_builders`](Writer::insert_with_builders). - #[inline] - fn insert_with_key_builder<'a, E>( - &'a mut self, - version: u64, - kb: KeyBuilder) -> Result>, - value: impl Into::Value>>, - ) -> Result< - (), - Among::Value as Type>::Error, Error>, - > - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().insert(Some(version), kb, value.into()) - } - - /// Inserts a key-value pair into the WAL. This method - /// allows the caller to build the value in place. - /// - /// See also [`insert_with_key_builder`](Writer::insert_with_key_builder) and [`insert_with_builders`](Writer::insert_with_builders). 
- #[inline] - fn insert_with_value_builder<'a, E>( - &'a mut self, - version: u64, - key: impl Into::Key>>, - vb: ValueBuilder) -> Result>, - ) -> Result< - (), - Among<<::Key as Type>::Error, E, Error>, - > - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().insert(Some(version), key.into(), vb) - } - - /// Inserts a key-value pair into the WAL. This method - /// allows the caller to build the key and value in place. - #[inline] - fn insert_with_builders( - &mut self, - version: u64, - kb: KeyBuilder) -> Result>, - vb: ValueBuilder) -> Result>, - ) -> Result<(), Among>> - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().insert(Some(version), kb, vb) - } - - /// Inserts a key-value pair into the WAL. - #[inline] - fn insert<'a>( - &'a mut self, - version: u64, - key: impl Into::Key>>, - value: impl Into::Value>>, - ) -> Result< - (), - Among< - <::Key as Type>::Error, - <::Value as Type>::Error, - Error, - >, - > - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self - .as_wal() - .insert(Some(version), key.into(), value.into()) - } - - /// Removes a key-value pair from the WAL. This method - /// allows the caller to build the key in place. - #[inline] - fn remove_with_builder( - &mut self, - version: u64, - kb: KeyBuilder) -> Result>, - ) -> Result<(), Either>> - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().remove(Some(version), kb) - } - - /// Removes a key-value pair from the WAL. - #[inline] - fn remove<'a>( - &'a mut self, - version: u64, - key: impl Into::Key>>, - ) -> Result<(), Either<<::Key as Type>::Error, Error>> - where - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().remove(Some(version), key.into()) - } - - /// Inserts a batch of key-value pairs into the WAL. 
- #[inline] - fn insert_batch<'a, B>( - &'a mut self, - batch: &mut B, - ) -> Result< - (), - Among< - <::Key as Type>::Error, - <::Value as Type>::Error, - Error, - >, - > - where - B: Batch< - Self::Memtable, - Key = MaybeStructured<'a, ::Key>, - Value = MaybeStructured<'a, ::Value>, - >, - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().insert_batch::(batch) - } - - /// Inserts a batch of key-value pairs into the WAL. - #[inline] - fn insert_batch_with_key_builder<'a, B>( - &'a mut self, - batch: &mut B, - ) -> Result< - (), - Among< - ::Error, - <::Value as Type>::Error, - Error, - >, - > - where - B: Batch::Value>>, - B::Key: BufWriter, - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().insert_batch::(batch) - } - - /// Inserts a batch of key-value pairs into the WAL. - #[inline] - fn insert_batch_with_value_builder<'a, B>( - &'a mut self, - batch: &mut B, - ) -> Result< - (), - Among< - <::Key as Type>::Error, - ::Error, - Error, - >, - > - where - B: Batch::Key>>, - B::Value: BufWriter, - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().insert_batch::(batch) - } - - /// Inserts a batch of key-value pairs into the WAL. - #[inline] - fn insert_batch_with_builders( - &mut self, - batch: &mut B, - ) -> Result<(), Among>> - where - B: Batch, - KB: BufWriter, - VB: BufWriter, - Self::Checksumer: BuildChecksumer, - Self::Memtable: MultipleVersionMemtable, - ::Key: Type + Ord + 'static, - ::Value: Type + 'static, - for<'b> <::Key as Type>::Ref<'b>: - KeyRef<'b, ::Key>, - for<'b> ::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> ::VersionedItem<'b>: - VersionedMemtableEntry<'b>, - { - self.as_wal().insert_batch::(batch) - } -} diff --git a/src/wal/multiple_version/iter.rs b/src/wal/multiple_version/iter.rs deleted file mode 100644 index ea2bf712..00000000 --- a/src/wal/multiple_version/iter.rs +++ /dev/null @@ -1,881 +0,0 @@ -use core::{iter::FusedIterator, marker::PhantomData, ops::RangeBounds}; - -use dbutils::{equivalent::Comparable, types::Type}; - -use crate::{ - memtable::{BaseEntry, MultipleVersionMemtable, VersionedMemtableEntry}, - types::multiple_version::{Entry, Key, Value, VersionedEntry}, - wal::{KeyPointer, ValuePointer}, -}; - -use super::{Query, QueryRange}; - -/// Iterator over the entries in the WAL. 
-pub struct BaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: I, - version: u64, - head: Option<(KeyPointer, ValuePointer)>, - tail: Option<(KeyPointer, ValuePointer)>, - _m: PhantomData<&'a ()>, -} - -impl<'a, I, M> BaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(version: u64, iter: I) -> Self { - Self { - version, - iter, - head: None, - tail: None, - _m: PhantomData, - } - } - - /// Returns the query version of the iterator. - #[inline] - pub(super) const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, I, M> Iterator for BaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: Iterator>, -{ - type Item = M::Item<'a>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().inspect(|ent| { - self.head = Some((ent.key(), ent.value().unwrap())); - }) - } -} - -impl<'a, I, M> DoubleEndedIterator for BaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().inspect(|ent| { - self.tail = Some((ent.key(), ent.value().unwrap())); - }) - } -} - -impl<'a, I, M> FusedIterator for BaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// Iterator over the entries in the WAL. -pub struct MultipleVersionBaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: I, - version: u64, - head: Option<(KeyPointer, Option>)>, - tail: Option<(KeyPointer, Option>)>, - _m: PhantomData<&'a ()>, -} - -impl<'a, I, M> MultipleVersionBaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(version: u64, iter: I) -> Self { - Self { - version, - iter, - head: None, - tail: None, - _m: PhantomData, - } - } - - /// Returns the query version of the iterator. 
- #[inline] - pub(super) const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, I, M> Iterator for MultipleVersionBaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: Iterator>, -{ - type Item = M::VersionedItem<'a>; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().inspect(|ent| { - self.head = Some((ent.key(), ent.value())); - }) - } -} - -impl<'a, I, M> DoubleEndedIterator for MultipleVersionBaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().inspect(|ent| { - self.tail = Some((ent.key(), ent.value())); - }) - } -} - -impl<'a, I, M> FusedIterator for MultipleVersionBaseIter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// Iterator over the entries in the WAL. -pub struct Iter<'a, I, M> -where - M: MultipleVersionMemtable, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: BaseIter<'a, I, M>, - version: u64, -} - -impl<'a, I, M> Iter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: BaseIter<'a, I, M>) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the entries in the iterator. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, I, M> Iterator for Iter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - M::Key: Type + Ord, - M::Value: Type, - I: Iterator>, -{ - type Item = Entry<'a, M::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| Entry::with_version(ent, self.version)) - } -} - -impl<'a, I, M> DoubleEndedIterator for Iter<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - M::Key: Type + Ord, - M::Value: Type, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| Entry::with_version(ent, self.version)) - } -} - -impl<'a, I, M> FusedIterator for Iter<'a, I, M> -where - M::Key: Type + Ord, - M::Value: Type, - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// Iterator over the keys in the WAL. 
-pub struct Keys<'a, I, M> -where - M: MultipleVersionMemtable, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: BaseIter<'a, I, M>, - version: u64, -} - -impl<'a, I, M> Keys<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: BaseIter<'a, I, M>) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the keys in the iterator. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, I, M> Iterator for Keys<'a, I, M> -where - M::Key: Type, - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: Iterator>, -{ - type Item = Key<'a, M::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| Key::with_version(ent, self.version)) - } -} - -impl<'a, I, M> DoubleEndedIterator for Keys<'a, I, M> -where - M::Key: Type, - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| Key::with_version(ent, self.version)) - } -} - -impl<'a, I, M> FusedIterator for Keys<'a, I, M> -where - M::Key: Type, - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// Iterator over the values in the WAL. -pub struct Values<'a, I, M> -where - M: MultipleVersionMemtable, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: BaseIter<'a, I, M>, - version: u64, -} - -impl<'a, I, M> Values<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: BaseIter<'a, I, M>) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the values in the iterator. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, I, M> Iterator for Values<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - M::Value: Type, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: Iterator>, -{ - type Item = Value<'a, M::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| Value::with_version(ent, self.version)) - } -} - -impl<'a, I, M> DoubleEndedIterator for Values<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - M::Value: Type, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| Value::with_version(ent, self.version)) - } -} - -impl<'a, I, M> FusedIterator for Values<'a, I, M> -where - M::Value: Type, - M: MultipleVersionMemtable + 'a, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// An iterator over a subset of the entries in the WAL. 
-pub struct Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - version: u64, -} - -impl<'a, R, Q, B> Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new( - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - ) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the entries in the iterator. - #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, R, Q, B> Iterator for Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: Iterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - type Item = Entry<'a, B::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| Entry::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> DoubleEndedIterator for Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - DoubleEndedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| Entry::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> FusedIterator for Range<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - FusedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ -} - -/// An iterator over the keys in a subset of the entries in the WAL. -pub struct RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - version: u64, -} - -impl<'a, R, Q, B> RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new( - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - ) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the keys in the iterator. 
- #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, R, Q, B> Iterator for RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: Iterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - type Item = Key<'a, B::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| Key::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> DoubleEndedIterator for RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - DoubleEndedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| Key::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> FusedIterator for RangeKeys<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - FusedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ -} - -/// An iterator over the values in a subset of the entries in the WAL. -pub struct RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - version: u64, -} - -impl<'a, R, Q, B> RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new( - iter: BaseIter<'a, B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, B>, - ) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the iterator. 
- #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, R, Q, B> Iterator for RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: Iterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - type Item = Value<'a, B::Item<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| Value::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> DoubleEndedIterator for RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - DoubleEndedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| Value::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> FusedIterator for RangeValues<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::Range<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - FusedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ -} - -/// Iterator over the entries in the WAL. -pub struct IterAll<'a, I, M> -where - M: MultipleVersionMemtable, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: MultipleVersionBaseIter<'a, I, M>, - version: u64, -} - -impl<'a, I, M> IterAll<'a, I, M> -where - M: MultipleVersionMemtable, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new(iter: MultipleVersionBaseIter<'a, I, M>) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the entries in the iterator. 
- #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, I, M> Iterator for IterAll<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - M::Key: Type + Ord, - M::Value: Type, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: Iterator>, -{ - type Item = VersionedEntry<'a, M::VersionedItem<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| VersionedEntry::with_version(ent, self.version)) - } -} - -impl<'a, I, M> DoubleEndedIterator for IterAll<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - M::Key: Type + Ord, - M::Value: Type, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: DoubleEndedIterator>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| VersionedEntry::with_version(ent, self.version)) - } -} - -impl<'a, I, M> FusedIterator for IterAll<'a, I, M> -where - M: MultipleVersionMemtable + 'a, - M::Key: Type + Ord, - M::Value: Type, - for<'b> M::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> M::VersionedItem<'b>: VersionedMemtableEntry<'b>, - I: FusedIterator>, -{ -} - -/// An iterator over a subset of the entries in the WAL. -pub struct RangeAll<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - iter: MultipleVersionBaseIter< - 'a, - B::RangeAll<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, - B, - >, - version: u64, -} - -impl<'a, R, Q, B> RangeAll<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - pub(super) fn new( - iter: MultipleVersionBaseIter< - 'a, - B::RangeAll<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>, - B, - >, - ) -> Self { - Self { - version: iter.version(), - iter, - } - } - - /// Returns the query version of the entries in the iterator. 
- #[inline] - pub const fn version(&self) -> u64 { - self.version - } -} - -impl<'a, R, Q, B> Iterator for RangeAll<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::RangeAll<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - Iterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - type Item = VersionedEntry<'a, B::VersionedItem<'a>>; - - #[inline] - fn next(&mut self) -> Option { - self - .iter - .next() - .map(|ent| VersionedEntry::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> DoubleEndedIterator for RangeAll<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::RangeAll<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - DoubleEndedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ - #[inline] - fn next_back(&mut self) -> Option { - self - .iter - .next_back() - .map(|ent| VersionedEntry::with_version(ent, self.version)) - } -} - -impl<'a, R, Q, B> FusedIterator for RangeAll<'a, R, Q, B> -where - R: RangeBounds + 'a, - Q: ?Sized + Comparable<::Ref<'a>>, - B: MultipleVersionMemtable + 'a, - B::Key: Type + Ord, - B::Value: Type, - B::RangeAll<'a, Query<'a, B::Key, Q>, QueryRange<'a, B::Key, Q, R>>: - FusedIterator>, - for<'b> B::Item<'b>: VersionedMemtableEntry<'b>, - for<'b> B::VersionedItem<'b>: VersionedMemtableEntry<'b>, -{ -} diff --git a/src/wal/pointer.rs b/src/wal/pointer.rs deleted file mode 100644 index 2b792570..00000000 --- a/src/wal/pointer.rs +++ /dev/null @@ -1,249 +0,0 @@ -use core::{cmp, marker::PhantomData, mem, slice}; - -use dbutils::{ - buffer::VacantBuffer, - equivalent::Comparable, - types::{KeyRef, Type, TypeRef}, -}; - -use crate::types::EntryFlags; - -const PTR_SIZE: usize = mem::size_of::(); -const U32_SIZE: usize = mem::size_of::(); - -pub struct ValuePointer { - ptr: *const u8, - len: usize, - _m: PhantomData, -} - -unsafe impl Send for ValuePointer {} -unsafe impl Sync for ValuePointer {} - -impl core::fmt::Debug for ValuePointer { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("ValuePointer") - .field("ptr", &self.ptr) - .field("value", &self.as_slice()) - .finish() - } -} - -impl Clone for ValuePointer { - #[inline] - fn clone(&self) -> Self { - *self - } -} - -impl Copy for ValuePointer {} - -impl ValuePointer { - #[inline] - pub(crate) fn new(len: usize, ptr: *const u8) -> Self { - Self { - ptr, - len, - _m: PhantomData, - } - } - - #[inline] - pub(crate) fn as_slice<'a>(&self) -> &'a [u8] { - if self.len == 0 { - return &[]; - } - - // SAFETY: `ptr` is a valid pointer to `len` bytes. - unsafe { slice::from_raw_parts(self.ptr, self.len) } - } -} - -impl Type for ValuePointer -where - V: ?Sized, -{ - type Ref<'a> = Self; - - type Error = (); - - #[inline] - fn encoded_len(&self) -> usize { - const SIZE: usize = PTR_SIZE + U32_SIZE; - SIZE - } - - #[inline] - fn encode_to_buffer(&self, buf: &mut VacantBuffer<'_>) -> Result { - // Safe to cast to u32 here, because the key and value length are guaranteed to be less than or equal to u32::MAX. 
- let val_len = self.len as u32; - let ptr = self.ptr as usize; - - buf.set_len(self.encoded_len()); - buf[0..PTR_SIZE].copy_from_slice(&ptr.to_le_bytes()); - - buf[PTR_SIZE..PTR_SIZE + U32_SIZE].copy_from_slice(&val_len.to_le_bytes()); - - Ok(PTR_SIZE + U32_SIZE) - } -} - -impl<'a, V: ?Sized> TypeRef<'a> for ValuePointer { - unsafe fn from_slice(src: &'a [u8]) -> Self { - let ptr = usize_to_addr(usize::from_le_bytes((&src[..PTR_SIZE]).try_into().unwrap())); - let len = - u32::from_le_bytes((&src[PTR_SIZE..PTR_SIZE + U32_SIZE]).try_into().unwrap()) as usize; - - Self::new(len, ptr) - } -} - -#[doc(hidden)] -pub struct KeyPointer { - flag: EntryFlags, - ptr: *const u8, - len: usize, - _m: PhantomData, -} - -unsafe impl Send for KeyPointer {} -unsafe impl Sync for KeyPointer {} - -impl core::fmt::Debug for KeyPointer { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("KeyPointer") - .field("ptr", &self.ptr) - .field("flag", &self.flag) - .field("key", &self.as_slice()) - .finish() - } -} - -impl Clone for KeyPointer { - #[inline] - fn clone(&self) -> Self { - *self - } -} - -impl Copy for KeyPointer {} - -impl KeyPointer { - #[inline] - pub(crate) fn new(flag: EntryFlags, len: usize, ptr: *const u8) -> Self { - Self { - ptr, - flag, - len, - _m: PhantomData, - } - } - - #[inline] - pub(crate) fn as_slice<'a>(&self) -> &'a [u8] { - if self.len == 0 { - return &[]; - } - - // SAFETY: `ptr` is a valid pointer to `len` bytes. - unsafe { slice::from_raw_parts(self.ptr, self.len) } - } -} - -impl PartialEq for KeyPointer { - fn eq(&self, other: &Self) -> bool { - self.as_slice() == other.as_slice() - } -} - -impl Eq for KeyPointer {} - -impl<'a, K> PartialOrd for KeyPointer -where - K: Type + Ord + ?Sized, - K::Ref<'a>: KeyRef<'a, K>, -{ - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl<'a, K> Ord for KeyPointer -where - K: Type + Ord + ?Sized, - K::Ref<'a>: KeyRef<'a, K>, -{ - fn cmp(&self, other: &Self) -> cmp::Ordering { - // SAFETY: WALs guarantee that the self and other must be the same as the result returned by `::encode`. - unsafe { as KeyRef>::compare_binary(self.as_slice(), other.as_slice()) } - } -} - -impl Type for KeyPointer -where - K: ?Sized, -{ - type Ref<'a> = Self; - - type Error = (); - - #[inline] - fn encoded_len(&self) -> usize { - const SIZE: usize = PTR_SIZE + U32_SIZE + mem::size_of::(); - SIZE - } - - #[inline] - fn encode_to_buffer(&self, buf: &mut VacantBuffer<'_>) -> Result { - // Safe to cast to u32 here, because the key and value length are guaranteed to be less than or equal to u32::MAX. 
- let key_len = self.len as u32; - let ptr = self.ptr as usize; - - buf.set_len(self.encoded_len()); - buf[0..PTR_SIZE].copy_from_slice(&ptr.to_le_bytes()); - - let mut offset = PTR_SIZE; - buf[offset] = self.flag.bits(); - offset += 1; - buf[offset..offset + U32_SIZE].copy_from_slice(&key_len.to_le_bytes()); - - Ok(offset + U32_SIZE) - } -} - -impl<'a, K: ?Sized> TypeRef<'a> for KeyPointer { - unsafe fn from_slice(src: &'a [u8]) -> Self { - let ptr = usize_to_addr(usize::from_le_bytes((&src[..PTR_SIZE]).try_into().unwrap())); - let mut offset = PTR_SIZE; - let flag = EntryFlags::from_bits_retain(src[offset]); - offset += 1; - let key_len = - u32::from_le_bytes((&src[offset..offset + U32_SIZE]).try_into().unwrap()) as usize; - - Self::new(flag, key_len, ptr) - } -} - -impl<'a, K> KeyRef<'a, Self> for KeyPointer -where - K: Type + Ord + ?Sized, - K::Ref<'a>: KeyRef<'a, K>, -{ - #[inline] - fn compare(&self, a: &Q) -> cmp::Ordering - where - Q: ?Sized + Ord + Comparable, - { - Comparable::compare(a, self).reverse() - } - - #[inline] - unsafe fn compare_binary(a: &[u8], b: &[u8]) -> cmp::Ordering { - as KeyRef>::compare_binary(a, b) - } -} - -#[inline] -const fn usize_to_addr(addr: usize) -> *const T { - addr as *const T -} diff --git a/src/wal/query.rs b/src/wal/query.rs deleted file mode 100644 index 51a68d8a..00000000 --- a/src/wal/query.rs +++ /dev/null @@ -1,107 +0,0 @@ -use core::{ - cmp, - marker::PhantomData, - ops::{Bound, RangeBounds}, -}; - -use dbutils::{ - equivalent::{Comparable, Equivalent}, - types::{KeyRef, Type, TypeRef}, -}; -use ref_cast::RefCast; - -use super::KeyPointer; - -#[derive(ref_cast::RefCast)] -#[repr(transparent)] -pub struct Slice<'a, K: ?Sized> { - _k: PhantomData<&'a K>, - data: [u8], -} - -impl<'a, K> Equivalent> for Slice<'a, K> -where - K: Type + ?Sized, - K::Ref<'a>: KeyRef<'a, K>, -{ - fn equivalent(&self, key: &KeyPointer) -> bool { - self.data.eq(key.as_slice()) - } -} - -impl<'a, K> Comparable> for Slice<'a, K> -where - K: Type + ?Sized, - K::Ref<'a>: KeyRef<'a, K>, -{ - fn compare(&self, p: &KeyPointer) -> cmp::Ordering { - unsafe { as KeyRef>::compare_binary(&self.data, p.as_slice()) } - } -} - -pub struct QueryRange<'a, K: ?Sized, Q: ?Sized, R> -where - R: RangeBounds, -{ - r: R, - _q: PhantomData<(&'a Q, &'a K)>, -} - -impl QueryRange<'_, K, Q, R> -where - R: RangeBounds, -{ - #[inline] - pub(super) const fn new(r: R) -> Self { - Self { r, _q: PhantomData } - } -} - -impl<'a, K: ?Sized, Q: ?Sized, R> RangeBounds> for QueryRange<'a, K, Q, R> -where - R: RangeBounds, -{ - #[inline] - fn start_bound(&self) -> Bound<&Query<'a, K, Q>> { - self.r.start_bound().map(RefCast::ref_cast) - } - - fn end_bound(&self) -> Bound<&Query<'a, K, Q>> { - self.r.end_bound().map(RefCast::ref_cast) - } -} - -#[derive(ref_cast::RefCast)] -#[repr(transparent)] -pub struct Query<'a, K, Q> -where - K: ?Sized, - Q: ?Sized, -{ - _k: PhantomData<&'a K>, - key: Q, -} - -impl<'a, K, Q> Equivalent> for Query<'a, K, Q> -where - K: Type + ?Sized, - Q: ?Sized + Equivalent>, -{ - #[inline] - fn equivalent(&self, p: &KeyPointer) -> bool { - let kr = unsafe { as TypeRef<'_>>::from_slice(p.as_slice()) }; - Equivalent::equivalent(&self.key, &kr) - } -} - -impl<'a, K, Q> Comparable> for Query<'a, K, Q> -where - K: Type + ?Sized, - Q: ?Sized + Comparable>, -{ - #[inline] - fn compare(&self, p: &KeyPointer) -> cmp::Ordering { - let kr = unsafe { as TypeRef<'_>>::from_slice(p.as_slice()) }; - Comparable::compare(&self.key, &kr) - } -}