Skip to content

Commit

Permalink
Merge pull request #2972 from o1-labs/martin/marc/query-saffron
Browse files Browse the repository at this point in the history
[saffron] Property based test for query
  • Loading branch information
martyall authored Jan 29, 2025
2 parents 5e4b290 + 76965e6 commit e3253d3
Show file tree
Hide file tree
Showing 3 changed files with 105 additions and 116 deletions.
3 changes: 1 addition & 2 deletions saffron/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,8 @@ time = { version = "0.3", features = ["macros"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = [ "ansi", "env-filter", "fmt", "time" ] }

ark-std.workspace = true

[dev-dependencies]
ark-std.workspace = true
ctor = "0.2"
proptest.workspace = true
once_cell.workspace = true
79 changes: 3 additions & 76 deletions saffron/src/blob.rs
Original file line number Diff line number Diff line change
Expand Up @@ -92,86 +92,13 @@ impl<G: CommitmentCurve> FieldBlob<G> {
}
}

#[cfg(test)]
mod blob_test_utils {
    use proptest::prelude::*;

    /// Arbitrary byte payload used as blob input in property tests.
    #[derive(Debug)]
    pub struct BlobData(pub Vec<u8>);

    /// Rough size classes for generated blob payloads.
    #[derive(Clone, Debug)]
    pub enum DataSize {
        Small,
        Medium,
        Large,
    }

    impl DataSize {
        const KB: usize = 1_000;
        const MB: usize = 1_000_000;

        /// Lower/upper byte bounds for this size class (upper bound exclusive
        /// when used as a `Range`).
        fn size_range_bytes(&self) -> (usize, usize) {
            match self {
                // Small: 1KB - 1MB
                DataSize::Small => (DataSize::KB, DataSize::MB),
                // Medium: 1MB - 10MB
                DataSize::Medium => (DataSize::MB, 10 * DataSize::MB),
                // Large: 10MB - 100MB
                DataSize::Large => (10 * DataSize::MB, 100 * DataSize::MB),
            }
        }
    }

    impl Arbitrary for DataSize {
        type Parameters = ();
        type Strategy = BoxedStrategy<Self>;

        // Weighted choice: small inputs dominate so the suite stays fast,
        // but medium/large payloads still show up occasionally.
        fn arbitrary_with(_: ()) -> Self::Strategy {
            prop_oneof![
                6 => Just(DataSize::Small), // 60% chance
                3 => Just(DataSize::Medium),
                1 => Just(DataSize::Large)
            ]
            .boxed()
        }
    }

    impl Default for DataSize {
        fn default() -> Self {
            DataSize::Small
        }
    }

    impl Arbitrary for BlobData {
        type Parameters = DataSize;
        type Strategy = BoxedStrategy<Self>;

        // No size class supplied: draw one at random first, then draw the
        // bytes from that class's range.
        fn arbitrary() -> Self::Strategy {
            let bytes = DataSize::arbitrary().prop_flat_map(|size| {
                let (lo, hi) = size.size_range_bytes();
                prop::collection::vec(any::<u8>(), lo..hi)
            });
            bytes.prop_map(BlobData).boxed()
        }

        // Caller picked the size class explicitly.
        fn arbitrary_with(size: Self::Parameters) -> Self::Strategy {
            let (lo, hi) = size.size_range_bytes();
            prop::collection::vec(any::<u8>(), lo..hi)
                .prop_map(BlobData)
                .boxed()
        }
    }
}

#[cfg(test)]
mod tests {
use crate::{commitment::commit_to_field_elems, env};

use super::*;
use crate::utils::test_utils::*;
use ark_poly::Radix2EvaluationDomain;
use blob_test_utils::*;
use mina_curves::pasta::{Fp, Vesta};
use once_cell::sync::Lazy;
use proptest::prelude::*;
Expand All @@ -191,7 +118,7 @@ mod tests {
proptest! {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_round_trip_blob_encoding(BlobData(xs) in BlobData::arbitrary())
fn test_round_trip_blob_encoding(UserData(xs) in UserData::arbitrary())
{ let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
let bytes = rmp_serde::to_vec(&blob).unwrap();
let a = rmp_serde::from_slice(&bytes).unwrap();
Expand All @@ -206,7 +133,7 @@ mod tests {
proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_user_and_storage_provider_commitments_equal(BlobData(xs) in BlobData::arbitrary())
fn test_user_and_storage_provider_commitments_equal(UserData(xs) in UserData::arbitrary())
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
Expand Down
139 changes: 101 additions & 38 deletions saffron/src/utils.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use ark_ff::{BigInteger, PrimeField};
use ark_poly::EvaluationDomain;
use ark_std::rand::Rng;

// For injectivity, you can only use this on inputs of length at most
// 'F::MODULUS_BIT_SIZE / 8', e.g. for Vesta this is 31.
Expand Down Expand Up @@ -57,17 +56,6 @@ pub struct QueryBytes {
pub len: usize,
}

/// For testing purposes
impl QueryBytes {
pub fn random(size: usize) -> Self {
let mut rng = ark_std::rand::thread_rng();
let start = rng.gen_range(0..size);
QueryBytes {
start,
len: rng.gen_range(0..(size - start)),
}
}
}
#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Debug)]
/// We store the data in a vector of vector of field element
/// The inner vector represent polynomials
Expand Down Expand Up @@ -156,6 +144,86 @@ impl Into<QueryField> for QueryBytes {
}
}
}

#[cfg(test)]
pub mod test_utils {
    use proptest::prelude::*;

    /// Arbitrary user payload (raw bytes) shared by property tests across
    /// the crate.
    #[derive(Debug, Clone)]
    pub struct UserData(pub Vec<u8>);

    impl UserData {
        /// Number of bytes in the payload.
        pub fn len(&self) -> usize {
            self.0.len()
        }

        /// Whether the payload holds no bytes.
        ///
        /// Provided alongside `len` to satisfy clippy's
        /// `len_without_is_empty` lint.
        pub fn is_empty(&self) -> bool {
            self.0.is_empty()
        }
    }

    /// Rough size classes for generated payloads.
    #[derive(Clone, Debug)]
    pub enum DataSize {
        Small,
        Medium,
        Large,
    }

    impl DataSize {
        const KB: usize = 1_000;
        const MB: usize = 1_000_000;

        /// Lower/upper byte bounds for this size class (upper bound exclusive
        /// when used as a `Range`).
        fn size_range_bytes(&self) -> (usize, usize) {
            match self {
                // Small: 1KB - 1MB
                Self::Small => (Self::KB, Self::MB),
                // Medium: 1MB - 10MB
                Self::Medium => (Self::MB, 10 * Self::MB),
                // Large: 10MB - 100MB
                Self::Large => (10 * Self::MB, 100 * Self::MB),
            }
        }
    }

    impl Arbitrary for DataSize {
        type Parameters = ();
        type Strategy = BoxedStrategy<Self>;

        // Weighted choice keeps most runs fast while still exercising
        // medium/large payloads occasionally.
        fn arbitrary_with(_: ()) -> Self::Strategy {
            prop_oneof![
                6 => Just(DataSize::Small), // 60% chance
                3 => Just(DataSize::Medium),
                1 => Just(DataSize::Large)
            ]
            .boxed()
        }
    }

    impl Default for DataSize {
        fn default() -> Self {
            Self::Small
        }
    }

    impl Arbitrary for UserData {
        type Parameters = DataSize;
        type Strategy = BoxedStrategy<Self>;

        // No size class supplied: draw one at random, then draw bytes from
        // that class's range.
        fn arbitrary() -> Self::Strategy {
            DataSize::arbitrary()
                .prop_flat_map(|size| {
                    let (min, max) = size.size_range_bytes();
                    prop::collection::vec(any::<u8>(), min..max)
                })
                .prop_map(UserData)
                .boxed()
        }

        // Caller picked the size class explicitly.
        fn arbitrary_with(size: Self::Parameters) -> Self::Strategy {
            let (min, max) = size.size_range_bytes();
            prop::collection::vec(any::<u8>(), min..max)
                .prop_map(UserData)
                .boxed()
        }
    }
}

#[cfg(test)]
mod tests {
use super::*;
Expand All @@ -165,6 +233,7 @@ mod tests {
use o1_utils::FieldHelpers;
use once_cell::sync::Lazy;
use proptest::prelude::*;
use test_utils::UserData;

fn decode<Fp: PrimeField>(x: Fp) -> Vec<u8> {
let mut buffer = vec![0u8; Fp::size_in_bytes()];
Expand Down Expand Up @@ -215,7 +284,7 @@ mod tests {
proptest! {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_round_trip_encoding_to_field_elems(xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size())
fn test_round_trip_encoding_to_field_elems(UserData(xs) in UserData::arbitrary()
)
{ let chunked = encode_for_domain(&*DOMAIN, &xs);
let elems = chunked
Expand All @@ -230,33 +299,27 @@ mod tests {
}
}

// check that applying a field query = applying a byte query
proptest! {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_round_trip_query(xs in prop::collection::vec(any::<u8>(), 0..10 * Fp::size_in_bytes() *DOMAIN.size() )
) {
proptest! ( |query in prop::strategy::Just(QueryBytes::random(xs.len()))|
let chunked = encode_for_domain(&*DOMAIN, &xs);
let expected_answer = &xs[query.start..(query.start+query.len)];
let field_query :QueryField = query.clone().into();
let got_answer = field_query.apply(chunked);
prop_assert_eq!(expected_answer,got_answer);
)

}
}


}
proptest! {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_dependent_args(base in 0..100) {
let multiplied = (1..10).prop_map(|factor| base * factor);
prop_assume!(base > 0);
proptest!(|(multiplied in multiplied)| {
prop_assert!(base * multiplied != 0);
});
fn test_query(
(UserData(xs), queries) in UserData::arbitrary()
.prop_flat_map(|xs| {
let n = xs.len();
let query_strategy = (0..(n - 1)).prop_flat_map(move |start| {
((start + 1)..n).prop_map(move |end| QueryBytes { start, len: end - start})
});
let queries_strategy = prop::collection::vec(query_strategy, 10);
(Just(xs), queries_strategy)
})
) {
let chunked = encode_for_domain(&*DOMAIN, &xs);
for query in queries {
let expected = &xs[query.start..(query.start+query.len)];
let field_query: QueryField = query.clone().into();
let got_answer = field_query.apply(chunked.clone()); // Note: might need clone depending on your types
prop_assert_eq!(expected, got_answer);
}
}
}
}

0 comments on commit e3253d3

Please sign in to comment.