Proof of storage #2968

Closed · wants to merge 5 commits
16 changes: 15 additions & 1 deletion Cargo.lock

(Generated file; diff not rendered.)

8 changes: 6 additions & 2 deletions saffron/Cargo.toml
@@ -29,15 +29,19 @@ mina-curves.workspace = true
mina-poseidon.workspace = true
o1-utils.workspace = true
poly-commitment.workspace = true
rand.workspace = true
rayon.workspace = true
rmp-serde.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_with.workspace = true
sha3.workspace = true
time = { version = "0.3", features = ["macros"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = [ "ansi", "env-filter", "fmt", "time" ] }


[dev-dependencies]
ark-std.workspace = true
ctor = "0.2"
proptest.workspace = true
once_cell.workspace = true
131 changes: 61 additions & 70 deletions saffron/src/blob.rs
@@ -1,80 +1,55 @@
use crate::utils::{decode_into, encode_for_domain};
use ark_ff::{Field, PrimeField};
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations};
use ark_serialize::{
CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid, Validate,
Write,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use o1_utils::FieldHelpers;
use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, PolyComm, SRS as _};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use tracing::{debug, instrument};

// A FieldBlob<F> represents the encoding of a Vec<u8> as a list of polynomials over F,
// where F is a prime field. The polynomials are represented in the monomial basis.
#[derive(Clone, Debug, PartialEq)]
pub struct FieldBlob<F: Field> {
#[serde_as]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
#[serde(bound = "G::ScalarField : CanonicalDeserialize + CanonicalSerialize")]
pub struct FieldBlob<G: CommitmentCurve> {
pub n_bytes: usize,
pub domain_size: usize,
pub data: Vec<DensePolynomial<F>>,
pub commitments: Vec<PolyComm<G>>,
#[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
pub data: Vec<DensePolynomial<G::ScalarField>>,
}

impl<F: CanonicalSerialize + Field> CanonicalSerialize for FieldBlob<F> {
fn serialize_with_mode<W: Write>(
&self,
mut writer: W,
mode: Compress,
) -> Result<(), SerializationError> {
self.n_bytes.serialize_with_mode(&mut writer, mode)?;
self.domain_size.serialize_with_mode(&mut writer, mode)?;
self.data.serialize_with_mode(&mut writer, mode)?;
Ok(())
}

fn serialized_size(&self, mode: Compress) -> usize {
self.n_bytes.serialized_size(mode)
+ self.domain_size.serialized_size(mode)
+ self.data.serialized_size(mode)
}
}

impl<F: Valid + Field> Valid for FieldBlob<F> {
fn check(&self) -> Result<(), SerializationError> {
self.n_bytes.check()?;
self.domain_size.check()?;
self.data.check()?;
Ok(())
}
#[instrument(skip_all, level = "debug")]
fn commit_to_blob_data<G: CommitmentCurve>(
srs: &SRS<G>,
data: &[DensePolynomial<G::ScalarField>],
) -> Vec<PolyComm<G>> {
let num_chunks = 1;
data.par_iter()
.map(|p| srs.commit_non_hiding(p, num_chunks))
.collect()
}

impl<F: CanonicalDeserialize + Field> CanonicalDeserialize for FieldBlob<F> {
fn deserialize_with_mode<R: Read>(
mut reader: R,
compress: Compress,
validate: Validate,
) -> Result<Self, SerializationError> {
let n_bytes = usize::deserialize_with_mode(&mut reader, compress, validate)?;
let domain_size = usize::deserialize_with_mode(&mut reader, compress, validate)?;
let data =
Vec::<DensePolynomial<F>>::deserialize_with_mode(&mut reader, compress, validate)?;
Ok(Self {
n_bytes,
domain_size,
data,
})
}
}

impl<F: PrimeField> FieldBlob<F> {
#[instrument(skip_all)]
pub fn encode<D: EvaluationDomain<F>>(domain: D, bytes: &[u8]) -> FieldBlob<F> {
impl<G: CommitmentCurve> FieldBlob<G> {
#[instrument(skip_all, level = "debug")]
pub fn encode<D: EvaluationDomain<G::ScalarField>>(
srs: &SRS<G>,
domain: D,
bytes: &[u8],
) -> FieldBlob<G> {
let field_elements = encode_for_domain(&domain, bytes);
let domain_size = domain.size();

let data: Vec<DensePolynomial<F>> = field_elements
let data: Vec<DensePolynomial<G::ScalarField>> = field_elements
.par_iter()
.map(|chunk| Evaluations::from_vec_and_domain(chunk.to_vec(), domain).interpolate())
.collect();

let commitments = commit_to_blob_data(srs, &data);

debug!(
"Encoded {} bytes into {} polynomials",
bytes.len(),
@@ -84,12 +59,13 @@ impl<F: PrimeField> FieldBlob<F> {
FieldBlob {
n_bytes: bytes.len(),
domain_size,
commitments,
data,
}
}

#[instrument(skip_all)]
pub fn decode<D: EvaluationDomain<F>>(domain: D, blob: FieldBlob<F>) -> Vec<u8> {
#[instrument(skip_all, level = "debug")]
pub fn decode<D: EvaluationDomain<G::ScalarField>>(domain: D, blob: FieldBlob<G>) -> Vec<u8> {
// TODO: find an Error type and use Result
if domain.size() != blob.domain_size {
panic!(
@@ -98,8 +74,8 @@ impl<F: PrimeField> FieldBlob<F> {
domain.size()
);
}
let n = (F::MODULUS_BIT_SIZE / 8) as usize;
let m = F::size_in_bytes();
let n = (G::ScalarField::MODULUS_BIT_SIZE / 8) as usize;
let m = G::ScalarField::size_in_bytes();
let mut bytes = Vec::with_capacity(blob.n_bytes);
let mut buffer = vec![0u8; m];

@@ -118,31 +94,46 @@

#[cfg(test)]
mod tests {
use crate::commitment::commit_to_field_elems;

use super::*;
use ark_poly::Radix2EvaluationDomain;
use mina_curves::pasta::Fp;
use mina_curves::pasta::{Fp, Vesta};
use once_cell::sync::Lazy;
use proptest::prelude::*;

static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> = Lazy::new(|| {
const SRS_SIZE: usize = 1 << 16;
Radix2EvaluationDomain::new(SRS_SIZE).unwrap()
});
const SRS_SIZE: usize = 1 << 16;

static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| SRS::create(SRS_SIZE));

static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
Lazy::new(|| Radix2EvaluationDomain::new(SRS_SIZE).unwrap());

// check that Vec<u8> -> FieldBlob<Vesta> -> Vec<u8> is the identity function
proptest! {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_round_trip_blob_encoding( xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size()))
{ let blob = FieldBlob::<Fp>::encode(*DOMAIN, &xs);
let mut buf = Vec::new();
blob.serialize_compressed(&mut buf).unwrap();
let a = FieldBlob::<Fp>::deserialize_compressed(&buf[..]).unwrap();
{ let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
let bytes = rmp_serde::to_vec(&blob).unwrap();
let a = rmp_serde::from_slice(&bytes).unwrap();
// check that rmp-serde round-trips the blob as expected
prop_assert_eq!(blob.clone(), a);
let ys = FieldBlob::<Fp>::decode(*DOMAIN, blob);
let ys = FieldBlob::<Vesta>::decode(*DOMAIN, blob);
// check that we get the byte blob back again
prop_assert_eq!(xs,ys);
}
}

proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_user_and_storage_provider_commitments_equal(xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size())
)
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
prop_assert_eq!(user_commitments, blob.commitments);
}
}
}
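
For reviewers who want to exercise the new API end to end, here is a minimal round-trip sketch distilled from the test above. The encode/decode calls, imports, and SRS/domain construction mirror this diff; the crate path saffron::blob::FieldBlob, the main wrapper, and the smaller SRS size are illustrative assumptions.

use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
use mina_curves::pasta::{Fp, Vesta};
use poly_commitment::{ipa::SRS, SRS as _};
use saffron::blob::FieldBlob; // assumed module path for this PR's FieldBlob

fn main() {
    // The SRS and the evaluation domain must have matching sizes
    // (the tests use 1 << 16; a smaller size is used here for illustration).
    let srs: SRS<Vesta> = SRS::create(1 << 8);
    let domain = Radix2EvaluationDomain::<Fp>::new(1 << 8).unwrap();

    // encode interpolates the bytes into polynomials and, new in this PR,
    // commits to them as a side effect (stored in blob.commitments).
    let bytes = b"hello, proof of storage".to_vec();
    let blob = FieldBlob::<Vesta>::encode(&srs, domain, &bytes);

    // decode inverts the encoding and returns the original bytes.
    let roundtrip = FieldBlob::<Vesta>::decode(domain, blob);
    assert_eq!(bytes, roundtrip);
}

Note that after this PR the storage provider no longer calls a separate commit_to_blob: encoding produces the commitments, which is what the moved proptest checks against the user-side commit_to_field_elems.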
69 changes: 4 additions & 65 deletions saffron/src/commitment.rs
@@ -1,4 +1,3 @@
use crate::blob::FieldBlob;
use ark_ec::AffineRepr;
use ark_ff::One;
use ark_poly::{Evaluations, Radix2EvaluationDomain as D};
@@ -11,7 +10,7 @@ use poly_commitment::{
use rayon::prelude::*;
use tracing::instrument;

#[instrument(skip_all)]
#[instrument(skip_all, level = "debug")]
pub fn commit_to_field_elems<G: CommitmentCurve>(
srs: &SRS<G>,
domain: D<G::ScalarField>,
@@ -26,19 +25,7 @@ pub fn commit_to_field_elems<G: CommitmentCurve>(
.collect()
}

#[instrument(skip_all)]
pub fn commit_to_blob<G: CommitmentCurve>(
srs: &SRS<G>,
blob: &FieldBlob<G::ScalarField>,
) -> Vec<PolyComm<G>> {
let num_chunks = 1;
blob.data
.par_iter()
.map(|p| srs.commit_non_hiding(p, num_chunks))
.collect()
}

#[instrument(skip_all)]
#[instrument(skip_all, level = "debug")]
pub fn fold_commitments<
G: AffineRepr,
EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
@@ -49,62 +36,14 @@ pub fn fold_commitments<
for commitment in commitments {
absorb_commitment(sponge, commitment)
}
let challenge = sponge.challenge();
let alpha = sponge.challenge();
let powers: Vec<G::ScalarField> = commitments
.iter()
.scan(G::ScalarField::one(), |acc, _| {
let res = *acc;
*acc *= challenge;
*acc *= alpha;
Some(res)
})
.collect::<Vec<_>>();
PolyComm::multi_scalar_mul(&commitments.iter().collect::<Vec<_>>(), &powers)
}

#[cfg(test)]
mod tests {
use crate::utils::encode_for_domain;

use super::*;
use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
use mina_curves::pasta::{Fp, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
use o1_utils::FieldHelpers;
use once_cell::sync::Lazy;
use proptest::prelude::*;

const SRS_SIZE: usize = 1 << 16;

static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| SRS::create(SRS_SIZE));

static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
Lazy::new(|| Radix2EvaluationDomain::new(SRS_SIZE).unwrap());

proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_user_and_storage_provider_commitments_equal(xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size())
)
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Fp>::encode(*DOMAIN, &xs);
let storeage_provider_commitments = commit_to_blob(&*SRS, &blob);
prop_assert_eq!(&user_commitments, &storeage_provider_commitments);
let user_commitment =
{ let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
fold_commitments(&mut fq_sponge, &user_commitments)

};
let storage_provider_commitment =
{ let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
fold_commitments(&mut fq_sponge, &storeage_provider_commitments)

};
prop_assert_eq!(&user_commitment, &storage_provider_commitment);
}
}
}
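
The fold_commitments change above only renames the squeezed challenge to alpha: the function still absorbs every commitment C_0, ..., C_{n-1} into the sponge, squeezes alpha, and folds them into the single random linear combination sum_i alpha^i * C_i via multi_scalar_mul. A minimal caller sketch follows; the sponge construction is verbatim from the test this PR removes, while the fold wrapper function and the saffron::commitment module path are illustrative assumptions.

use mina_curves::pasta::{Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
use poly_commitment::PolyComm;
use saffron::commitment::fold_commitments; // assumed module path

// Both the user and the storage provider must instantiate the sponge with
// the same parameters, so they derive the same challenge alpha and hence
// the same folded commitment.
fn fold(commitments: &[PolyComm<Vesta>]) -> PolyComm<Vesta> {
    let mut sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
        mina_poseidon::pasta::fq_kimchi::static_params(),
    );
    fold_commitments(&mut sponge, commitments)
}

Because alpha is derived only after all commitments are absorbed, it is bound to the entire list, which is what lets one folded commitment stand in for checking each of the n commitments individually.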