Skip to content

Commit

Permalink
feat: begin SDR-PoRep Halo2 compatibility
Browse files Browse the repository at this point in the history
  • Loading branch information
DrPeterVanNostrand committed Feb 8, 2022
1 parent eedea08 commit 38b4eaa
Show file tree
Hide file tree
Showing 15 changed files with 200 additions and 41 deletions.
2 changes: 2 additions & 0 deletions filecoin-hashers/src/blake2s.rs
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,8 @@ impl From<Blake2sDomain> for Fr {
}

impl Domain for Blake2sDomain {
type Field = Fr;

fn into_bytes(&self) -> Vec<u8> {
self.0.to_vec()
}
Expand Down
18 changes: 15 additions & 3 deletions filecoin-hashers/src/halo/poseidon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use bellperson::{
};
use blstrs::Scalar as Fr;
use ff::PrimeField;
use generic_array::typenum::{Unsigned, U2, U4, U8};
use generic_array::typenum::{Unsigned, U11, U2, U4, U8};
use lazy_static::lazy_static;
use merkletree::{
hash::{Algorithm, Hashable},
Expand Down Expand Up @@ -45,6 +45,16 @@ lazy_static! {
tm.insert::<FieldArity<Fq, PoseidonMDArity>>(PoseidonConstants::new());
tm
};

// Poseidon constants for the Pasta curves' scalar fields (Pallas `Fp` and
// Vesta `Fq`) at arities 2 and 11. Used during column hashing.
pub static ref POSEIDON_CONSTANTS_2_PALLAS: PoseidonConstants<Fp, U2> =
PoseidonConstants::new();
pub static ref POSEIDON_CONSTANTS_11_PALLAS: PoseidonConstants<Fp, U11> =
PoseidonConstants::new();
pub static ref POSEIDON_CONSTANTS_2_VESTA: PoseidonConstants<Fq, U2> =
PoseidonConstants::new();
pub static ref POSEIDON_CONSTANTS_11_VESTA: PoseidonConstants<Fq, U11> =
PoseidonConstants::new();
}

pub struct FieldArity<F, A>(PhantomData<(F, A)>)
Expand Down Expand Up @@ -99,15 +109,15 @@ impl<F: FieldExt> Default for PoseidonDomain<F> {
// Pasta curves.
impl<F: FieldExt> From<Fr> for PoseidonDomain<F> {
fn from(_fr: Fr) -> Self {
panic!("cannot convert BSL12-381 scalar to halo::PoseidonDomain")
panic!("cannot convert BLS12-381 scalar to halo::PoseidonDomain")
}
}

// Disallow converting between fields.
#[allow(clippy::from_over_into)]
impl<F: FieldExt> Into<Fr> for PoseidonDomain<F> {
fn into(self) -> Fr {
panic!("cannot convert halo::PoseidonDomain into BSL12-381 scalar")
panic!("cannot convert halo::PoseidonDomain into BLS12-381 scalar")
}
}

Expand Down Expand Up @@ -196,6 +206,8 @@ impl<F: FieldExt> std::hash::Hash for PoseidonDomain<F> {
}

impl<F: FieldExt> Domain for PoseidonDomain<F> {
type Field = F;

fn into_bytes(&self) -> Vec<u8> {
self.0.as_ref().to_vec()
}
Expand Down
6 changes: 4 additions & 2 deletions filecoin-hashers/src/halo/sha256.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,15 +43,15 @@ impl<F: FieldExt> Into<groth::Sha256Domain> for Sha256Domain<F> {
// Pasta curves.
impl<F: FieldExt> From<Fr> for Sha256Domain<F> {
fn from(_fr: Fr) -> Self {
panic!("cannot convert BSL12-381 scalar to halo::Sha256Domain")
panic!("cannot convert BLS12-381 scalar to halo::Sha256Domain")
}
}

// Disallow converting between fields.
#[allow(clippy::from_over_into)]
impl<F: FieldExt> Into<Fr> for Sha256Domain<F> {
fn into(self) -> Fr {
panic!("cannot convert halo::Sha256Domain into BSL12-381 scalar")
panic!("cannot convert halo::Sha256Domain into BLS12-381 scalar")
}
}

Expand Down Expand Up @@ -142,6 +142,8 @@ impl<F: FieldExt> Element for Sha256Domain<F> {
}

impl<F: FieldExt> Domain for Sha256Domain<F> {
type Field = F;

fn into_bytes(&self) -> Vec<u8> {
self.inner.into_bytes()
}
Expand Down
2 changes: 2 additions & 0 deletions filecoin-hashers/src/poseidon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,8 @@ impl AsRef<[u8]> for PoseidonDomain {
}

impl Domain for PoseidonDomain {
type Field = Fr;

fn into_bytes(&self) -> Vec<u8> {
self.0.to_vec()
}
Expand Down
2 changes: 2 additions & 0 deletions filecoin-hashers/src/sha256.rs
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,8 @@ impl From<Sha256Domain> for Fr {
}

impl Domain for Sha256Domain {
type Field = Fr;

fn into_bytes(&self) -> Vec<u8> {
self.0.to_vec()
}
Expand Down
22 changes: 20 additions & 2 deletions filecoin-hashers/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use bellperson::{
ConstraintSystem, SynthesisError,
};
use blstrs::Scalar as Fr;
use ff::PrimeField;
use ff::{Field, PrimeField};
use merkletree::{
hash::{Algorithm as LightAlgorithm, Hashable as LightHashable},
merkle::Element,
Expand All @@ -35,13 +35,31 @@ pub trait Domain:
+ Element
+ StdHash
{
// TODO (jake): constrain this to `PrimeField<Repr = [u8; 32]>` and add `Domain:
// From<Self::Field> + Into<Self::Field>` and maybe remove the `Fr` dependence.
type Field: PrimeField;

#[allow(clippy::wrong_self_convention)]
fn into_bytes(&self) -> Vec<u8>;
fn try_from_bytes(raw: &[u8]) -> anyhow::Result<Self>;
/// Write itself into the given slice, LittleEndian bytes.
fn write_bytes(&self, _: &mut [u8]) -> anyhow::Result<()>;

fn random<R: RngCore>(rng: &mut R) -> Self;
fn into_field(self) -> Self::Field {
    // Serialize this domain element into the field's canonical byte
    // representation, then reinterpret those bytes as a field element.
    let mut le_bytes = <Self::Field as PrimeField>::Repr::default();
    self.write_bytes(le_bytes.as_mut())
        .expect("domain's field is not 32 bytes");
    Self::Field::from_repr_vartime(le_bytes).expect("from_repr failure")
}

fn from_field(f: Self::Field) -> Self {
    // Round-trip through the field element's canonical byte representation.
    let repr = f.to_repr();
    Self::try_from_bytes(repr.as_ref()).expect("try_from_bytes failure")
}

fn random<R: RngCore>(rng: &mut R) -> Self {
    // Sample a field element first, then convert; this guarantees the
    // resulting domain element is a canonical member of the field.
    let sample = Self::Field::random(rng);
    Self::from_field(sample)
}
}

pub trait HashFunction<T: Domain>: Clone + Debug + Send + Sync + LightAlgorithm<T> {
Expand Down
2 changes: 2 additions & 0 deletions storage-proofs-porep/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@ yastl = "0.1.2"
fil_logger = "0.1"
pairing = "0.21"
blstrs = "0.4.0"
pasta_curves = "0.3.0"
typemap = "0.3.3"

[target."cfg(target_arch = \"aarch64\")".dependencies]
sha2 = { version = "0.9.3", features = ["compress", "asm"] }
Expand Down
15 changes: 14 additions & 1 deletion storage-proofs-porep/benches/parents.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use filecoin_hashers::{blake2s::Blake2sHasher, sha256::Sha256Hasher, Hasher};
use filecoin_hashers::{blake2s::Blake2sHasher, halo, sha256::Sha256Hasher, Hasher};
#[cfg(feature = "cpu-profile")]
use gperftools::profiler::PROFILER;
use pasta_curves::Fp;
use storage_proofs_core::{
api_version::ApiVersion,
drgraph::{Graph, BASE_DEGREE},
Expand Down Expand Up @@ -61,6 +62,18 @@ fn parents_loop_benchmark(c: &mut Criterion) {
let mut parents = vec![0; graph.degree()];
b.iter(|| black_box(parents_loop::<Sha256Hasher, _>(&graph, &mut parents)))
});
// Only bench one of the Pasta fields for now (both Pasta fields should have the same
// benchmark performance).
group.bench_function(format!("Poseidon-pallas-{}", size), |b| {
let graph = pregenerate_graph::<halo::PoseidonHasher<Fp>>(size, ApiVersion::V1_1_0);
let mut parents = vec![0; graph.degree()];
b.iter(|| {
black_box(parents_loop::<halo::PoseidonHasher<Fp>, _>(
&graph,
&mut parents,
))
})
});
}

group.finish();
Expand Down
16 changes: 8 additions & 8 deletions storage-proofs-porep/src/encode.rs
Original file line number Diff line number Diff line change
@@ -1,22 +1,22 @@
use blstrs::Scalar as Fr;
use ff::PrimeField;
use filecoin_hashers::Domain;

pub fn encode<T: Domain>(key: T, value: T) -> T {
let value: Fr = value.into();
let mut result: Fr = key.into();
let value = value.into_field();
let mut result = key.into_field();

encode_fr(&mut result, value);
result.into()
T::from_field(result)
}

pub fn encode_fr(key: &mut Fr, value: Fr) {
/// Encodes `value` into `key` in place via field addition (`key += value`).
pub fn encode_fr<F: PrimeField>(key: &mut F, value: F) {
*key += value;
}

pub fn decode<T: Domain>(key: T, value: T) -> T {
let mut result: Fr = value.into();
let key: Fr = key.into();
let mut result = value.into_field();
let key = key.into_field();

result -= key;
result.into()
T::from_field(result)
}
7 changes: 3 additions & 4 deletions storage-proofs-porep/src/stacked/vanilla/column.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
use std::marker::PhantomData;

use blstrs::Scalar as Fr;
use filecoin_hashers::Hasher;
use filecoin_hashers::{Domain, Hasher};
use serde::{Deserialize, Serialize};
use storage_proofs_core::{
error::Result,
Expand Down Expand Up @@ -39,13 +38,13 @@ impl<H: Hasher> Column<H> {
}

/// Calculate the column hashes `C_i = H(E_i, O_i)` for the passed-in column.
pub fn hash(&self) -> Fr {
pub fn hash(&self) -> <H::Domain as Domain>::Field {
hash_single_column(
&self
.rows
.iter()
.copied()
.map(Into::into)
.map(|domain| domain.into_field())
.collect::<Vec<_>>(),
)
}
Expand Down
9 changes: 4 additions & 5 deletions storage-proofs-porep/src/stacked/vanilla/column_proof.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use blstrs::Scalar as Fr;
use filecoin_hashers::Hasher;
use filecoin_hashers::{Domain, Hasher};
use log::trace;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use storage_proofs_core::{error::Result, merkle::MerkleProofTrait};
Expand Down Expand Up @@ -40,7 +39,7 @@ impl<Proof: MerkleProofTrait> ColumnProof<Proof> {
self.column().get_node_at_layer(layer)
}

pub fn column_hash(&self) -> Fr {
pub fn column_hash(&self) -> <<Proof::Hasher as Hasher>::Domain as Domain>::Field {
self.column.hash()
}

Expand All @@ -49,10 +48,10 @@ impl<Proof: MerkleProofTrait> ColumnProof<Proof> {
challenge: u32,
expected_root: &<Proof::Hasher as Hasher>::Domain,
) -> bool {
let c_i = self.column_hash();
let c_i = <Proof::Hasher as Hasher>::Domain::from_field(self.column_hash());

check_eq!(&self.inclusion_proof.root(), expected_root);
check!(self.inclusion_proof.validate_data(c_i.into()));
check!(self.inclusion_proof.validate_data(c_i));
check!(self.inclusion_proof.validate(challenge as usize));

true
Expand Down
18 changes: 14 additions & 4 deletions storage-proofs-porep/src/stacked/vanilla/encoding_proof.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use std::marker::PhantomData;

use blstrs::Scalar as Fr;
use filecoin_hashers::Hasher;
use ff::PrimeField;
use filecoin_hashers::{Domain, Hasher};
use fr32::bytes_into_fr_repr_safe;
use log::trace;
use serde::{Deserialize, Serialize};
Expand Down Expand Up @@ -50,6 +50,8 @@ impl<H: Hasher> EncodingProof<H> {
bytes_into_fr_repr_safe(hasher.finalize().as_ref()).into()
}

// TODO (jake): bound `StackedDrg`'s `H` and `G` to the same `Domain::Field`
// pub fn verify<G: Hasher>(...) -> bool where G::Domain::Field = H::Domain::Field { ... }
pub fn verify<G: Hasher>(
&self,
replica_id: &H::Domain,
Expand All @@ -58,8 +60,16 @@ impl<H: Hasher> EncodingProof<H> {
) -> bool {
let key = self.create_key(replica_id);

let fr: Fr = (*decoded_node).into();
let encoded_node = encode(key, fr.into());
// TODO (jake): once we bound `H::Domain::Field == G::Domain::Field` we can simplify this
// conversion.
let decoded_node = {
let mut repr = <<H::Domain as Domain>::Field as PrimeField>::Repr::default();
repr.as_mut().copy_from_slice(decoded_node.as_ref());
let f = <<H::Domain as Domain>::Field as PrimeField>::from_repr_vartime(repr)
.expect("from_repr failure");
<H::Domain as Domain>::from_field(f)
};
let encoded_node = encode(key, decoded_node);

check_eq!(exp_encoded_node, &encoded_node);

Expand Down
57 changes: 50 additions & 7 deletions storage-proofs-porep/src/stacked/vanilla/hash.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,60 @@
use std::marker::PhantomData;

use blstrs::Scalar as Fr;
use filecoin_hashers::{POSEIDON_CONSTANTS_11, POSEIDON_CONSTANTS_2};
use neptune::poseidon::Poseidon;
use ff::PrimeField;
use filecoin_hashers::{
halo::poseidon::{
POSEIDON_CONSTANTS_11_PALLAS, POSEIDON_CONSTANTS_11_VESTA, POSEIDON_CONSTANTS_2_PALLAS,
POSEIDON_CONSTANTS_2_VESTA,
},
POSEIDON_CONSTANTS_11, POSEIDON_CONSTANTS_2,
};
use generic_array::typenum::{U11, U2};
use lazy_static::lazy_static;
use neptune::poseidon::{Arity, Poseidon, PoseidonConstants};
use pasta_curves::{Fp, Fq};
use typemap::ShareMap;

lazy_static! {
    // Global lookup table mapping each supported (field, arity) pair to a
    // reference to its statically-allocated Poseidon constants. Covers the
    // BLS12-381 scalar field `Fr` and both Pasta fields (Pallas `Fp`, Vesta
    // `Fq`) at arities 2 and 11.
    pub static ref POSEIDON_CONSTANTS: ShareMap = {
        let mut tm = ShareMap::custom();
        tm.insert::<FieldArity<Fr, U2>>(&*POSEIDON_CONSTANTS_2);
        tm.insert::<FieldArity<Fr, U11>>(&*POSEIDON_CONSTANTS_11);
        tm.insert::<FieldArity<Fp, U2>>(&*POSEIDON_CONSTANTS_2_PALLAS);
        tm.insert::<FieldArity<Fp, U11>>(&*POSEIDON_CONSTANTS_11_PALLAS);
        tm.insert::<FieldArity<Fq, U2>>(&*POSEIDON_CONSTANTS_2_VESTA);
        tm.insert::<FieldArity<Fq, U11>>(&*POSEIDON_CONSTANTS_11_VESTA);
        tm
    };
}

/// Zero-sized marker type pairing a prime field `F` with a Poseidon arity `A`;
/// used as the lookup key into the `POSEIDON_CONSTANTS` typemap.
pub struct FieldArity<F, A>(PhantomData<(F, A)>)
where
    F: PrimeField,
    A: Arity<F>;

// Each `FieldArity<F, A>` key maps to a `'static` reference to the Poseidon
// constants for that (field, arity) combination.
impl<F, A> typemap::Key for FieldArity<F, A>
where
    F: PrimeField,
    A: Arity<F>,
{
    type Value = &'static PoseidonConstants<F, A>;
}

/// Hash all elements in the given column.
pub fn hash_single_column(column: &[Fr]) -> Fr {
pub fn hash_single_column<F: PrimeField>(column: &[F]) -> F {
match column.len() {
2 => {
let mut hasher = Poseidon::new_with_preimage(column, &*POSEIDON_CONSTANTS_2);
hasher.hash()
let consts = &POSEIDON_CONSTANTS
.get::<FieldArity<F, U2>>()
.expect("Poseidon constants not found for field and arity-2");
Poseidon::new_with_preimage(column, consts).hash()
}
11 => {
let mut hasher = Poseidon::new_with_preimage(column, &*POSEIDON_CONSTANTS_11);
hasher.hash()
let consts = &POSEIDON_CONSTANTS
.get::<FieldArity<F, U11>>()
.expect("Poseidon constants not found for field and arity-11");
Poseidon::new_with_preimage(column, consts).hash()
}
_ => panic!("unsupported column size: {}", column.len()),
}
Expand Down
5 changes: 3 additions & 2 deletions storage-proofs-porep/src/stacked/vanilla/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -806,11 +806,12 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
let el: <Tree::Hasher as Hasher>::Domain = store
.read_at((i * nodes_count) + j + chunk * chunk_size)
.expect("store read_at failure");
el.into()
el.into_field()
})
.collect();

*hash = hash_single_column(&data).into();
let digest = hash_single_column(&data);
*hash = <Tree::Hasher as Hasher>::Domain::from_field(digest);
}
});
}
Expand Down
Loading

0 comments on commit 38b4eaa

Please sign in to comment.