refactor: improve visibilities (#65)

* refactor: improve visibilities

* feat: update curve25519-dalek version
ashWhiteHat 2024-04-12 08:15:02 +09:00 committed by GitHub
Parent e0f964c3f9
Commit f25d18a6d0
No known key found for this signature
GPG key ID: B5690EEEBB952194
4 changed files with 75 additions and 91 deletions

View file

@@ -11,9 +11,8 @@ license-file = "LICENSE"
keywords = ["zkSNARKs", "cryptography", "proofs"]
[dependencies]
curve25519-dalek = { version = "3.2.0", features = [
curve25519-dalek = { version = "4.1.1", features = [
"serde",
"u64_backend",
"alloc",
], default-features = false }
merlin = { version = "3.0.0", default-features = false }
@@ -58,9 +57,8 @@ harness = false
required-features = ["std"]
[features]
default = ["std", "simd_backend"]
default = ["std"]
std = [
"curve25519-dalek/std",
"digest/std",
"merlin/std",
"rand/std",
@@ -72,6 +70,5 @@ std = [
"itertools/use_std",
"flate2/rust_backend",
]
simd_backend = ["curve25519-dalek/simd_backend"]
multicore = ["rayon"]
profile = ["colored"]
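
In curve25519-dalek 4.x the u64_backend and simd_backend cargo features no longer exist; the arithmetic backend is selected automatically (and can be overridden with a cfg flag), which is why this diff drops them from the dependency and from [features]. Below is a minimal sketch of a call site that keeps the same shape across 3.x and 4.x; the msm helper is illustrative and not part of this commit.

```rust
// Minimal sketch, assuming curve25519-dalek 4.x with the "alloc" feature:
// the old u64_backend/simd_backend features are gone and the backend is
// picked automatically, so call sites like this compile unchanged.
use curve25519_dalek::constants::RISTRETTO_BASEPOINT_POINT;
use curve25519_dalek::ristretto::RistrettoPoint;
use curve25519_dalek::scalar::Scalar;
use curve25519_dalek::traits::VartimeMultiscalarMul;

// Illustrative helper (not from this commit): variable-time multiscalar multiplication.
fn msm(scalars: &[Scalar], points: &[RistrettoPoint]) -> RistrettoPoint {
    RistrettoPoint::vartime_multiscalar_mul(scalars.iter(), points.iter())
}

fn main() {
    let points = vec![RISTRETTO_BASEPOINT_POINT; 3];
    let scalars: Vec<Scalar> = (1u64..=3).map(Scalar::from).collect();
    // 1*G + 2*G + 3*G == 6*G
    assert_eq!(msm(&scalars, &points), RISTRETTO_BASEPOINT_POINT * Scalar::from(6u64));
}
```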

View file

@@ -1,7 +1,6 @@
use super::group::{GroupElement, VartimeMultiscalarMul, GROUP_BASEPOINT_COMPRESSED};
use super::scalar::Scalar;
use digest::XofReader;
use digest::{ExtendableOutput, Input};
use digest::{ExtendableOutput, Input, XofReader};
use sha3::Shake256;
#[derive(Debug)]
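
The merged import brings the three digest traits used with Shake256 into a single statement. Below is a minimal sketch of the absorb/squeeze pattern those traits enable, assuming the pre-0.9 digest API (Input, ExtendableOutput, XofReader) that this file already imports; hash_to_bytes is an illustrative helper, not part of this commit.

```rust
// Sketch of the XOF pattern behind these imports, assuming the digest 0.8-era
// traits (Input, ExtendableOutput, XofReader) used by this file.
use digest::{ExtendableOutput, Input, XofReader};
use sha3::Shake256;

// Illustrative helper (not from this commit): absorb a label, squeeze 64 bytes.
fn hash_to_bytes(label: &[u8]) -> [u8; 64] {
    let mut shake = Shake256::default();
    shake.input(label);                  // absorb (Input)
    let mut reader = shake.xof_result(); // finalize into an XOF reader (ExtendableOutput)
    let mut buf = [0u8; 64];
    reader.read(&mut buf);               // squeeze (XofReader)
    buf
}
```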

View file

@@ -110,21 +110,24 @@ impl R1CSInstance {
let num_poly_vars_x = num_cons.log_2();
let num_poly_vars_y = (2 * num_vars).log_2();
let mat_A = (0..A.len())
.map(|i| SparseMatEntry::new(A[i].0, A[i].1, A[i].2))
let mat_A = A
.iter()
.map(|(row, col, val)| SparseMatEntry::new(*row, *col, *val))
.collect::<Vec<SparseMatEntry>>();
let mat_B = (0..B.len())
.map(|i| SparseMatEntry::new(B[i].0, B[i].1, B[i].2))
let mat_B = B
.iter()
.map(|(row, col, val)| SparseMatEntry::new(*row, *col, *val))
.collect::<Vec<SparseMatEntry>>();
let mat_C = (0..C.len())
.map(|i| SparseMatEntry::new(C[i].0, C[i].1, C[i].2))
let mat_C = C
.iter()
.map(|(row, col, val)| SparseMatEntry::new(*row, *col, *val))
.collect::<Vec<SparseMatEntry>>();
let poly_A = SparseMatPolynomial::new(num_poly_vars_x, num_poly_vars_y, mat_A);
let poly_B = SparseMatPolynomial::new(num_poly_vars_x, num_poly_vars_y, mat_B);
let poly_C = SparseMatPolynomial::new(num_poly_vars_x, num_poly_vars_y, mat_C);
R1CSInstance {
Self {
num_cons,
num_vars,
num_inputs,
@@ -257,11 +260,7 @@ impl R1CSInstance {
assert_eq!(Az.len(), self.num_cons);
assert_eq!(Bz.len(), self.num_cons);
assert_eq!(Cz.len(), self.num_cons);
let res: usize = (0..self.num_cons)
.map(|i| usize::from(Az[i] * Bz[i] != Cz[i]))
.sum();
res == 0
(0..self.num_cons).all(|i| Az[i] * Bz[i] == Cz[i])
}
pub fn multiply_vec(
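
The rewritten check is exactly the R1CS satisfiability condition: for every constraint i, (Az)[i] * (Bz)[i] must equal (Cz)[i]. Here is a stand-alone illustration over plain integers; is_sat_rows is a hypothetical name, and the real code works over Scalar.

```rust
// Stand-alone illustration of the predicate: the instance is satisfied iff the
// Hadamard (entry-wise) product of Az and Bz equals Cz.
fn is_sat_rows(az: &[u64], bz: &[u64], cz: &[u64]) -> bool {
    az.len() == bz.len()
        && bz.len() == cz.len()
        && (0..az.len()).all(|i| az[i] * bz[i] == cz[i])
}

fn main() {
    assert!(is_sat_rows(&[2, 3], &[5, 4], &[10, 12]));  // 2*5 = 10, 3*4 = 12
    assert!(!is_sat_rows(&[2, 3], &[5, 4], &[10, 13])); // 3*4 != 13
}
```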

View file

@@ -51,18 +51,14 @@ impl Derefs {
pub fn new(row_ops_val: Vec<DensePolynomial>, col_ops_val: Vec<DensePolynomial>) -> Self {
assert_eq!(row_ops_val.len(), col_ops_val.len());
let derefs = {
// combine all polynomials into a single polynomial (used below to produce a single commitment)
let comb = DensePolynomial::merge(row_ops_val.iter().chain(col_ops_val.iter()));
// combine all polynomials into a single polynomial (used below to produce a single commitment)
let comb = DensePolynomial::merge(row_ops_val.iter().chain(col_ops_val.iter()));
Derefs {
row_ops_val,
col_ops_val,
comb,
}
};
derefs
Derefs {
row_ops_val,
col_ops_val,
comb,
}
}
pub fn commit(&self, gens: &PolyCommitmentGens) -> DerefsCommitment {
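
DensePolynomial::merge combines the row and column lookup polynomials so that a single commitment covers all of them. As an assumption about its semantics (the implementation is not shown in this diff), a common way to merge dense multilinear polynomials stored as evaluation vectors is to concatenate them and zero-pad to the next power of two, as in the sketch below; merge_evals is a hypothetical helper.

```rust
// Hedged sketch (an assumption, not this crate's actual implementation): merge
// evaluation vectors by concatenation, then zero-pad to a power-of-two length
// so the result is again a dense multilinear polynomial.
fn merge_evals(polys: &[Vec<u64>]) -> Vec<u64> {
    let mut z: Vec<u64> = polys.iter().flatten().copied().collect();
    let padded_len = z.len().next_power_of_two();
    z.resize(padded_len, 0); // pad with zeros
    z
}

fn main() {
    assert_eq!(merge_evals(&[vec![1, 2], vec![3]]), vec![1, 2, 3, 0]);
}
```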
@@ -343,7 +339,7 @@ impl AppendToTranscript for SparseMatPolyCommitment {
impl SparseMatPolynomial {
pub fn new(num_vars_x: usize, num_vars_y: usize, M: Vec<SparseMatEntry>) -> Self {
SparseMatPolynomial {
Self {
num_vars_x,
num_vars_y,
M,
@@ -377,8 +373,9 @@ impl SparseMatPolynomial {
assert_eq!(sparse_polys[i].num_vars_y, sparse_polys[0].num_vars_y);
}
let N = (0..sparse_polys.len())
.map(|i| sparse_polys[i].get_num_nz_entries())
let N = sparse_polys
.iter()
.map(|sparse_poly| sparse_poly.get_num_nz_entries())
.max()
.unwrap();
@@ -430,13 +427,10 @@ impl SparseMatPolynomial {
assert_eq!(self.num_vars_x.pow2(), eval_table_rx.len());
assert_eq!(self.num_vars_y.pow2(), eval_table_ry.len());
(0..self.M.len())
.map(|i| {
let row = self.M[i].row;
let col = self.M[i].col;
let val = &self.M[i].val;
eval_table_rx[row] * eval_table_ry[col] * val
})
self
.M
.iter()
.map(|SparseMatEntry { row, col, val }| eval_table_rx[*row] * eval_table_ry[*col] * val)
.sum()
}
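
The rewritten iterator computes the sparse multilinear evaluation directly: the sum over non-zero entries of eval_table_rx[row] * eval_table_ry[col] * val, with both equality tables precomputed. A stand-alone integer illustration follows; Entry and evaluate_sparse are hypothetical stand-ins for SparseMatEntry and the method above, with u64 in place of Scalar.

```rust
// Stand-alone illustration of the sparse evaluation: sum over non-zero entries
// of table_rx[row] * table_ry[col] * val, with both lookup tables precomputed.
struct Entry { row: usize, col: usize, val: u64 }

fn evaluate_sparse(entries: &[Entry], table_rx: &[u64], table_ry: &[u64]) -> u64 {
    entries
        .iter()
        .map(|Entry { row, col, val }| table_rx[*row] * table_ry[*col] * val)
        .sum()
}

fn main() {
    let m = [Entry { row: 0, col: 1, val: 7 }, Entry { row: 1, col: 0, val: 5 }];
    // 2*3*7 + 4*6*5 = 42 + 120 = 162
    assert_eq!(evaluate_sparse(&m, &[2, 4], &[6, 3]), 162);
}
```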
@@ -448,25 +442,22 @@ impl SparseMatPolynomial {
let eval_table_rx = EqPolynomial::new(rx.to_vec()).evals();
let eval_table_ry = EqPolynomial::new(ry.to_vec()).evals();
(0..polys.len())
.map(|i| polys[i].evaluate_with_tables(&eval_table_rx, &eval_table_ry))
polys
.iter()
.map(|poly| poly.evaluate_with_tables(&eval_table_rx, &eval_table_ry))
.collect::<Vec<Scalar>>()
}
pub fn multiply_vec(&self, num_rows: usize, num_cols: usize, z: &[Scalar]) -> Vec<Scalar> {
assert_eq!(z.len(), num_cols);
(0..self.M.len())
.map(|i| {
let row = self.M[i].row;
let col = self.M[i].col;
let val = &self.M[i].val;
(row, val * z[col])
})
.fold(vec![Scalar::zero(); num_rows], |mut Mz, (r, v)| {
Mz[r] += v;
self.M.iter().fold(
vec![Scalar::zero(); num_rows],
|mut Mz, SparseMatEntry { row, col, val }| {
Mz[*row] += val * z[*col];
Mz
})
},
)
}
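
The fold above is a sparse matrix-vector product: starting from a zero vector of length num_rows, each entry adds val * z[col] into Mz[row]. A stand-alone integer illustration follows; Entry and multiply_vec here are hypothetical stand-ins, with u64 in place of Scalar.

```rust
// Stand-alone illustration of the fold: sparse matrix-vector product
// Mz[row] += val * z[col] over the non-zero entries, starting from zeros.
struct Entry { row: usize, col: usize, val: u64 }

fn multiply_vec(entries: &[Entry], num_rows: usize, z: &[u64]) -> Vec<u64> {
    entries.iter().fold(vec![0u64; num_rows], |mut mz, Entry { row, col, val }| {
        mz[*row] += val * z[*col];
        mz
    })
}

fn main() {
    // M = [[0, 7], [5, 0]], z = [6, 3]  =>  Mz = [21, 30]
    let m = [Entry { row: 0, col: 1, val: 7 }, Entry { row: 1, col: 0, val: 5 }];
    assert_eq!(multiply_vec(&m, 2, &[6, 3]), vec![21, 30]);
}
```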
pub fn compute_eval_table_sparse(
@@ -477,13 +468,13 @@ impl SparseMatPolynomial {
) -> Vec<Scalar> {
assert_eq!(rx.len(), num_rows);
let mut M_evals: Vec<Scalar> = vec![Scalar::zero(); num_cols];
for i in 0..self.M.len() {
let entry = &self.M[i];
M_evals[entry.col] += rx[entry.row] * entry.val;
}
M_evals
self.M.iter().fold(
vec![Scalar::zero(); num_cols],
|mut M_evals, SparseMatEntry { row, col, val }| {
M_evals[*col] += rx[*row] * val;
M_evals
},
)
}
pub fn multi_commit(
@@ -706,18 +697,18 @@ impl HashLayerProof {
let (rand_mem, rand_ops) = rand;
// decommit ops-addr at rand_ops
let mut eval_ops_addr_vec: Vec<Scalar> = Vec::new();
for i in 0..addr_timestamps.ops_addr.len() {
let eval_ops_addr = addr_timestamps.ops_addr[i].evaluate(rand_ops);
eval_ops_addr_vec.push(eval_ops_addr);
}
let eval_ops_addr_vec = addr_timestamps
.ops_addr
.iter()
.map(|addr| addr.evaluate(rand_ops))
.collect();
// decommit read_ts at rand_ops
let mut eval_read_ts_vec: Vec<Scalar> = Vec::new();
for i in 0..addr_timestamps.read_ts.len() {
let eval_read_ts = addr_timestamps.read_ts[i].evaluate(rand_ops);
eval_read_ts_vec.push(eval_read_ts);
}
let eval_read_ts_vec = addr_timestamps
.read_ts
.iter()
.map(|addr| addr.evaluate(rand_ops))
.collect();
// decommit audit-ts at rand_mem
let eval_audit_ts = addr_timestamps.audit_ts.evaluate(rand_mem);
@@ -738,11 +729,15 @@ impl HashLayerProof {
let (rand_mem, rand_ops) = rand;
// decommit derefs at rand_ops
let eval_row_ops_val = (0..derefs.row_ops_val.len())
.map(|i| derefs.row_ops_val[i].evaluate(rand_ops))
let eval_row_ops_val = derefs
.row_ops_val
.iter()
.map(|row| row.evaluate(rand_ops))
.collect::<Vec<Scalar>>();
let eval_col_ops_val = (0..derefs.col_ops_val.len())
.map(|i| derefs.col_ops_val[i].evaluate(rand_ops))
let eval_col_ops_val = derefs
.col_ops_val
.iter()
.map(|col| col.evaluate(rand_ops))
.collect::<Vec<Scalar>>();
let proof_derefs = DerefsEvalProof::prove(
derefs,
@@ -1581,15 +1576,9 @@ impl SparsePolynomial {
fn compute_chi(a: &[bool], r: &[Scalar]) -> Scalar {
assert_eq!(a.len(), r.len());
let mut chi_i = Scalar::one();
for j in 0..r.len() {
if a[j] {
chi_i *= r[j];
} else {
chi_i *= Scalar::one() - r[j];
}
}
chi_i
a.iter().zip(r.iter()).fold(Scalar::one(), |sum, (a, r)| {
sum * if *a { *r } else { Scalar::one() - r }
})
}
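
The fold computes the equality indicator chi_a(r) = prod_j (r_j if a_j else 1 - r_j), i.e. the multilinear extension of "does r equal the boolean point a". A stand-alone illustration with f64 standing in for the field; chi is a hypothetical stand-in for compute_chi.

```rust
// Illustration of the product computed by compute_chi, with f64 standing in
// for the field: chi_a(r) = prod_j ( r_j if a_j else 1 - r_j ).
fn chi(a: &[bool], r: &[f64]) -> f64 {
    assert_eq!(a.len(), r.len());
    a.iter()
        .zip(r.iter())
        .fold(1.0, |acc, (&a_j, &r_j)| acc * if a_j { r_j } else { 1.0 - r_j })
}

fn main() {
    // On boolean inputs chi is the point indicator: 1 iff r encodes a.
    assert_eq!(chi(&[true, false], &[1.0, 0.0]), 1.0);
    assert_eq!(chi(&[true, false], &[0.0, 0.0]), 0.0);
    // At a non-boolean point it interpolates multilinearly: 0.5 * 0.25 = 0.125.
    assert_eq!(chi(&[true, true], &[0.5, 0.25]), 0.125);
}
```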
// Takes O(n log n). TODO: do this in O(n) where n is the number of entries in Z
@@ -1620,15 +1609,15 @@ mod tests {
let num_vars_x: usize = num_rows.log_2();
let num_vars_y: usize = num_cols.log_2();
let mut M: Vec<SparseMatEntry> = Vec::new();
for _i in 0..num_nz_entries {
M.push(SparseMatEntry::new(
(csprng.next_u64() % (num_rows as u64)) as usize,
(csprng.next_u64() % (num_cols as u64)) as usize,
Scalar::random(&mut csprng),
));
}
let M = (0..num_nz_entries)
.map(|_i| {
SparseMatEntry::new(
(csprng.next_u64() % (num_rows as u64)) as usize,
(csprng.next_u64() % (num_cols as u64)) as usize,
Scalar::random(&mut csprng),
)
})
.collect();
let poly_M = SparseMatPolynomial::new(num_vars_x, num_vars_y, M);
let gens = SparseMatPolyCommitmentGens::new(