-
Notifications
You must be signed in to change notification settings - Fork 2
Implement BDN signature aggregation scheme #34
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
f2f103f
aa0d946
f2d5f47
382a712
0abe7e4
2f41ed4
3aeb0dc
20fed3c
188c652
b461096
f717d52
2f20c0d
83c5d57
0d65adf
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,4 +1,4 @@ | ||
| 26 | ||
| 23 | ||
| API/M | ||
| APIs | ||
| benchmark/GD | ||
|
|
@@ -41,4 +41,5 @@ G1 | |
| G2 | ||
| JSON | ||
| CBOR | ||
| TODO | ||
| TODO | ||
| + | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Do we have unit tests to prove the correctness of this implementation?
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. The coverage was indeed limited; more tests have now been added here: moshababo@b461096 |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -2,16 +2,24 @@ | |
| // SPDX-License-Identifier: Apache-2.0, MIT | ||
|
|
||
| //! BDN (Boneh-Drijvers-Neven) signature aggregation scheme, for preventing rogue public-key attacks. | ||
| //! Those attacks could allow an attacker to forge a public-key and then make a verifiable | ||
| //! signature for an aggregation of signatures. It fixes the situation by adding coefficients to the aggregate. | ||
| //! | ||
| //! NOTE: currently uses standard BLS aggregation without coefficient weighting, hence returns incorrect values compared to go-f3. | ||
| //! See the papers: | ||
| //! - <https://eprint.iacr.org/2018/483.pdf> | ||
| //! - <https://crypto.stanford.edu/~dabo/pubs/papers/BLSmultisig.html> | ||
| //! | ||
| use crate::verifier::BLSError; | ||
| use bls_signatures::{PublicKey, Signature}; | ||
| use bls12_381::{G1Projective, G2Affine, G2Projective}; | ||
| use blake2::Blake2xs; | ||
| use blake2::digest::{ExtendableOutput, Update, XofReader}; | ||
| use bls_signatures::{PublicKey, Serialize, Signature}; | ||
| use bls12_381::{G1Projective, G2Projective, Scalar}; | ||
| use rayon::prelude::*; | ||
|
|
||
| /// BDN aggregation context for managing signature and public key aggregation | ||
| pub struct BDNAggregation { | ||
| pub_keys: Vec<PublicKey>, | ||
| pub(crate) coefficients: Vec<Scalar>, | ||
| pub(crate) terms: Vec<PublicKey>, | ||
| } | ||
|
|
||
| impl BDNAggregation { | ||
|
|
@@ -20,43 +28,114 @@ impl BDNAggregation { | |
| return Err(BLSError::EmptyPublicKeys); | ||
| } | ||
|
|
||
| Ok(Self { pub_keys }) | ||
| let coefficients = Self::calc_coefficients(&pub_keys)?; | ||
| let terms = Self::calc_terms(&pub_keys, &coefficients); | ||
|
|
||
| Ok(Self { | ||
| coefficients, | ||
| terms, | ||
| }) | ||
| } | ||
|
|
||
| /// Aggregates signatures using standard BLS aggregation | ||
| /// TODO: Implement BDN aggregation scheme: https://github.com/ChainSafe/rust-f3/issues/29 | ||
| pub fn aggregate_sigs(&self, sigs: Vec<Signature>) -> Result<Signature, BLSError> { | ||
| if sigs.len() != self.pub_keys.len() { | ||
| /// Aggregates signatures using BDN aggregation with coefficients. | ||
| /// Computes: `sum((coef_i + 1) * sig_i)` for signatures at the given indices | ||
| pub fn aggregate_sigs( | ||
| &self, | ||
| indices: &[u64], | ||
| sigs: &[Signature], | ||
| ) -> Result<Signature, BLSError> { | ||
| if sigs.len() != indices.len() { | ||
| return Err(BLSError::LengthMismatch { | ||
| pub_keys: self.pub_keys.len(), | ||
| pub_keys: indices.len(), | ||
| sigs: sigs.len(), | ||
| }); | ||
| } | ||
|
|
||
| // Standard BLS aggregation | ||
| for &idx in indices { | ||
| if idx as usize >= self.coefficients.len() { | ||
| return Err(BLSError::SignerIndexOutOfRange(idx as usize)); | ||
| } | ||
| } | ||
|
|
||
| let mut agg_point = G2Projective::identity(); | ||
| for sig in sigs { | ||
| let sig: G2Affine = sig.into(); | ||
| agg_point += sig; | ||
| for (sig, &idx) in sigs.iter().zip(indices.iter()) { | ||
| let coef = self.coefficients[idx as usize]; | ||
| let sig_point: G2Projective = (*sig).into(); | ||
| let sig_c = sig_point * coef; | ||
| let sig_c = sig_c + sig_point; | ||
|
|
||
| agg_point += sig_c; | ||
| } | ||
|
|
||
| // Convert back to Signature | ||
| let agg_sig: Signature = agg_point.into(); | ||
| Ok(agg_sig) | ||
| } | ||
|
|
||
| /// Aggregates public keys using standard BLS aggregation | ||
| /// TODO: Implement BDN aggregation scheme: https://github.com/ChainSafe/rust-f3/issues/29 | ||
| pub fn aggregate_pub_keys(&self) -> Result<PublicKey, BLSError> { | ||
| // Standard BLS aggregation | ||
| /// Aggregates public keys at the given indices using BDN aggregation with coefficients. | ||
| /// Computes: `sum((coef_i + 1) * pub_key_i)` | ||
| pub fn aggregate_pub_keys(&self, indices: &[u64]) -> Result<PublicKey, BLSError> { | ||
| for &idx in indices { | ||
| if idx as usize >= self.terms.len() { | ||
| return Err(BLSError::SignerIndexOutOfRange(idx as usize)); | ||
| } | ||
| } | ||
|
|
||
| // Sum of pre-computed terms (which are already `(coef_i + 1) * pub_key_i`) | ||
| let mut agg_point = G1Projective::identity(); | ||
| for pub_key in &self.pub_keys { | ||
| let pub_key_point: G1Projective = (*pub_key).into(); | ||
| agg_point += pub_key_point; | ||
| for &idx in indices { | ||
| let term_point: G1Projective = self.terms[idx as usize].into(); | ||
| agg_point += term_point; | ||
| } | ||
|
|
||
| // Convert back to PublicKey | ||
| let agg_pub_key: PublicKey = agg_point.into(); | ||
| Ok(agg_pub_key) | ||
| } | ||
|
coderabbitai[bot] marked this conversation as resolved.
|
||
|
|
||
| pub fn calc_coefficients(pub_keys: &[PublicKey]) -> Result<Vec<Scalar>, BLSError> { | ||
| let mut hasher = Blake2xs::new(0xFFFF); | ||
|
|
||
| // Hash all public keys | ||
| for pub_key in pub_keys { | ||
| let bytes = pub_key.as_bytes(); | ||
| hasher.update(&bytes); | ||
| } | ||
|
|
||
| // Read 16 bytes per public key | ||
| let mut reader = hasher.finalize_xof(); | ||
| let mut output = vec![0u8; pub_keys.len() * 16]; | ||
| reader.read(&mut output); | ||
|
|
||
| // Convert every consecutive 16 bytes chunk to a scalar | ||
| let mut coefficients = Vec::with_capacity(pub_keys.len()); | ||
| for i in 0..pub_keys.len() { | ||
| let chunk = &output[i * 16..(i + 1) * 16]; | ||
|
|
||
| // Convert 16 bytes to 32 bytes, for scalar (pad with zeros) | ||
| let mut bytes_32 = [0u8; 32]; | ||
| bytes_32[..16].copy_from_slice(chunk); | ||
|
|
||
| // BLS12-381 scalars expect a little-endian byte representation | ||
| let scalar = Scalar::from_bytes(&bytes_32) | ||
| .into_option() | ||
| .ok_or(BLSError::InvalidScalar)?; | ||
| coefficients.push(scalar); | ||
| } | ||
|
|
||
| Ok(coefficients) | ||
| } | ||
|
Comment on lines
+96
to
+127
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion | 🟠 Major | 🧩 Analysis chain: Coefficient derivation — tighten XOF usage, add domain separation, and use a uniform scalar mapping.
Apply: - pub fn calc_coefficients(pub_keys: &[PublicKey]) -> Result<Vec<Scalar>, BLSError> {
- let mut hasher = Blake2xs::new(0xFFFF);
+ pub fn calc_coefficients(pub_keys: &[PublicKey]) -> Result<Vec<Scalar>, BLSError> {
+ // 64 bytes per coefficient; uniform reduction via from_bytes_wide
+ let out_len = pub_keys.len() * 64;
+ let mut hasher = Blake2xs::new(out_len);
- // Hash all public keys
+ // Domain separation + all public keys (compressed)
+ hasher.update(b"F3-BDN-COEFF-V1");
for pub_key in pub_keys {
let bytes = pub_key.as_bytes();
hasher.update(&bytes);
}
- // Read 16 bytes per public key
let mut reader = hasher.finalize_xof();
- let mut output = vec![0u8; pub_keys.len() * 16];
+ let mut output = vec![0u8; out_len];
reader.read(&mut output);
- // Convert every consecutive 16 bytes chunk to a scalar
let mut coefficients = Vec::with_capacity(pub_keys.len());
- for i in 0..pub_keys.len() {
- let chunk = &output[i * 16..(i + 1) * 16];
-
- // Convert 16 bytes to 32 bytes, for scalar (pad with zeros)
- let mut bytes_32 = [0u8; 32];
- bytes_32[..16].copy_from_slice(chunk);
-
- // BLS12-381 scalars expects little-endian byte representation
- let scalar = Scalar::from_bytes(&bytes_32);
- if scalar.is_some().into() {
- coefficients.push(scalar.unwrap());
- } else {
- return Err(BLSError::InvalidScalar);
- }
+ for i in 0..pub_keys.len() {
+ let chunk = &output[i * 64..(i + 1) * 64];
+ let mut wide = [0u8; 64];
+ wide.copy_from_slice(chunk);
+ let scalar = Scalar::from_bytes_wide(&wide); // uniform mod r
+ coefficients.push(scalar);
}
Ok(coefficients)
}Verification needed: does go-f3 include message in the coefficient hash (L vs L||msg)? Aligning is critical for cross‑impl compatibility. 🌐 Web query: 💡 Result: Short answer: coefficients are derived from H(L) only (i.e., from the public-key list), not H(L || msg). Evidence:
References:
If you want, I can open the exact kyber `bdn.go` source and paste the exact file and line reference. Summary: tighten XOF usage and scalar mapping while maintaining BDN compatibility.
🤖 Prompt for AI Agents |
||
|
|
||
| pub fn calc_terms(pub_keys: &[PublicKey], coefficients: &[Scalar]) -> Vec<PublicKey> { | ||
| pub_keys | ||
| .par_iter() | ||
| .enumerate() | ||
| .map(|(i, pub_key)| { | ||
| let pub_key_point: G1Projective = (*pub_key).into(); | ||
| let pub_c = pub_key_point * coefficients[i]; | ||
| let term = pub_c + pub_key_point; | ||
| term.into() | ||
| }) | ||
| .collect() | ||
| } | ||
| } | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Remove the stray `+` character.
The `+` character does not belong in a spell-check dictionary file. Dictionary files typically contain one term per line, without special characters. Apply this diff to remove it:
-+📝 Committable suggestion
🤖 Prompt for AI Agents