Add interface to expose the mnemonic checksum #62

Merged · 1 commit · Jan 29, 2024
74 changes: 74 additions & 0 deletions src/lib.rs
@@ -606,6 +606,35 @@ impl Mnemonic {
        let (arr, len) = self.to_entropy_array();
        arr[0..len].to_vec()
    }

    /// Return the checksum value for the Mnemonic.
    ///
    /// The checksum value is the numerical value of the first `self.word_count() / 3` bits of the
    /// [SHA256](https://en.wikipedia.org/wiki/SHA-2) digest of the Mnemonic's entropy, and is
    /// encoded by the last word of the mnemonic sentence.
    ///
    /// This is useful for validating the integrity of a mnemonic: for a valid mnemonic `m`, the
    /// following assertion should hold:
    ///
    /// ```rust
    /// # use bip39::Mnemonic;
    /// # use bitcoin_hashes::{Hash, sha256, hex::FromHex};
    /// # let ent = Vec::from_hex("98FE3D0FF6E955A484B0A1D0C9CE10F6").unwrap();
    /// # let m = Mnemonic::from_entropy(&ent).unwrap();
    /// let checksum_width = m.word_count() / 3;
    /// let shift_width = 8 - checksum_width;
    /// assert_eq!(sha256::Hash::hash(&m.to_entropy())[0] >> shift_width, m.checksum());
    /// ```
    ///
    /// Note that since this library constrains initialization of `Mnemonic` instances through an
    /// API that guarantees validity, all `Mnemonic` instances should be valid and the above
    /// condition should hold.
    pub fn checksum(&self) -> u8 {
        let word_count = self.word_count();
        let last_word = self.words[word_count - 1];
        // The checksum occupies the low `word_count / 3` bits of the last
        // word's 11-bit index, so mask off everything above that width.
        let mask = 0xFF >> (8 - word_count / 3);
        last_word as u8 & mask
    }
}
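To make the bit arithmetic concrete: for a 12-word mnemonic, `word_count / 3` is 4, so the checksum is the top 4 bits of the first byte of the SHA256 digest, and the mask over the last word is `0xFF >> 4 == 0x0F`. Below is a minimal standalone sketch of that relationship, reusing the entropy from the doc example above; the `main` wrapper and variable names are illustrative and not part of the PR.

```rust
use bip39::Mnemonic;
use bitcoin_hashes::{sha256, Hash};

fn main() {
    // 16 bytes = 128 bits of entropy => a 12-word mnemonic.
    let entropy = [
        0x98, 0xFE, 0x3D, 0x0F, 0xF6, 0xE9, 0x55, 0xA4,
        0x84, 0xB0, 0xA1, 0xD0, 0xC9, 0xCE, 0x10, 0xF6,
    ];
    let m = Mnemonic::from_entropy(&entropy).unwrap();
    assert_eq!(m.word_count(), 12);

    // 12 / 3 = 4 checksum bits, so the method's mask is 0xFF >> 4 == 0x0F.
    let checksum_width = m.word_count() / 3;
    assert_eq!(checksum_width, 4);

    // The checksum equals the top 4 bits of SHA256(entropy)'s first byte.
    let digest = sha256::Hash::hash(&entropy);
    assert_eq!(m.checksum(), digest[0] >> (8 - checksum_width));
}
```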

impl fmt::Display for Mnemonic {
@@ -879,6 +908,51 @@ mod tests {
        }
    }

    #[test]
    fn checksum() {
        let vectors = [
"00000000000000000000000000000000",
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"80808080808080808080808080808080",
"ffffffffffffffffffffffffffffffff",
"000000000000000000000000000000000000000000000000",
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"808080808080808080808080808080808080808080808080",
"ffffffffffffffffffffffffffffffffffffffffffffffff",
"0000000000000000000000000000000000000000000000000000000000000000",
"7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f",
"8080808080808080808080808080808080808080808080808080808080808080",
"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"9e885d952ad362caeb4efe34a8e91bd2",
"6610b25967cdcca9d59875f5cb50b0ea75433311869e930b",
"68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c",
"c0ba5a8e914111210f2bd131f3d5e08d",
"6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3",
"9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863",
"23db8160a31d3e0dca3688ed941adbf3",
"8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0",
"066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad",
"f30f8c1da665478f49b001d94c5fc452",
"c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05",
"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f",
"ed3b83f0d7913a19667a1cfd7298cd57",
"70639a4e81b151277b345476d169a3743ff3c141",
"ba2520298b92063a7a0ee1d453ba92513af81d4f86e1d336",
"9447d2cf44349cd88a58f5b4ff6f83b9a2d54c42f033e12b8e4d00cc",
"38711e550dc6557df8082b2a87f7860ebbe47ea5867a7068f5f0f5b85db68be8",
];

        for entropy_hex in &vectors {
            let ent = Vec::from_hex(entropy_hex).unwrap();
            let m = Mnemonic::from_entropy(&ent).unwrap();
            let word_count = m.word_count();
            let cs = m.checksum();
            let digest = sha256::Hash::hash(&ent);
            assert_eq!(digest[0] >> (8 - word_count / 3), cs);
        }
    }
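As the doc comment notes, the checksum is what makes a mnemonic sentence self-validating: altering the final word changes its embedded checksum bits, and parsing fails. A small illustration of that, assuming the crate's `Mnemonic::parse` constructor (not exercised in this diff) and the well-known all-`abandon` BIP39 test vector:

```rust
use bip39::Mnemonic;

fn main() {
    // Standard BIP39 test vector for 128 zero bits of entropy;
    // the final word "about" carries the 4 checksum bits.
    let valid = "abandon abandon abandon abandon abandon abandon \
                 abandon abandon abandon abandon abandon about";
    assert!(Mnemonic::parse(valid).is_ok());

    // Swapping the last word corrupts the checksum, so parsing fails.
    let corrupted = "abandon abandon abandon abandon abandon abandon \
                     abandon abandon abandon abandon abandon abandon";
    assert!(Mnemonic::parse(corrupted).is_err());
}
```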

    #[test]
    fn test_invalid_engish() {
        // correct phrase: