Refactor the context canonicalization

Rodolphe Bréard 2024-02-17 16:29:54 +01:00
parent d679546331
commit 65796a2c91
6 changed files with 63 additions and 98 deletions
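In short: the key context becomes a list of plain strings (`&[&str]`) that is canonicalized internally (each element base64url-encoded without padding, joined by `:`), replacing the previous per-element hashing of arbitrary byte slices. A rough caller-side sketch of the new `encrypt` signature follows; only the signature itself comes from this commit, while the surrounding function, the context values, and the import path are made up for illustration (the crate name is not shown in this diff):

// Hypothetical caller of the new API.
// use <crate>::{encrypt, Error, InputKeyMaterialList}; // crate name not shown here

fn store_secret(ikml: &InputKeyMaterialList) -> Result<String, Error> {
    // Key context: now plain strings, canonicalized internally.
    let key_context = ["db", "users", "email"];
    // Data context: unchanged, still arbitrary bytes.
    let data_context = [b"user:42".as_slice()];
    encrypt(ikml, &key_context, b"alice@example.org", &data_context)
}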

View file

@@ -19,7 +19,6 @@ i-understand-and-accept-the-risks = []

 [dependencies]
 base64ct = { version = "1.6.0", default-features = false, features = ["std"] }
-blake3 = { version = "1.5.0", default-features = false, features = ["std", "traits-preview"] }
-digest = { version = "0.10.7", default-features = false, features = ["std"] }
+blake3 = { version = "1.5.0", default-features = false }
 getrandom = { version = "0.2.12", default-features = false }
 thiserror = { version = "1.0.57", default-features = false }

View file

@@ -1,9 +1,9 @@
-use crate::key::derive_key;
+use crate::kdf::derive_key;
 use crate::{Error, InputKeyMaterialList};

 pub fn encrypt(
     ikml: &InputKeyMaterialList,
-    key_context: &[impl AsRef<[u8]>],
+    key_context: &[&str],
     data: impl AsRef<[u8]>,
     data_context: &[impl AsRef<[u8]>],
 ) -> Result<String, Error> {
@@ -14,7 +14,7 @@ pub fn encrypt(

 pub fn decrypt(
     ikml: &InputKeyMaterialList,
-    key_context: &[impl AsRef<[u8]>],
+    key_context: &[&str],
     data: impl AsRef<[u8]>,
     data_context: &[impl AsRef<[u8]>],
 ) -> Result<Vec<u8>, Error> {

View file

@@ -1,9 +1,57 @@
-pub(crate) fn blake3_derive(context: &[u8], ikm: &[u8]) -> Vec<u8> {
-    // TODO: remove this hack as soon as `blake3::derive_key` accepts bytes
-    use std::fmt::Write;
-    let context: String = context.iter().fold(String::new(), |mut output, b| {
-        let _ = write!(output, "{b:02x}");
-        output
-    });
-    blake3::derive_key(&context, ikm).to_vec()
-}
+use crate::ikm::InputKeyMaterial;
+use base64ct::{Base64UrlUnpadded, Encoding};
+
+pub(crate) type KdfFunction = dyn Fn(&str, &[u8]) -> Vec<u8>;
+
+const CANONICALIZATION_BUFFER_SIZE: usize = 1024;
+const CANONICALIZATION_SEPARATOR: &str = ":";
+
+fn canonicalize(key_context: &[&str]) -> String {
+    match key_context.len() {
+        0 => String::new(),
+        1 => key_context[0].to_string(),
+        _ => {
+            let mut ret = String::with_capacity(CANONICALIZATION_BUFFER_SIZE);
+            for (i, ctx_elem) in key_context.iter().enumerate() {
+                if i != 0 {
+                    ret += CANONICALIZATION_SEPARATOR;
+                }
+                ret += &Base64UrlUnpadded::encode_string(ctx_elem.as_bytes());
+            }
+            ret
+        }
+    }
+}
+
+pub(crate) fn derive_key(ikm: &InputKeyMaterial, key_context: &[&str]) -> Vec<u8> {
+    let key_context = canonicalize(key_context);
+    let kdf = ikm.scheme.get_kdf();
+    kdf(&key_context, &ikm.content)
+}
+
+pub(crate) fn blake3_derive(context: &str, ikm: &[u8]) -> Vec<u8> {
+    blake3::derive_key(context, ikm).to_vec()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn canonicalize_empty() {
+        let canon = canonicalize(&[]);
+        assert_eq!(canon, String::new());
+    }
+
+    #[test]
+    fn canonicalize_one() {
+        let canon = canonicalize(&["test"]);
+        assert_eq!(&canon, "test");
+    }
+
+    #[test]
+    fn canonicalize_many() {
+        let canon = canonicalize(&["test", "bis", "ter", ""]);
+        assert_eq!(&canon, "dGVzdA:Ymlz:dGVy:");
+    }
+}
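As a side note, the canonical form asserted in `canonicalize_many` can be reproduced with `base64ct` alone. The standalone sketch below is not part of the crate; it simply encodes each element as unpadded base64url and joins the results with `:`, assuming base64ct 1.6 with its string-encoding support enabled:

use base64ct::{Base64UrlUnpadded, Encoding};

fn main() {
    let parts = ["test", "bis", "ter", ""];
    let canon = parts
        .iter()
        .map(|p| Base64UrlUnpadded::encode_string(p.as_bytes()))
        .collect::<Vec<_>>()
        .join(":");
    assert_eq!(canon, "dGVzdA:Ymlz:dGVy:"); // matches the test vector above
}

This mirrors only the multi-element branch; as the first two tests show, empty contexts map to an empty string and single-element contexts are passed through verbatim.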

View file

@ -1,69 +0,0 @@
use crate::ikm::InputKeyMaterial;
use crate::scheme::Scheme;
use digest::Digest;
fn canonicalize(scheme: Scheme, key_context: &[impl AsRef<[u8]>]) -> Vec<u8> {
match key_context.len() {
0 => Vec::new(),
1 => key_context[0].as_ref().into(),
n => {
let mut ret = Vec::with_capacity(n * scheme.key_size());
for ctx_elem in key_context {
let mut elem_hasher = scheme.canonicalization_hasher();
elem_hasher.update(ctx_elem.as_ref());
ret.extend_from_slice(&elem_hasher.finalize());
}
ret
}
}
}
pub(crate) fn derive_key(ikm: &InputKeyMaterial, key_context: &[impl AsRef<[u8]>]) -> Vec<u8> {
let key_context = canonicalize(ikm.scheme, key_context);
let kdf = ikm.scheme.get_kdf();
kdf(&key_context, &ikm.content)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::scheme::Scheme;
const EMPTY_CTX: &[[u8; 0]] = &[];
#[test]
fn canonicalize_empty() {
let canon = canonicalize(Scheme::XChaCha20Poly1305WithBlake3, EMPTY_CTX);
assert_eq!(canon, vec![]);
}
#[test]
fn canonicalize_one() {
let s = "test";
let canon = canonicalize(Scheme::XChaCha20Poly1305WithBlake3, &[s]);
assert_eq!(canon, s.as_bytes());
}
#[test]
fn blake3_canonicalize_many() {
let canon = canonicalize(
Scheme::XChaCha20Poly1305WithBlake3,
&["test", "bis", "ter", ""],
);
assert_eq!(
canon,
[
0x48, 0x78, 0xca, 0x04, 0x25, 0xc7, 0x39, 0xfa, 0x42, 0x7f, 0x7e, 0xda, 0x20, 0xfe,
0x84, 0x5f, 0x6b, 0x2e, 0x46, 0xba, 0x5f, 0xe2, 0xa1, 0x4d, 0xf5, 0xb1, 0xe3, 0x2f,
0x50, 0x60, 0x32, 0x15, 0x62, 0x01, 0x8f, 0x96, 0x98, 0x6e, 0x0e, 0x48, 0x8e, 0x07,
0x4d, 0xa6, 0xb7, 0x28, 0xbc, 0x24, 0x2b, 0xf0, 0xcc, 0x6e, 0x5a, 0x5d, 0x4e, 0x78,
0x02, 0x14, 0x0a, 0x52, 0xbf, 0xe1, 0x58, 0x86, 0x91, 0x9e, 0x10, 0xc3, 0xe4, 0xbf,
0x61, 0x2f, 0x36, 0x39, 0xbe, 0x86, 0xb9, 0x34, 0x6b, 0xc3, 0x4a, 0x8c, 0x89, 0x14,
0xbd, 0x78, 0x9b, 0x47, 0x79, 0xf7, 0xc9, 0x83, 0x32, 0x80, 0x1c, 0x1b, 0xaf, 0x13,
0x49, 0xb9, 0xf5, 0xf9, 0xa1, 0xa6, 0xa0, 0x40, 0x4d, 0xea, 0x36, 0xdc, 0xc9, 0x49,
0x9b, 0xcb, 0x25, 0xc9, 0xad, 0xc1, 0x12, 0xb7, 0xcc, 0x9a, 0x93, 0xca, 0xe4, 0x1f,
0x32, 0x62
]
);
}
}

View file

@@ -5,8 +5,6 @@ mod error;
 mod ikm;
 #[cfg(feature = "encryption")]
 mod kdf;
-#[cfg(feature = "encryption")]
-mod key;
 #[cfg(any(feature = "encryption", feature = "ikm-management"))]
 mod scheme;

View file

@@ -1,3 +1,4 @@
+use crate::kdf::KdfFunction;
 use crate::Error;

 #[derive(Copy, Clone, Debug, PartialEq)]
@@ -6,21 +7,9 @@ pub enum Scheme {
 }

 impl Scheme {
-    pub(crate) fn canonicalization_hasher(&self) -> impl digest::Digest {
+    pub(crate) fn get_kdf(&self) -> Box<KdfFunction> {
         match self {
-            Scheme::XChaCha20Poly1305WithBlake3 => blake3::Hasher::new(),
-        }
-    }
-
-    pub(crate) fn get_kdf(&self) -> impl Fn(&[u8], &[u8]) -> Vec<u8> {
-        match self {
-            Scheme::XChaCha20Poly1305WithBlake3 => crate::kdf::blake3_derive,
-        }
-    }
-
-    pub(crate) fn key_size(&self) -> usize {
-        match self {
-            Scheme::XChaCha20Poly1305WithBlake3 => 32,
+            Scheme::XChaCha20Poly1305WithBlake3 => Box::new(crate::kdf::blake3_derive),
         }
     }
 }
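Switching the return type from `impl Fn(&[u8], &[u8]) -> Vec<u8>` to `Box<KdfFunction>` trades static dispatch for a trait object, so future `Scheme` variants can map to KDFs with different concrete callables behind a single signature. A minimal sketch of that dispatch pattern, independent of this crate (the enum, the second closure, and the context string are placeholders):

// Illustration of the boxed-KDF dispatch pattern, not the crate's actual code.
type KdfFunction = dyn Fn(&str, &[u8]) -> Vec<u8>;

enum DemoScheme {
    Blake3Like,
    ConcatDemo,
}

impl DemoScheme {
    fn get_kdf(&self) -> Box<KdfFunction> {
        match self {
            // Each arm can return a different concrete callable; boxing
            // erases the type so the method signature stays uniform.
            DemoScheme::Blake3Like => {
                Box::new(|ctx: &str, ikm: &[u8]| blake3::derive_key(ctx, ikm).to_vec())
            }
            DemoScheme::ConcatDemo => Box::new(|ctx: &str, ikm: &[u8]| {
                // Placeholder "KDF" for illustration only, not secure.
                let mut out = ctx.as_bytes().to_vec();
                out.extend_from_slice(ikm);
                out
            }),
        }
    }
}

fn main() {
    let kdf = DemoScheme::Blake3Like.get_kdf();
    assert_eq!(kdf("demo context v1", b"input key material").len(), 32);
    let kdf = DemoScheme::ConcatDemo.get_kdf();
    assert!(!kdf("demo context v1", b"input key material").is_empty());
}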