age: Use rayon for processing STREAM chunks in parallel

Jack Grigg 2020-11-04 21:08:45 +00:00
parent eea64b8cd2
commit 4f72ab0eca
3 changed files with 24 additions and 18 deletions

Cargo.lock (generated)

@@ -76,6 +76,7 @@ dependencies = [
  "quickcheck_macros",
  "rand 0.7.3",
  "rand 0.8.4",
+ "rayon",
  "rpassword",
  "rsa",
  "rust-embed",

Cargo.toml

@@ -75,6 +75,7 @@ rust-embed = "6"
 # Performance
 num_cpus = "1.0"
+rayon = "1.5"
 # Common CLI dependencies
 console = { version = "0.15", optional = true, default-features = false }
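
For context on the new dependency: importing rayon's prelude (as the stream hunk below does) brings parallel counterparts of the standard iterator methods into scope, plus adapters such as par_bridge() for serial iterators that have no native parallel form. The sketch below is a minimal illustration of both and is not taken from this repository's code:

    use rayon::prelude::*;

    fn main() {
        let data: Vec<u64> = (1..=1_000).collect();

        // `par_iter()` runs the map/sum across Rayon's global thread pool.
        let sum: u64 = data.par_iter().map(|x| x * x).sum();
        assert_eq!(sum, 333_833_500);

        // `par_bridge()` parallelizes an ordinary serial iterator; note that
        // it does not preserve the ordering of the items it yields.
        let multiples_of_three = (1..=1_000).par_bridge().filter(|x| x % 3 == 0).count();
        assert_eq!(multiples_of_three, 333);
    }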

age/src/primitives/stream.rs

@@ -7,6 +7,7 @@ use chacha20poly1305::{
 };
 use lazy_static::lazy_static;
 use pin_project::pin_project;
+use rayon::prelude::*;
 use std::cmp;
 use std::convert::TryInto;
 use std::io::{self, Read, Seek, SeekFrom, Write};
@@ -51,9 +52,9 @@ impl Nonce {
         self.0 = u128::from(val) << 8;
     }

-    fn increment_counter(&mut self) {
+    fn increment_counter(&mut self, by: usize) {
         // Increment the 11-byte counter
-        self.0 += 1 << 8;
+        self.0 += (by as u128) << 8;
         if self.0 >> (8 * 12) != 0 {
             panic!("We overflowed the nonce!");
         }
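
To make the counter arithmetic concrete, here is a self-contained sketch (illustrative only, not the crate's actual code) of the nonce layout this hunk assumes: a 12-byte value kept in a u128, with the 1-byte last-chunk flag in the low byte and the 11-byte chunk counter above it, so advancing the counter by `by` chunks adds (by as u128) << 8. The to_bytes body below is an assumption made for the sketch.

    use std::convert::TryInto;

    struct Nonce(u128);

    impl Nonce {
        // Mirrors the signature introduced above: advance the counter by
        // `by` chunks. The counter field starts at bit 8, hence the shift.
        fn increment_counter(&mut self, by: usize) {
            self.0 += (by as u128) << 8;
            if self.0 >> (8 * 12) != 0 {
                panic!("We overflowed the nonce!");
            }
        }

        // Assumed serialization for this sketch: the low 12 bytes, big-endian.
        fn to_bytes(&self) -> [u8; 12] {
            self.0.to_be_bytes()[4..].try_into().unwrap()
        }
    }

    fn main() {
        let mut nonce = Nonce(0);
        nonce.increment_counter(3);
        let bytes = nonce.to_bytes();
        assert_eq!(bytes[10], 3); // counter == 3 in the 11-byte field
        assert_eq!(bytes[11], 0); // last-chunk flag still clear
    }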
@@ -197,26 +198,29 @@ impl Stream {
         let num_chunks = chunks.len();
         let mut encrypted = vec![0; chunks_len + TAG_SIZE * num_chunks];

-        for (i, (encrypted, chunk)) in encrypted
+        encrypted
             .chunks_mut(ENCRYPTED_CHUNK_SIZE)
             .zip(chunks)
             .enumerate()
-        {
-            if i + 1 == num_chunks {
-                self.nonce.set_last(last).unwrap();
-            }
+            .par_bridge()
+            .for_each_with(self.nonce, |nonce, (i, (encrypted, chunk))| {
+                nonce.increment_counter(i);
+                if i + 1 == num_chunks {
+                    nonce.set_last(last).unwrap();
+                }

-            let (buffer, tag) = encrypted.split_at_mut(chunk.len());
-            buffer.copy_from_slice(chunk);
-            tag.copy_from_slice(
-                self.aead
-                    .encrypt_in_place_detached(&self.nonce.to_bytes().into(), &[], buffer)
-                    .expect("we will never hit chacha20::MAX_BLOCKS because of the chunk size")
-                    .as_slice(),
-            );
+                let (buffer, tag) = encrypted.split_at_mut(chunk.len());
+                buffer.copy_from_slice(chunk);
+                tag.copy_from_slice(
+                    self.aead
+                        .encrypt_in_place_detached(&nonce.to_bytes().into(), &[], buffer)
+                        .expect("we will never hit chacha20::MAX_BLOCKS because of the chunk size")
+                        .as_slice(),
+                );
+            });

-            self.nonce.increment_counter();
-        }
+        self.nonce.increment_counter(num_chunks);
+        self.nonce.set_last(last).unwrap();

         Ok(encrypted)
     }
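
For readers new to the Rayon APIs used above: par_bridge() turns the serial chunks_mut(..).zip(..).enumerate() chain into a parallel iterator, and for_each_with hands each Rayon job its own copy of the starting nonce. The sketch below is a minimal, self-contained illustration of that pattern with toy data and a placeholder seal() instead of the real AEAD; it also derives each chunk's nonce from a fresh copy of the carried value, so the result does not depend on how Rayon batches items per job. The constants and helper names are assumptions for the example, not this crate's API.

    use rayon::prelude::*;
    use std::convert::TryInto;

    const CHUNK_SIZE: usize = 16;
    const TAG_SIZE: usize = 4;

    // Toy stand-in for the real nonce; same layout as the sketch above.
    #[derive(Clone, Copy)]
    struct Nonce(u128);

    impl Nonce {
        fn increment_counter(&mut self, by: usize) {
            self.0 += (by as u128) << 8;
        }
    }

    // Placeholder "seal": copy the chunk and append a fake 4-byte tag derived
    // from the nonce counter. Not cryptography; it only shapes the output the
    // way the real loop does (ciphertext followed by its tag).
    fn seal(nonce: &Nonce, chunk: &[u8], out: &mut [u8]) {
        let (buffer, tag) = out.split_at_mut(chunk.len());
        buffer.copy_from_slice(chunk);
        tag.copy_from_slice(&((nonce.0 >> 8) as u32).to_be_bytes());
    }

    fn main() {
        let plaintext = vec![7u8; CHUNK_SIZE * 5];
        let chunks: Vec<&[u8]> = plaintext.chunks(CHUNK_SIZE).collect();
        let mut encrypted = vec![0u8; plaintext.len() + TAG_SIZE * chunks.len()];

        let start = Nonce(0);
        encrypted
            .chunks_mut(CHUNK_SIZE + TAG_SIZE)
            .zip(&chunks)
            .enumerate()
            // Feed the serial iterator into Rayon's thread pool.
            .par_bridge()
            // Each Rayon job receives its own copy of `start`.
            .for_each_with(start, |base, (i, (out, chunk))| {
                // Derive chunk i's nonce from a fresh copy of the base value.
                let mut nonce = *base;
                nonce.increment_counter(i);
                seal(&nonce, chunk, out);
            });

        // Spot-check chunk 2: its data was copied and its fake tag encodes 2.
        let slot = &encrypted[2 * (CHUNK_SIZE + TAG_SIZE)..3 * (CHUNK_SIZE + TAG_SIZE)];
        assert_eq!(&slot[..CHUNK_SIZE], &plaintext[2 * CHUNK_SIZE..3 * CHUNK_SIZE]);
        assert_eq!(u32::from_be_bytes(slot[CHUNK_SIZE..].try_into().unwrap()), 2);
    }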
@@ -251,7 +255,7 @@ impl Stream {
                 )
                 .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "decryption error"))?;

-            self.nonce.increment_counter();
+            self.nonce.increment_counter(1);
         }

         Ok(SecretVec::new(decrypted))