aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCaleb Garrett <[email protected]>2024-02-16 13:15:14 -0500
committerCaleb Garrett <[email protected]>2024-02-25 20:59:07 -0500
commitc2b03eff62245bd325a781e1e260c150e0a5040c (patch)
treec78bec6f490679fdcebd6743d0104c2dfb7b12bb
parent565acdf24301a72fe084aa18b7c55a6110609374 (diff)
GCM mode functional.
-rw-r--r--embassy-stm32/src/cryp/mod.rs234
1 file changed, 193 insertions, 41 deletions
diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs
index 4db95d55c..447bcf2f8 100644
--- a/embassy-stm32/src/cryp/mod.rs
+++ b/embassy-stm32/src/cryp/mod.rs
@@ -1,3 +1,4 @@
1//! Crypto Accelerator (CRYP)
1use embassy_hal_internal::{into_ref, PeripheralRef}; 2use embassy_hal_internal::{into_ref, PeripheralRef};
2 3
3use crate::pac; 4use crate::pac;
@@ -8,6 +9,8 @@ use crate::{interrupt, peripherals, Peripheral};
8const DES_BLOCK_SIZE: usize = 8; // 64 bits 9const DES_BLOCK_SIZE: usize = 8; // 64 bits
9const AES_BLOCK_SIZE: usize = 16; // 128 bits 10const AES_BLOCK_SIZE: usize = 16; // 128 bits
10 11
12/// Holds the state information for a cipher operation.
13/// Allows suspending/resuming of cipher operations.
11pub struct Context<'c> { 14pub struct Context<'c> {
12 algo: Algorithm, 15 algo: Algorithm,
13 mode: Mode, 16 mode: Mode,
@@ -19,28 +22,44 @@ pub struct Context<'c> {
19 key: &'c [u8], 22 key: &'c [u8],
20 csgcmccm: [u32; 8], 23 csgcmccm: [u32; 8],
21 csgcm: [u32; 8], 24 csgcm: [u32; 8],
25 header_len: u64,
26 payload_len: u64,
22} 27}
23 28
29/// Selects the encryption algorithm.
24#[derive(PartialEq, Clone, Copy)] 30#[derive(PartialEq, Clone, Copy)]
25pub enum Algorithm { 31pub enum Algorithm {
32 /// Advanced Encryption Standard
26 AES, 33 AES,
34 /// Data Encryption Standard
27 DES, 35 DES,
36 /// Triple-DES
28 TDES, 37 TDES,
29} 38}
30 39
40/// Selects the cipher mode.
31#[derive(PartialEq, Clone, Copy)] 41#[derive(PartialEq, Clone, Copy)]
32pub enum Mode { 42pub enum Mode {
43 /// Electronic Codebook
33 ECB, 44 ECB,
45 /// Cipher Block Chaining
34 CBC, 46 CBC,
47 /// Counter Mode
35 CTR, 48 CTR,
49 /// Galois Counter Mode
36 GCM, 50 GCM,
51 /// Galois Message Authentication Code
37 GMAC, 52 GMAC,
53 /// Counter with CBC-MAC
38 CCM, 54 CCM,
39} 55}
40 56
57/// Selects whether the crypto processor operates in encryption or decryption mode.
41#[derive(PartialEq, Clone, Copy)] 58#[derive(PartialEq, Clone, Copy)]
42pub enum Direction { 59pub enum Direction {
60 /// Encryption mode
43 Encrypt, 61 Encrypt,
62 /// Decryption mode
44 Decrypt, 63 Decrypt,
45} 64}
46 65
@@ -49,6 +68,8 @@ pub struct Cryp<'d, T: Instance> {
49 _peripheral: PeripheralRef<'d, T>, 68 _peripheral: PeripheralRef<'d, T>,
50} 69}
51 70
71/// Initialization vector of arbitrary length.
72/// When an initialization vector is not needed, `None` may be supplied.
52pub type InitVector<'v> = Option<&'v [u8]>; 73pub type InitVector<'v> = Option<&'v [u8]>;
53 74
54impl<'d, T: Instance> Cryp<'d, T> { 75impl<'d, T: Instance> Cryp<'d, T> {
@@ -62,6 +83,8 @@ impl<'d, T: Instance> Cryp<'d, T> {
62 83
63 /// Start a new cipher operation. 84 /// Start a new cipher operation.
64 /// Key size must be 128, 192, or 256 bits. 85 /// Key size must be 128, 192, or 256 bits.
86 /// Initialization vector must only be supplied if necessary.
87 /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode.
65 pub fn start<'c>(&self, key: &'c [u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context<'c> { 88 pub fn start<'c>(&self, key: &'c [u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context<'c> {
66 let mut ctx = Context { 89 let mut ctx = Context {
67 algo, 90 algo,
@@ -74,6 +97,8 @@ impl<'d, T: Instance> Cryp<'d, T> {
74 csgcmccm: [0; 8], 97 csgcmccm: [0; 8],
75 csgcm: [0; 8], 98 csgcm: [0; 8],
76 aad_complete: false, 99 aad_complete: false,
100 header_len: 0,
101 payload_len: 0,
77 }; 102 };
78 103
79 T::regs().cr().modify(|w| w.set_crypen(false)); 104 T::regs().cr().modify(|w| w.set_crypen(false));
@@ -102,8 +127,6 @@ impl<'d, T: Instance> Cryp<'d, T> {
102 panic!("IV length must be 128 bits for CCM."); 127 panic!("IV length must be 128 bits for CCM.");
103 } else if (mode == Mode::CTR) && (ivlen != 128) { 128 } else if (mode == Mode::CTR) && (ivlen != 128) {
104 panic!("IV length must be 128 bits for CTR."); 129 panic!("IV length must be 128 bits for CTR.");
105 } else if (mode == Mode::GCM) && (ivlen != 96) {
106 panic!("IV length must be 96 bits for GCM.");
107 } else if (mode == Mode::GMAC) && (ivlen != 96) { 130 } else if (mode == Mode::GMAC) && (ivlen != 96) {
108 panic!("IV length must be 96 bits for GMAC."); 131 panic!("IV length must be 96 bits for GMAC.");
109 } 132 }
@@ -121,17 +144,27 @@ impl<'d, T: Instance> Cryp<'d, T> {
121 Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(4)), 144 Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(4)),
122 Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(5)), 145 Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(5)),
123 Mode::CTR => T::regs().cr().modify(|w| w.set_algomode0(6)), 146 Mode::CTR => T::regs().cr().modify(|w| w.set_algomode0(6)),
124 Mode::GCM => T::regs().cr().modify(|w| w.set_algomode0(8)), 147 Mode::GCM => T::regs().cr().modify(|w| w.set_algomode0(0)),
125 Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode0(8)), 148 Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode0(0)),
126 Mode::CCM => T::regs().cr().modify(|w| w.set_algomode0(9)), 149 Mode::CCM => T::regs().cr().modify(|w| w.set_algomode0(1)),
150 }
151 match mode {
152 Mode::ECB => T::regs().cr().modify(|w| w.set_algomode3(false)),
153 Mode::CBC => T::regs().cr().modify(|w| w.set_algomode3(false)),
154 Mode::CTR => T::regs().cr().modify(|w| w.set_algomode3(false)),
155 Mode::GCM => T::regs().cr().modify(|w| w.set_algomode3(true)),
156 Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode3(true)),
157 Mode::CCM => T::regs().cr().modify(|w| w.set_algomode3(true)),
127 } 158 }
128 } else if algo == Algorithm::DES { 159 } else if algo == Algorithm::DES {
160 T::regs().cr().modify(|w| w.set_algomode3(false));
129 match mode { 161 match mode {
130 Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(2)), 162 Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(2)),
131 Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(3)), 163 Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(3)),
132 _ => panic!("Only ECB and CBC modes are valid for DES."), 164 _ => panic!("Only ECB and CBC modes are valid for DES."),
133 } 165 }
134 } else if algo == Algorithm::TDES { 166 } else if algo == Algorithm::TDES {
167 T::regs().cr().modify(|w| w.set_algomode3(false));
135 match mode { 168 match mode {
136 Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(0)), 169 Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(0)),
137 Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(1)), 170 Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(1)),
@@ -148,23 +181,26 @@ impl<'d, T: Instance> Cryp<'d, T> {
148 181
149 // Load the IV into the registers. 182 // Load the IV into the registers.
150 if let Some(iv) = iv { 183 if let Some(iv) = iv {
184 let mut full_iv: [u8; 16] = [0; 16];
185 full_iv[0..iv.len()].copy_from_slice(iv);
186
187 if (mode == Mode::GCM) || (mode == Mode::GMAC) {
188 full_iv[15] = 2;
189 }
190
151 let mut iv_idx = 0; 191 let mut iv_idx = 0;
152 let mut iv_word: [u8; 4] = [0; 4]; 192 let mut iv_word: [u8; 4] = [0; 4];
153 iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); 193 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
154 iv_idx += 4; 194 iv_idx += 4;
155 T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); 195 T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
156 iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); 196 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
157 iv_idx += 4; 197 iv_idx += 4;
158 T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); 198 T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
159 if iv.len() >= 12 { 199 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
160 iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]); 200 iv_idx += 4;
161 iv_idx += 4; 201 T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
162 T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); 202 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
163 } 203 T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));
164 if iv.len() >= 16 {
165 iv_word.copy_from_slice(&iv[iv_idx..iv_idx + 4]);
166 T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));
167 }
168 } 204 }
169 205
170 // Flush in/out FIFOs 206 // Flush in/out FIFOs
@@ -182,41 +218,116 @@ impl<'d, T: Instance> Cryp<'d, T> {
182 ctx 218 ctx
183 } 219 }
184 220
185 // pub fn aad_blocking(&self, ctx: &mut Context, aad: &[u8]) { 221 /// Controls the header phase of cipher processing.
186 // if ctx.aad_complete { 222 /// This function is only valid for GCM, CCM, and GMAC modes.
187 // panic!("Cannot update AAD after calling 'update'!") 223 /// It only needs to be called if using one of these modes and there is associated data.
188 // } 224 /// All AAD must be supplied to this function prior to starting the payload phase with `payload_blocking`.
189 // if (ctx.mode != Mode::GCM) && (ctx.mode != Mode::GMAC) && (ctx.mode != Mode::CCM) { 225 /// The AAD must be supplied in multiples of the block size (128 bits), except when supplying the last block.
190 // panic!("Associated data only valid for GCM, GMAC, and CCM modes.") 226 /// When supplying the last block of AAD, `last_aad_block` must be `true`.
191 // } 227 pub fn aad_blocking(&self, ctx: &mut Context, aad: &[u8], last_aad_block: bool) {
228 self.load_context(ctx);
192 229
193 // let mut header_size = 0; 230 let block_size;
194 // let mut header: [u8;] 231 if ctx.algo == Algorithm::DES {
232 block_size = DES_BLOCK_SIZE;
233 } else {
234 block_size = AES_BLOCK_SIZE;
235 }
236 let last_block_remainder = aad.len() % block_size;
195 237
196 // if aad.len() < 65280 { 238 // Perform checks for correctness.
239 if ctx.aad_complete {
240 panic!("Cannot update AAD after calling 'update'!")
241 }
242 if (ctx.mode != Mode::GCM) && (ctx.mode != Mode::GMAC) && (ctx.mode != Mode::CCM) {
243 panic!("Associated data only valid for GCM, GMAC, and CCM modes.")
244 }
245 if !last_aad_block {
246 if last_block_remainder != 0 {
247 panic!("Input length must be a multiple of {} bytes.", block_size);
248 }
249 }
197 250
198 // } 251 ctx.header_len += aad.len() as u64;
199 252
200 // // GCM header phase 253 // GCM header phase
201 // T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); 254 T::regs().cr().modify(|w| w.set_crypen(false));
202 // T::regs().cr().modify(|w| w.set_crypen(true)); 255 T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
203 // } 256 T::regs().cr().modify(|w| w.set_crypen(true));
204 257
205 pub fn update_blocking(&self, ctx: &mut Context, input: &[u8], output: &mut [u8], last_block: bool) { 258 // Load data into core, block by block.
206 self.load_context(ctx); 259 let num_full_blocks = aad.len() / block_size;
260 for block in 0..num_full_blocks {
261 let mut index = block * block_size;
262 let end_index = index + block_size;
263 // Write block in
264 while index < end_index {
265 let mut in_word: [u8; 4] = [0; 4];
266 in_word.copy_from_slice(&aad[index..index + 4]);
267 T::regs().din().write_value(u32::from_ne_bytes(in_word));
268 index += 4;
269 }
270 // Block until input FIFO is empty.
271 while !T::regs().sr().read().ifem() {}
272 }
207 273
208 ctx.aad_complete = true; 274 // Handle the final block, which is incomplete.
275 if last_block_remainder > 0 {
276 let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
277 last_block[..last_block_remainder].copy_from_slice(&aad[aad.len() - last_block_remainder..aad.len()]);
278 let mut index = 0;
279 let end_index = block_size;
280 // Write block in
281 while index < end_index {
282 let mut in_word: [u8; 4] = [0; 4];
283 in_word.copy_from_slice(&last_block[index..index + 4]);
284 T::regs().din().write_value(u32::from_ne_bytes(in_word));
285 index += 4;
286 }
287 // Block until input FIFO is empty
288 while !T::regs().sr().read().ifem() {}
289 }
290
291 if last_aad_block {
292 // Switch to payload phase.
293 ctx.aad_complete = true;
294 T::regs().cr().modify(|w| w.set_crypen(false));
295 T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
296 T::regs().cr().modify(|w| w.fflush());
297 }
298
299 self.store_context(ctx);
300 }
301
302 /// Performs encryption/decryption on the provided context.
303 /// The context determines algorithm, mode, and state of the crypto accelerator.
304 /// When the last piece of data is supplied, `last_block` should be `true`.
305 /// This function panics under various mismatches of parameters.
306 /// Input and output buffer lengths must match.
307 /// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes.
308 /// Padding or ciphertext stealing must be managed by the application for these modes.
309 /// Data must also be a multiple of block size unless `last_block` is `true`.
310 pub fn payload_blocking(&self, ctx: &mut Context, input: &[u8], output: &mut [u8], last_block: bool) {
311 self.load_context(ctx);
209 312
210 let block_size; 313 let block_size;
211 if ctx.algo == Algorithm::DES { 314 if ctx.algo == Algorithm::DES {
212 block_size = 8; 315 block_size = DES_BLOCK_SIZE;
213 } else { 316 } else {
214 block_size = 16; 317 block_size = AES_BLOCK_SIZE;
215 } 318 }
216 let last_block_remainder = input.len() % block_size; 319 let last_block_remainder = input.len() % block_size;
217 320
218 // Perform checks for correctness. 321 // Perform checks for correctness.
219 322 if !ctx.aad_complete && ctx.header_len > 0 {
323 panic!("Additional associated data must be processed first!");
324 } else if !ctx.aad_complete {
325 ctx.aad_complete = true;
326 T::regs().cr().modify(|w| w.set_crypen(false));
327 T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
328 T::regs().cr().modify(|w| w.fflush());
329 T::regs().cr().modify(|w| w.set_crypen(true));
330 }
220 if ctx.mode == Mode::GMAC { 331 if ctx.mode == Mode::GMAC {
221 panic!("GMAC works on header data only. Do not call this function for GMAC."); 332 panic!("GMAC works on header data only. Do not call this function for GMAC.");
222 } 333 }
@@ -270,14 +381,15 @@ impl<'d, T: Instance> Cryp<'d, T> {
270 if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { 381 if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt {
271 //Handle special GCM partial block process. 382 //Handle special GCM partial block process.
272 T::regs().cr().modify(|w| w.set_crypen(false)); 383 T::regs().cr().modify(|w| w.set_crypen(false));
273 T::regs().cr().write(|w| w.set_algomode0(6)); 384 T::regs().cr().modify(|w| w.set_algomode3(false));
385 T::regs().cr().modify(|w| w.set_algomode0(6));
274 let iv1r = T::regs().csgcmccmr(7).read() - 1; 386 let iv1r = T::regs().csgcmccmr(7).read() - 1;
275 T::regs().init(1).ivrr().write_value(iv1r); 387 T::regs().init(1).ivrr().write_value(iv1r);
276 T::regs().cr().modify(|w| w.set_crypen(true)); 388 T::regs().cr().modify(|w| w.set_crypen(true));
277 } 389 }
278 390
279 let mut intermediate_data: [u8; 16] = [0; 16]; 391 let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
280 let mut last_block: [u8; 16] = [0; 16]; 392 let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
281 last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); 393 last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]);
282 let mut index = 0; 394 let mut index = 0;
283 let end_index = block_size; 395 let end_index = block_size;
@@ -307,7 +419,8 @@ impl<'d, T: Instance> Cryp<'d, T> {
307 if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { 419 if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt {
308 //Handle special GCM partial block process. 420 //Handle special GCM partial block process.
309 T::regs().cr().modify(|w| w.set_crypen(false)); 421 T::regs().cr().modify(|w| w.set_crypen(false));
310 T::regs().cr().write(|w| w.set_algomode0(8)); 422 T::regs().cr().write(|w| w.set_algomode3(true));
423 T::regs().cr().write(|w| w.set_algomode0(0));
311 T::regs().init(1).ivrr().write_value(2); 424 T::regs().init(1).ivrr().write_value(2);
312 T::regs().cr().modify(|w| w.set_crypen(true)); 425 T::regs().cr().modify(|w| w.set_crypen(true));
313 T::regs().cr().modify(|w| w.set_gcm_ccmph(3)); 426 T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
@@ -324,12 +437,51 @@ impl<'d, T: Instance> Cryp<'d, T> {
324 } 437 }
325 } 438 }
326 } 439 }
440
441 ctx.payload_len += input.len() as u64;
442 }
443
444 /// This function only needs to be called for GCM, CCM, and GMAC modes to
445 /// generate an authentication tag. Calling this function on any other mode
446 /// does nothing except consumes the context. A buffer for the authentication
447 /// tag must be supplied.
448 pub fn finish_blocking(&self, mut ctx: Context, tag: &mut [u8; 16]) {
449 // Just consume the context if called for any other mode.
450 if (ctx.mode != Mode::GCM) || (ctx.mode != Mode::CCM) || (ctx.mode != Mode::GMAC) {
451 return;
452 }
453
454 self.load_context(&mut ctx);
455
456 T::regs().cr().modify(|w| w.set_crypen(false));
457 T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
458 T::regs().cr().modify(|w| w.set_crypen(true));
459
460 let headerlen1: u32 = (ctx.header_len >> 32) as u32;
461 let headerlen2: u32 = ctx.header_len as u32;
462 let payloadlen1: u32 = (ctx.payload_len >> 32) as u32;
463 let payloadlen2: u32 = ctx.payload_len as u32;
464
465 T::regs().din().write_value(headerlen1.swap_bytes());
466 T::regs().din().write_value(headerlen2.swap_bytes());
467 T::regs().din().write_value(payloadlen1.swap_bytes());
468 T::regs().din().write_value(payloadlen2.swap_bytes());
469
470 while !T::regs().sr().read().ofne() {}
471
472 tag[0..4].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice());
473 tag[4..8].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice());
474 tag[8..12].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice());
475 tag[12..16].copy_from_slice(T::regs().dout().read().to_ne_bytes().as_slice());
476
477 T::regs().cr().modify(|w| w.set_crypen(false));
327 } 478 }
328 479
329 fn prepare_key(&self, ctx: &Context) { 480 fn prepare_key(&self, ctx: &Context) {
330 if ctx.algo == Algorithm::AES && ctx.dir == Direction::Decrypt { 481 if ctx.algo == Algorithm::AES && ctx.dir == Direction::Decrypt {
331 if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { 482 if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) {
332 T::regs().cr().modify(|w| w.set_algomode0(7)); 483 T::regs().cr().modify(|w| w.set_algomode0(7));
484 T::regs().cr().modify(|w| w.set_algomode3(false));
333 T::regs().cr().modify(|w| w.set_crypen(true)); 485 T::regs().cr().modify(|w| w.set_crypen(true));
334 while T::regs().sr().read().busy() {} 486 while T::regs().sr().read().busy() {}
335 } 487 }