diff options
| -rw-r--r-- | embassy-stm32/src/cryp/mod.rs | 651 |
1 files changed, 431 insertions, 220 deletions
diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs index 447bcf2f8..29c1db12e 100644 --- a/embassy-stm32/src/cryp/mod.rs +++ b/embassy-stm32/src/cryp/mod.rs | |||
| @@ -1,4 +1,6 @@ | |||
| 1 | //! Crypto Accelerator (CRYP) | 1 | //! Crypto Accelerator (CRYP) |
| 2 | use core::marker::PhantomData; | ||
| 3 | |||
| 2 | use embassy_hal_internal::{into_ref, PeripheralRef}; | 4 | use embassy_hal_internal::{into_ref, PeripheralRef}; |
| 3 | 5 | ||
| 4 | use crate::pac; | 6 | use crate::pac; |
| @@ -9,51 +11,375 @@ use crate::{interrupt, peripherals, Peripheral}; | |||
| 9 | const DES_BLOCK_SIZE: usize = 8; // 64 bits | 11 | const DES_BLOCK_SIZE: usize = 8; // 64 bits |
| 10 | const AES_BLOCK_SIZE: usize = 16; // 128 bits | 12 | const AES_BLOCK_SIZE: usize = 16; // 128 bits |
| 11 | 13 | ||
| 14 | /// This trait encapsulates all cipher-specific behavior. | ||
| 15 | pub trait Cipher<'c> { | ||
| 16 | /// Processing block size. Determined by the processor and the algorithm. | ||
| 17 | const BLOCK_SIZE: usize; | ||
| 18 | |||
| 19 | /// Indicates whether the cipher requires the application to provide padding. | ||
| 20 | /// If `true`, no partial blocks will be accepted (a panic will occur). | ||
| 21 | const REQUIRES_PADDING: bool = false; | ||
| 22 | |||
| 23 | /// Returns the symmetric key. | ||
| 24 | fn key(&self) -> &'c [u8]; | ||
| 25 | |||
| 26 | /// Returns the initialization vector. | ||
| 27 | fn iv(&self) -> &[u8]; | ||
| 28 | |||
| 29 | /// Sets the processor algorithm mode according to the associated cipher. | ||
| 30 | fn set_algomode(&self, p: &pac::cryp::Cryp); | ||
| 31 | |||
| 32 | /// Performs any key preparation within the processor, if necessary. | ||
| 33 | fn prepare_key(&self, _p: &pac::cryp::Cryp) {} | ||
| 34 | |||
| 35 | /// Performs any cipher-specific initialization. | ||
| 36 | fn init_phase(&self, _p: &pac::cryp::Cryp) {} | ||
| 37 | |||
| 38 | /// Called prior to processing the last data block for cipher-specific operations. | ||
| 39 | fn pre_final_block(&self, _p: &pac::cryp::Cryp) {} | ||
| 40 | |||
| 41 | /// Called after processing the last data block for cipher-specific operations. | ||
| 42 | fn post_final_block(&self, _p: &pac::cryp::Cryp, _dir: Direction, _int_data: &[u8; AES_BLOCK_SIZE]) {} | ||
| 43 | } | ||
| 44 | |||
| 45 | /// This trait enables restriction of ciphers to specific key sizes. | ||
| 46 | pub trait CipherSized {} | ||
| 47 | |||
| 48 | /// This trait enables restriction of a header phase to authenticated ciphers only. | ||
| 49 | pub trait CipherAuthenticated {} | ||
| 50 | |||
| 51 | /// AES-ECB Cipher Mode | ||
| 52 | pub struct AesEcb<'c, const KEY_SIZE: usize> { | ||
| 53 | iv: &'c [u8; 0], | ||
| 54 | key: &'c [u8; KEY_SIZE], | ||
| 55 | } | ||
| 56 | |||
| 57 | impl<'c, const KEY_SIZE: usize> AesEcb<'c, KEY_SIZE> { | ||
| 58 | /// Constructs a new AES-ECB cipher for a cryptographic operation. | ||
| 59 | pub fn new(key: &'c [u8; KEY_SIZE]) -> Self { | ||
| 60 | return Self { key: key, iv: &[0; 0] }; | ||
| 61 | } | ||
| 62 | } | ||
| 63 | |||
| 64 | impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> { | ||
| 65 | const BLOCK_SIZE: usize = AES_BLOCK_SIZE; | ||
| 66 | const REQUIRES_PADDING: bool = true; | ||
| 67 | |||
| 68 | fn key(&self) -> &'c [u8] { | ||
| 69 | self.key | ||
| 70 | } | ||
| 71 | |||
| 72 | fn iv(&self) -> &'c [u8] { | ||
| 73 | self.iv | ||
| 74 | } | ||
| 75 | |||
| 76 | fn prepare_key(&self, p: &pac::cryp::Cryp) { | ||
| 77 | p.cr().modify(|w| w.set_algomode0(7)); | ||
| 78 | p.cr().modify(|w| w.set_algomode3(false)); | ||
| 79 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 80 | while p.sr().read().busy() {} | ||
| 81 | } | ||
| 82 | |||
| 83 | fn set_algomode(&self, p: &pac::cryp::Cryp) { | ||
| 84 | p.cr().modify(|w| w.set_algomode0(4)); | ||
| 85 | p.cr().modify(|w| w.set_algomode3(false)); | ||
| 86 | } | ||
| 87 | } | ||
| 88 | |||
| 89 | impl<'c> CipherSized for AesEcb<'c, { 128 / 8 }> {} | ||
| 90 | impl<'c> CipherSized for AesEcb<'c, { 192 / 8 }> {} | ||
| 91 | impl<'c> CipherSized for AesEcb<'c, { 256 / 8 }> {} | ||
| 92 | |||
| 93 | /// AES-CBC Cipher Mode | ||
| 94 | pub struct AesCbc<'c, const KEY_SIZE: usize> { | ||
| 95 | iv: &'c [u8; 16], | ||
| 96 | key: &'c [u8; KEY_SIZE], | ||
| 97 | } | ||
| 98 | |||
| 99 | impl<'c, const KEY_SIZE: usize> AesCbc<'c, KEY_SIZE> { | ||
| 100 | /// Constructs a new AES-CBC cipher for a cryptographic operation. | ||
| 101 | pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self { | ||
| 102 | return Self { key: key, iv: iv }; | ||
| 103 | } | ||
| 104 | } | ||
| 105 | |||
| 106 | impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> { | ||
| 107 | const BLOCK_SIZE: usize = AES_BLOCK_SIZE; | ||
| 108 | const REQUIRES_PADDING: bool = true; | ||
| 109 | |||
| 110 | fn key(&self) -> &'c [u8] { | ||
| 111 | self.key | ||
| 112 | } | ||
| 113 | |||
| 114 | fn iv(&self) -> &'c [u8] { | ||
| 115 | self.iv | ||
| 116 | } | ||
| 117 | |||
| 118 | fn prepare_key(&self, p: &pac::cryp::Cryp) { | ||
| 119 | p.cr().modify(|w| w.set_algomode0(7)); | ||
| 120 | p.cr().modify(|w| w.set_algomode3(false)); | ||
| 121 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 122 | while p.sr().read().busy() {} | ||
| 123 | } | ||
| 124 | |||
| 125 | fn set_algomode(&self, p: &pac::cryp::Cryp) { | ||
| 126 | p.cr().modify(|w| w.set_algomode0(5)); | ||
| 127 | p.cr().modify(|w| w.set_algomode3(false)); | ||
| 128 | } | ||
| 129 | } | ||
| 130 | |||
| 131 | impl<'c> CipherSized for AesCbc<'c, { 128 / 8 }> {} | ||
| 132 | impl<'c> CipherSized for AesCbc<'c, { 192 / 8 }> {} | ||
| 133 | impl<'c> CipherSized for AesCbc<'c, { 256 / 8 }> {} | ||
| 134 | |||
| 135 | /// AES-CTR Cipher Mode | ||
| 136 | pub struct AesCtr<'c, const KEY_SIZE: usize> { | ||
| 137 | iv: &'c [u8; 16], | ||
| 138 | key: &'c [u8; KEY_SIZE], | ||
| 139 | } | ||
| 140 | |||
| 141 | impl<'c, const KEY_SIZE: usize> AesCtr<'c, KEY_SIZE> { | ||
| 142 | /// Constructs a new AES-CTR cipher for a cryptographic operation. | ||
| 143 | pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self { | ||
| 144 | return Self { key: key, iv: iv }; | ||
| 145 | } | ||
| 146 | } | ||
| 147 | |||
| 148 | impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> { | ||
| 149 | const BLOCK_SIZE: usize = AES_BLOCK_SIZE; | ||
| 150 | |||
| 151 | fn key(&self) -> &'c [u8] { | ||
| 152 | self.key | ||
| 153 | } | ||
| 154 | |||
| 155 | fn iv(&self) -> &'c [u8] { | ||
| 156 | self.iv | ||
| 157 | } | ||
| 158 | |||
| 159 | fn set_algomode(&self, p: &pac::cryp::Cryp) { | ||
| 160 | p.cr().modify(|w| w.set_algomode0(6)); | ||
| 161 | p.cr().modify(|w| w.set_algomode3(false)); | ||
| 162 | } | ||
| 163 | } | ||
| 164 | |||
| 165 | impl<'c> CipherSized for AesCtr<'c, { 128 / 8 }> {} | ||
| 166 | impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {} | ||
| 167 | impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {} | ||
| 168 | |||
| 169 | /// AES-GCM Cipher Mode | ||
| 170 | pub struct AesGcm<'c, const KEY_SIZE: usize> { | ||
| 171 | iv: [u8; 16], | ||
| 172 | key: &'c [u8; KEY_SIZE], | ||
| 173 | } | ||
| 174 | |||
| 175 | impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> { | ||
| 176 | /// Constructs a new AES-GCM cipher for a cryptographic operation. | ||
| 177 | pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { | ||
| 178 | let mut new_gcm = Self { key: key, iv: [0; 16] }; | ||
| 179 | new_gcm.iv[..12].copy_from_slice(iv); | ||
| 180 | new_gcm.iv[15] = 2; | ||
| 181 | new_gcm | ||
| 182 | } | ||
| 183 | } | ||
| 184 | |||
| 185 | impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> { | ||
| 186 | const BLOCK_SIZE: usize = AES_BLOCK_SIZE; | ||
| 187 | |||
| 188 | fn key(&self) -> &'c [u8] { | ||
| 189 | self.key | ||
| 190 | } | ||
| 191 | |||
| 192 | fn iv(&self) -> &[u8] { | ||
| 193 | self.iv.as_slice() | ||
| 194 | } | ||
| 195 | |||
| 196 | fn set_algomode(&self, p: &pac::cryp::Cryp) { | ||
| 197 | p.cr().modify(|w| w.set_algomode0(0)); | ||
| 198 | p.cr().modify(|w| w.set_algomode3(true)); | ||
| 199 | } | ||
| 200 | |||
| 201 | fn init_phase(&self, p: &pac::cryp::Cryp) { | ||
| 202 | p.cr().modify(|w| w.set_gcm_ccmph(0)); | ||
| 203 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 204 | while p.cr().read().crypen() {} | ||
| 205 | } | ||
| 206 | |||
| 207 | fn pre_final_block(&self, p: &pac::cryp::Cryp) { | ||
| 208 | //Handle special GCM partial block process. | ||
| 209 | p.cr().modify(|w| w.set_crypen(false)); | ||
| 210 | p.cr().modify(|w| w.set_algomode3(false)); | ||
| 211 | p.cr().modify(|w| w.set_algomode0(6)); | ||
| 212 | let iv1r = p.csgcmccmr(7).read() - 1; | ||
| 213 | p.init(1).ivrr().write_value(iv1r); | ||
| 214 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 215 | } | ||
| 216 | |||
| 217 | fn post_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, int_data: &[u8; AES_BLOCK_SIZE]) { | ||
| 218 | if dir == Direction::Encrypt { | ||
| 219 | //Handle special GCM partial block process. | ||
| 220 | p.cr().modify(|w| w.set_crypen(false)); | ||
| 221 | p.cr().write(|w| w.set_algomode3(true)); | ||
| 222 | p.cr().write(|w| w.set_algomode0(0)); | ||
| 223 | p.init(1).ivrr().write_value(2); | ||
| 224 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 225 | p.cr().modify(|w| w.set_gcm_ccmph(3)); | ||
| 226 | let mut index = 0; | ||
| 227 | let end_index = Self::BLOCK_SIZE; | ||
| 228 | while index < end_index { | ||
| 229 | let mut in_word: [u8; 4] = [0; 4]; | ||
| 230 | in_word.copy_from_slice(&int_data[index..index + 4]); | ||
| 231 | p.din().write_value(u32::from_ne_bytes(in_word)); | ||
| 232 | index += 4; | ||
| 233 | } | ||
| 234 | for _ in 0..4 { | ||
| 235 | p.dout().read(); | ||
| 236 | } | ||
| 237 | } | ||
| 238 | } | ||
| 239 | } | ||
| 240 | |||
| 241 | impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {} | ||
| 242 | impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {} | ||
| 243 | impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {} | ||
| 244 | impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesGcm<'c, KEY_SIZE> {} | ||
| 245 | |||
| 246 | /// AES-GMAC Cipher Mode | ||
| 247 | pub struct AesGmac<'c, const KEY_SIZE: usize> { | ||
| 248 | iv: [u8; 16], | ||
| 249 | key: &'c [u8; KEY_SIZE], | ||
| 250 | } | ||
| 251 | |||
| 252 | impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> { | ||
| 253 | /// Constructs a new AES-GMAC cipher for a cryptographic operation. | ||
| 254 | pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self { | ||
| 255 | let mut new_gmac = Self { key: key, iv: [0; 16] }; | ||
| 256 | new_gmac.iv[..12].copy_from_slice(iv); | ||
| 257 | new_gmac.iv[15] = 2; | ||
| 258 | new_gmac | ||
| 259 | } | ||
| 260 | } | ||
| 261 | |||
| 262 | impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> { | ||
| 263 | const BLOCK_SIZE: usize = AES_BLOCK_SIZE; | ||
| 264 | |||
| 265 | fn key(&self) -> &'c [u8] { | ||
| 266 | self.key | ||
| 267 | } | ||
| 268 | |||
| 269 | fn iv(&self) -> &[u8] { | ||
| 270 | self.iv.as_slice() | ||
| 271 | } | ||
| 272 | |||
| 273 | fn set_algomode(&self, p: &pac::cryp::Cryp) { | ||
| 274 | p.cr().modify(|w| w.set_algomode0(0)); | ||
| 275 | p.cr().modify(|w| w.set_algomode3(true)); | ||
| 276 | } | ||
| 277 | |||
| 278 | fn init_phase(&self, p: &pac::cryp::Cryp) { | ||
| 279 | p.cr().modify(|w| w.set_gcm_ccmph(0)); | ||
| 280 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 281 | while p.cr().read().crypen() {} | ||
| 282 | } | ||
| 283 | |||
| 284 | fn pre_final_block(&self, p: &pac::cryp::Cryp) { | ||
| 285 | //Handle special GCM partial block process. | ||
| 286 | p.cr().modify(|w| w.set_crypen(false)); | ||
| 287 | p.cr().modify(|w| w.set_algomode3(false)); | ||
| 288 | p.cr().modify(|w| w.set_algomode0(6)); | ||
| 289 | let iv1r = p.csgcmccmr(7).read() - 1; | ||
| 290 | p.init(1).ivrr().write_value(iv1r); | ||
| 291 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 292 | } | ||
| 293 | |||
| 294 | fn post_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, int_data: &[u8; AES_BLOCK_SIZE]) { | ||
| 295 | if dir == Direction::Encrypt { | ||
| 296 | //Handle special GCM partial block process. | ||
| 297 | p.cr().modify(|w| w.set_crypen(false)); | ||
| 298 | p.cr().write(|w| w.set_algomode3(true)); | ||
| 299 | p.cr().write(|w| w.set_algomode0(0)); | ||
| 300 | p.init(1).ivrr().write_value(2); | ||
| 301 | p.cr().modify(|w| w.set_crypen(true)); | ||
| 302 | p.cr().modify(|w| w.set_gcm_ccmph(3)); | ||
| 303 | let mut index = 0; | ||
| 304 | let end_index = Self::BLOCK_SIZE; | ||
| 305 | while index < end_index { | ||
| 306 | let mut in_word: [u8; 4] = [0; 4]; | ||
| 307 | in_word.copy_from_slice(&int_data[index..index + 4]); | ||
| 308 | p.din().write_value(u32::from_ne_bytes(in_word)); | ||
| 309 | index += 4; | ||
| 310 | } | ||
| 311 | for _ in 0..4 { | ||
| 312 | p.dout().read(); | ||
| 313 | } | ||
| 314 | } | ||
| 315 | } | ||
| 316 | } | ||
| 317 | |||
| 318 | impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {} | ||
| 319 | impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {} | ||
| 320 | impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {} | ||
| 321 | impl<'c, const KEY_SIZE: usize> CipherAuthenticated for AesGmac<'c, KEY_SIZE> {} | ||
| 322 | |||
| 323 | // struct AesCcm<'c, const KEY_SIZE: usize> { | ||
| 324 | // iv: &'c [u8], | ||
| 325 | // key: &'c [u8; KEY_SIZE], | ||
| 326 | // aad_len: usize, | ||
| 327 | // payload_len: usize, | ||
| 328 | // } | ||
| 329 | |||
| 330 | // impl<'c, const KEY_SIZE: usize> AesCcm<'c, KEY_SIZE> { | ||
| 331 | // pub fn new(&self, key: &[u8; KEY_SIZE], iv: &[u8], aad_len: usize, payload_len: usize) { | ||
| 332 | // if iv.len() > 13 { | ||
| 333 | // panic!("CCM IV length must be 13 bytes or less."); | ||
| 334 | // } | ||
| 335 | // self.key = key; | ||
| 336 | // self.iv = iv; | ||
| 337 | // self.aad_len = aad_len; | ||
| 338 | // self.payload_len = payload_len; | ||
| 339 | // } | ||
| 340 | // } | ||
| 341 | |||
| 342 | // impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCcm<'c, KEY_SIZE> { | ||
| 343 | // const BLOCK_SIZE: usize = AES_BLOCK_SIZE; | ||
| 344 | |||
| 345 | // fn key(&self) -> &'c [u8] { | ||
| 346 | // self.key | ||
| 347 | // } | ||
| 348 | |||
| 349 | // fn iv(&self) -> &'c [u8] { | ||
| 350 | // self.iv | ||
| 351 | // } | ||
| 352 | |||
| 353 | // fn set_algomode(&self, p: &pac::cryp::Cryp) { | ||
| 354 | // p.cr().modify(|w| w.set_algomode0(1)); | ||
| 355 | // p.cr().modify(|w| w.set_algomode3(true)); | ||
| 356 | // } | ||
| 357 | |||
| 358 | // fn init_phase(&self, p: &pac::cryp::Cryp) { | ||
| 359 | // todo!(); | ||
| 360 | // } | ||
| 361 | // } | ||
| 362 | |||
| 363 | // impl<'c> CipherSized for AesCcm<'c, { 128 / 8 }> {} | ||
| 364 | // impl<'c> CipherSized for AesCcm<'c, { 192 / 8 }> {} | ||
| 365 | // impl<'c> CipherSized for AesCcm<'c, { 256 / 8 }> {} | ||
| 366 | |||
| 12 | /// Holds the state information for a cipher operation. | 367 | /// Holds the state information for a cipher operation. |
| 13 | /// Allows suspending/resuming of cipher operations. | 368 | /// Allows suspending/resuming of cipher operations. |
| 14 | pub struct Context<'c> { | 369 | pub struct Context<'c, C: Cipher<'c> + CipherSized> { |
| 15 | algo: Algorithm, | 370 | phantom_data: PhantomData<&'c C>, |
| 16 | mode: Mode, | 371 | cipher: &'c C, |
| 17 | dir: Direction, | 372 | dir: Direction, |
| 18 | last_block_processed: bool, | 373 | last_block_processed: bool, |
| 19 | aad_complete: bool, | 374 | aad_complete: bool, |
| 20 | cr: u32, | 375 | cr: u32, |
| 21 | iv: [u32; 4], | 376 | iv: [u32; 4], |
| 22 | key: &'c [u8], | ||
| 23 | csgcmccm: [u32; 8], | 377 | csgcmccm: [u32; 8], |
| 24 | csgcm: [u32; 8], | 378 | csgcm: [u32; 8], |
| 25 | header_len: u64, | 379 | header_len: u64, |
| 26 | payload_len: u64, | 380 | payload_len: u64, |
| 27 | } | 381 | } |
| 28 | 382 | ||
| 29 | /// Selects the encryption algorithm. | ||
| 30 | #[derive(PartialEq, Clone, Copy)] | ||
| 31 | pub enum Algorithm { | ||
| 32 | /// Advanced Encryption Standard | ||
| 33 | AES, | ||
| 34 | /// Data Encryption Standard | ||
| 35 | DES, | ||
| 36 | /// Triple-DES | ||
| 37 | TDES, | ||
| 38 | } | ||
| 39 | |||
| 40 | /// Selects the cipher mode. | ||
| 41 | #[derive(PartialEq, Clone, Copy)] | ||
| 42 | pub enum Mode { | ||
| 43 | /// Electronic Codebook | ||
| 44 | ECB, | ||
| 45 | /// Cipher Block Chaining | ||
| 46 | CBC, | ||
| 47 | /// Counter Mode | ||
| 48 | CTR, | ||
| 49 | /// Galois Counter Mode | ||
| 50 | GCM, | ||
| 51 | /// Galois Message Authentication Code | ||
| 52 | GMAC, | ||
| 53 | /// Counter with CBC-MAC | ||
| 54 | CCM, | ||
| 55 | } | ||
| 56 | |||
| 57 | /// Selects whether the crypto processor operates in encryption or decryption mode. | 383 | /// Selects whether the crypto processor operates in encryption or decryption mode. |
| 58 | #[derive(PartialEq, Clone, Copy)] | 384 | #[derive(PartialEq, Clone, Copy)] |
| 59 | pub enum Direction { | 385 | pub enum Direction { |
| @@ -68,10 +394,6 @@ pub struct Cryp<'d, T: Instance> { | |||
| 68 | _peripheral: PeripheralRef<'d, T>, | 394 | _peripheral: PeripheralRef<'d, T>, |
| 69 | } | 395 | } |
| 70 | 396 | ||
| 71 | /// Initialization vector of arbitrary length. | ||
| 72 | /// When an initialization vector is not needed, `None` may be supplied. | ||
| 73 | pub type InitVector<'v> = Option<&'v [u8]>; | ||
| 74 | |||
| 75 | impl<'d, T: Instance> Cryp<'d, T> { | 397 | impl<'d, T: Instance> Cryp<'d, T> { |
| 76 | /// Create a new CRYP driver. | 398 | /// Create a new CRYP driver. |
| 77 | pub fn new(peri: impl Peripheral<P = T> + 'd) -> Self { | 399 | pub fn new(peri: impl Peripheral<P = T> + 'd) -> Self { |
| @@ -85,51 +407,31 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 85 | /// Key size must be 128, 192, or 256 bits. | 407 | /// Key size must be 128, 192, or 256 bits. |
| 86 | /// Initialization vector must only be supplied if necessary. | 408 | /// Initialization vector must only be supplied if necessary. |
| 87 | /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode. | 409 | /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode. |
| 88 | pub fn start<'c>(&self, key: &'c [u8], iv: InitVector, algo: Algorithm, mode: Mode, dir: Direction) -> Context<'c> { | 410 | pub fn start<'c, C: Cipher<'c> + CipherSized>(&self, cipher: &'c C, dir: Direction) -> Context<'c, C> { |
| 89 | let mut ctx = Context { | 411 | let mut ctx: Context<'c, C> = Context { |
| 90 | algo, | ||
| 91 | mode, | ||
| 92 | dir, | 412 | dir, |
| 93 | last_block_processed: false, | 413 | last_block_processed: false, |
| 94 | cr: 0, | 414 | cr: 0, |
| 95 | iv: [0; 4], | 415 | iv: [0; 4], |
| 96 | key, | ||
| 97 | csgcmccm: [0; 8], | 416 | csgcmccm: [0; 8], |
| 98 | csgcm: [0; 8], | 417 | csgcm: [0; 8], |
| 99 | aad_complete: false, | 418 | aad_complete: false, |
| 100 | header_len: 0, | 419 | header_len: 0, |
| 101 | payload_len: 0, | 420 | payload_len: 0, |
| 421 | cipher: cipher, | ||
| 422 | phantom_data: PhantomData, | ||
| 102 | }; | 423 | }; |
| 103 | 424 | ||
| 104 | T::regs().cr().modify(|w| w.set_crypen(false)); | 425 | T::regs().cr().modify(|w| w.set_crypen(false)); |
| 105 | 426 | ||
| 106 | // Checks for correctness | 427 | let key = ctx.cipher.key(); |
| 107 | if algo == Algorithm::AES { | ||
| 108 | let keylen = key.len() * 8; | ||
| 109 | let ivlen; | ||
| 110 | if let Some(iv) = iv { | ||
| 111 | ivlen = iv.len() * 8; | ||
| 112 | } else { | ||
| 113 | ivlen = 0; | ||
| 114 | } | ||
| 115 | match keylen { | ||
| 116 | 128 => T::regs().cr().modify(|w| w.set_keysize(0)), | ||
| 117 | 192 => T::regs().cr().modify(|w| w.set_keysize(1)), | ||
| 118 | 256 => T::regs().cr().modify(|w| w.set_keysize(2)), | ||
| 119 | _ => panic!("Key length must be 128, 192, or 256 bits."), | ||
| 120 | } | ||
| 121 | 428 | ||
| 122 | if (mode == Mode::GCM) && (ivlen != 96) { | 429 | if key.len() == (128 / 8) { |
| 123 | panic!("IV length must be 96 bits for GCM."); | 430 | T::regs().cr().modify(|w| w.set_keysize(0)); |
| 124 | } else if (mode == Mode::CBC) && (ivlen != 128) { | 431 | } else if key.len() == (192 / 8) { |
| 125 | panic!("IV length must be 128 bits for CBC."); | 432 | T::regs().cr().modify(|w| w.set_keysize(1)); |
| 126 | } else if (mode == Mode::CCM) && (ivlen != 128) { | 433 | } else if key.len() == (256 / 8) { |
| 127 | panic!("IV length must be 128 bits for CCM."); | 434 | T::regs().cr().modify(|w| w.set_keysize(2)); |
| 128 | } else if (mode == Mode::CTR) && (ivlen != 128) { | ||
| 129 | panic!("IV length must be 128 bits for CTR."); | ||
| 130 | } else if (mode == Mode::GMAC) && (ivlen != 96) { | ||
| 131 | panic!("IV length must be 96 bits for GMAC."); | ||
| 132 | } | ||
| 133 | } | 435 | } |
| 134 | 436 | ||
| 135 | self.load_key(key); | 437 | self.load_key(key); |
| @@ -137,40 +439,9 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 137 | // Set data type to 8-bit. This will match software implementations. | 439 | // Set data type to 8-bit. This will match software implementations. |
| 138 | T::regs().cr().modify(|w| w.set_datatype(2)); | 440 | T::regs().cr().modify(|w| w.set_datatype(2)); |
| 139 | 441 | ||
| 140 | self.prepare_key(&ctx); | 442 | ctx.cipher.prepare_key(&T::regs()); |
| 141 | 443 | ||
| 142 | if algo == Algorithm::AES { | 444 | ctx.cipher.set_algomode(&T::regs()); |
| 143 | match mode { | ||
| 144 | Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(4)), | ||
| 145 | Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(5)), | ||
| 146 | Mode::CTR => T::regs().cr().modify(|w| w.set_algomode0(6)), | ||
| 147 | Mode::GCM => T::regs().cr().modify(|w| w.set_algomode0(0)), | ||
| 148 | Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode0(0)), | ||
| 149 | Mode::CCM => T::regs().cr().modify(|w| w.set_algomode0(1)), | ||
| 150 | } | ||
| 151 | match mode { | ||
| 152 | Mode::ECB => T::regs().cr().modify(|w| w.set_algomode3(false)), | ||
| 153 | Mode::CBC => T::regs().cr().modify(|w| w.set_algomode3(false)), | ||
| 154 | Mode::CTR => T::regs().cr().modify(|w| w.set_algomode3(false)), | ||
| 155 | Mode::GCM => T::regs().cr().modify(|w| w.set_algomode3(true)), | ||
| 156 | Mode::GMAC => T::regs().cr().modify(|w| w.set_algomode3(true)), | ||
| 157 | Mode::CCM => T::regs().cr().modify(|w| w.set_algomode3(true)), | ||
| 158 | } | ||
| 159 | } else if algo == Algorithm::DES { | ||
| 160 | T::regs().cr().modify(|w| w.set_algomode3(false)); | ||
| 161 | match mode { | ||
| 162 | Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(2)), | ||
| 163 | Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(3)), | ||
| 164 | _ => panic!("Only ECB and CBC modes are valid for DES."), | ||
| 165 | } | ||
| 166 | } else if algo == Algorithm::TDES { | ||
| 167 | T::regs().cr().modify(|w| w.set_algomode3(false)); | ||
| 168 | match mode { | ||
| 169 | Mode::ECB => T::regs().cr().modify(|w| w.set_algomode0(0)), | ||
| 170 | Mode::CBC => T::regs().cr().modify(|w| w.set_algomode0(1)), | ||
| 171 | _ => panic!("Only ECB and CBC modes are valid for TDES."), | ||
| 172 | } | ||
| 173 | } | ||
| 174 | 445 | ||
| 175 | // Set encrypt/decrypt | 446 | // Set encrypt/decrypt |
| 176 | if dir == Direction::Encrypt { | 447 | if dir == Direction::Encrypt { |
| @@ -180,38 +451,27 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 180 | } | 451 | } |
| 181 | 452 | ||
| 182 | // Load the IV into the registers. | 453 | // Load the IV into the registers. |
| 183 | if let Some(iv) = iv { | 454 | let iv = ctx.cipher.iv(); |
| 184 | let mut full_iv: [u8; 16] = [0; 16]; | 455 | let mut full_iv: [u8; 16] = [0; 16]; |
| 185 | full_iv[0..iv.len()].copy_from_slice(iv); | 456 | full_iv[0..iv.len()].copy_from_slice(iv); |
| 186 | 457 | let mut iv_idx = 0; | |
| 187 | if (mode == Mode::GCM) || (mode == Mode::GMAC) { | 458 | let mut iv_word: [u8; 4] = [0; 4]; |
| 188 | full_iv[15] = 2; | 459 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); |
| 189 | } | 460 | iv_idx += 4; |
| 190 | 461 | T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); | |
| 191 | let mut iv_idx = 0; | 462 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); |
| 192 | let mut iv_word: [u8; 4] = [0; 4]; | 463 | iv_idx += 4; |
| 193 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); | 464 | T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); |
| 194 | iv_idx += 4; | 465 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); |
| 195 | T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word)); | 466 | iv_idx += 4; |
| 196 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); | 467 | T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); |
| 197 | iv_idx += 4; | 468 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); |
| 198 | T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word)); | 469 | T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); |
| 199 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); | ||
| 200 | iv_idx += 4; | ||
| 201 | T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word)); | ||
| 202 | iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]); | ||
| 203 | T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word)); | ||
| 204 | } | ||
| 205 | 470 | ||
| 206 | // Flush in/out FIFOs | 471 | // Flush in/out FIFOs |
| 207 | T::regs().cr().modify(|w| w.fflush()); | 472 | T::regs().cr().modify(|w| w.fflush()); |
| 208 | 473 | ||
| 209 | if mode == Mode::GCM { | 474 | ctx.cipher.init_phase(&T::regs()); |
| 210 | // GCM init phase | ||
| 211 | T::regs().cr().modify(|w| w.set_gcm_ccmph(0)); | ||
| 212 | T::regs().cr().modify(|w| w.set_crypen(true)); | ||
| 213 | while T::regs().cr().read().crypen() {} | ||
| 214 | } | ||
| 215 | 475 | ||
| 216 | self.store_context(&mut ctx); | 476 | self.store_context(&mut ctx); |
| 217 | 477 | ||
| @@ -224,42 +484,38 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 224 | /// All AAD must be supplied to this function prior to starting the payload phase with `payload_blocking`. | 484 | /// All AAD must be supplied to this function prior to starting the payload phase with `payload_blocking`. |
| 225 | /// The AAD must be supplied in multiples of the block size (128 bits), except when supplying the last block. | 485 | /// The AAD must be supplied in multiples of the block size (128 bits), except when supplying the last block. |
| 226 | /// When supplying the last block of AAD, `last_aad_block` must be `true`. | 486 | /// When supplying the last block of AAD, `last_aad_block` must be `true`. |
| 227 | pub fn aad_blocking(&self, ctx: &mut Context, aad: &[u8], last_aad_block: bool) { | 487 | pub fn aad_blocking<'c, C: Cipher<'c> + CipherSized + CipherAuthenticated>( |
| 488 | &self, | ||
| 489 | ctx: &mut Context<'c, C>, | ||
| 490 | aad: &[u8], | ||
| 491 | last_aad_block: bool, | ||
| 492 | ) { | ||
| 228 | self.load_context(ctx); | 493 | self.load_context(ctx); |
| 229 | 494 | ||
| 230 | let block_size; | 495 | let last_block_remainder = aad.len() % C::BLOCK_SIZE; |
| 231 | if ctx.algo == Algorithm::DES { | ||
| 232 | block_size = DES_BLOCK_SIZE; | ||
| 233 | } else { | ||
| 234 | block_size = AES_BLOCK_SIZE; | ||
| 235 | } | ||
| 236 | let last_block_remainder = aad.len() % block_size; | ||
| 237 | 496 | ||
| 238 | // Perform checks for correctness. | 497 | // Perform checks for correctness. |
| 239 | if ctx.aad_complete { | 498 | if ctx.aad_complete { |
| 240 | panic!("Cannot update AAD after calling 'update'!") | 499 | panic!("Cannot update AAD after calling 'update'!") |
| 241 | } | 500 | } |
| 242 | if (ctx.mode != Mode::GCM) && (ctx.mode != Mode::GMAC) && (ctx.mode != Mode::CCM) { | ||
| 243 | panic!("Associated data only valid for GCM, GMAC, and CCM modes.") | ||
| 244 | } | ||
| 245 | if !last_aad_block { | 501 | if !last_aad_block { |
| 246 | if last_block_remainder != 0 { | 502 | if last_block_remainder != 0 { |
| 247 | panic!("Input length must be a multiple of {} bytes.", block_size); | 503 | panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE); |
| 248 | } | 504 | } |
| 249 | } | 505 | } |
| 250 | 506 | ||
| 251 | ctx.header_len += aad.len() as u64; | 507 | ctx.header_len += aad.len() as u64; |
| 252 | 508 | ||
| 253 | // GCM header phase | 509 | // Header phase |
| 254 | T::regs().cr().modify(|w| w.set_crypen(false)); | 510 | T::regs().cr().modify(|w| w.set_crypen(false)); |
| 255 | T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); | 511 | T::regs().cr().modify(|w| w.set_gcm_ccmph(1)); |
| 256 | T::regs().cr().modify(|w| w.set_crypen(true)); | 512 | T::regs().cr().modify(|w| w.set_crypen(true)); |
| 257 | 513 | ||
| 258 | // Load data into core, block by block. | 514 | // Load data into core, block by block. |
| 259 | let num_full_blocks = aad.len() / block_size; | 515 | let num_full_blocks = aad.len() / C::BLOCK_SIZE; |
| 260 | for block in 0..num_full_blocks { | 516 | for block in 0..num_full_blocks { |
| 261 | let mut index = block * block_size; | 517 | let mut index = block * C::BLOCK_SIZE; |
| 262 | let end_index = index + block_size; | 518 | let end_index = index + C::BLOCK_SIZE; |
| 263 | // Write block in | 519 | // Write block in |
| 264 | while index < end_index { | 520 | while index < end_index { |
| 265 | let mut in_word: [u8; 4] = [0; 4]; | 521 | let mut in_word: [u8; 4] = [0; 4]; |
| @@ -276,7 +532,7 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 276 | let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; | 532 | let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; |
| 277 | last_block[..last_block_remainder].copy_from_slice(&aad[aad.len() - last_block_remainder..aad.len()]); | 533 | last_block[..last_block_remainder].copy_from_slice(&aad[aad.len() - last_block_remainder..aad.len()]); |
| 278 | let mut index = 0; | 534 | let mut index = 0; |
| 279 | let end_index = block_size; | 535 | let end_index = C::BLOCK_SIZE; |
| 280 | // Write block in | 536 | // Write block in |
| 281 | while index < end_index { | 537 | while index < end_index { |
| 282 | let mut in_word: [u8; 4] = [0; 4]; | 538 | let mut in_word: [u8; 4] = [0; 4]; |
| @@ -307,16 +563,16 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 307 | /// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes. | 563 | /// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes. |
| 308 | /// Padding or ciphertext stealing must be managed by the application for these modes. | 564 | /// Padding or ciphertext stealing must be managed by the application for these modes. |
| 309 | /// Data must also be a multiple of block size unless `last_block` is `true`. | 565 | /// Data must also be a multiple of block size unless `last_block` is `true`. |
| 310 | pub fn payload_blocking(&self, ctx: &mut Context, input: &[u8], output: &mut [u8], last_block: bool) { | 566 | pub fn payload_blocking<'c, C: Cipher<'c> + CipherSized>( |
| 567 | &self, | ||
| 568 | ctx: &mut Context<'c, C>, | ||
| 569 | input: &[u8], | ||
| 570 | output: &mut [u8], | ||
| 571 | last_block: bool, | ||
| 572 | ) { | ||
| 311 | self.load_context(ctx); | 573 | self.load_context(ctx); |
| 312 | 574 | ||
| 313 | let block_size; | 575 | let last_block_remainder = input.len() % C::BLOCK_SIZE; |
| 314 | if ctx.algo == Algorithm::DES { | ||
| 315 | block_size = DES_BLOCK_SIZE; | ||
| 316 | } else { | ||
| 317 | block_size = AES_BLOCK_SIZE; | ||
| 318 | } | ||
| 319 | let last_block_remainder = input.len() % block_size; | ||
| 320 | 576 | ||
| 321 | // Perform checks for correctness. | 577 | // Perform checks for correctness. |
| 322 | if !ctx.aad_complete && ctx.header_len > 0 { | 578 | if !ctx.aad_complete && ctx.header_len > 0 { |
| @@ -328,9 +584,6 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 328 | T::regs().cr().modify(|w| w.fflush()); | 584 | T::regs().cr().modify(|w| w.fflush()); |
| 329 | T::regs().cr().modify(|w| w.set_crypen(true)); | 585 | T::regs().cr().modify(|w| w.set_crypen(true)); |
| 330 | } | 586 | } |
| 331 | if ctx.mode == Mode::GMAC { | ||
| 332 | panic!("GMAC works on header data only. Do not call this function for GMAC."); | ||
| 333 | } | ||
| 334 | if ctx.last_block_processed { | 587 | if ctx.last_block_processed { |
| 335 | panic!("The last block has already been processed!"); | 588 | panic!("The last block has already been processed!"); |
| 336 | } | 589 | } |
| @@ -339,24 +592,23 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 339 | } | 592 | } |
| 340 | if !last_block { | 593 | if !last_block { |
| 341 | if last_block_remainder != 0 { | 594 | if last_block_remainder != 0 { |
| 342 | panic!("Input length must be a multiple of {} bytes.", block_size); | 595 | panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE); |
| 343 | } | 596 | } |
| 344 | } | 597 | } |
| 345 | if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { | 598 | if C::REQUIRES_PADDING { |
| 346 | if last_block_remainder != 0 { | 599 | if last_block_remainder != 0 { |
| 347 | panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", block_size); | 600 | panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE); |
| 348 | } | 601 | } |
| 349 | } | 602 | } |
| 350 | |||
| 351 | if last_block { | 603 | if last_block { |
| 352 | ctx.last_block_processed = true; | 604 | ctx.last_block_processed = true; |
| 353 | } | 605 | } |
| 354 | 606 | ||
| 355 | // Load data into core, block by block. | 607 | // Load data into core, block by block. |
| 356 | let num_full_blocks = input.len() / block_size; | 608 | let num_full_blocks = input.len() / C::BLOCK_SIZE; |
| 357 | for block in 0..num_full_blocks { | 609 | for block in 0..num_full_blocks { |
| 358 | let mut index = block * block_size; | 610 | let mut index = block * C::BLOCK_SIZE; |
| 359 | let end_index = index + block_size; | 611 | let end_index = index + C::BLOCK_SIZE; |
| 360 | // Write block in | 612 | // Write block in |
| 361 | while index < end_index { | 613 | while index < end_index { |
| 362 | let mut in_word: [u8; 4] = [0; 4]; | 614 | let mut in_word: [u8; 4] = [0; 4]; |
| @@ -364,8 +616,8 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 364 | T::regs().din().write_value(u32::from_ne_bytes(in_word)); | 616 | T::regs().din().write_value(u32::from_ne_bytes(in_word)); |
| 365 | index += 4; | 617 | index += 4; |
| 366 | } | 618 | } |
| 367 | let mut index = block * block_size; | 619 | let mut index = block * C::BLOCK_SIZE; |
| 368 | let end_index = index + block_size; | 620 | let end_index = index + C::BLOCK_SIZE; |
| 369 | // Block until there is output to read. | 621 | // Block until there is output to read. |
| 370 | while !T::regs().sr().read().ofne() {} | 622 | while !T::regs().sr().read().ofne() {} |
| 371 | // Read block out | 623 | // Read block out |
| @@ -378,21 +630,13 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 378 | 630 | ||
| 379 | // Handle the final block, which is incomplete. | 631 | // Handle the final block, which is incomplete. |
| 380 | if last_block_remainder > 0 { | 632 | if last_block_remainder > 0 { |
| 381 | if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { | 633 | ctx.cipher.pre_final_block(&T::regs()); |
| 382 | //Handle special GCM partial block process. | ||
| 383 | T::regs().cr().modify(|w| w.set_crypen(false)); | ||
| 384 | T::regs().cr().modify(|w| w.set_algomode3(false)); | ||
| 385 | T::regs().cr().modify(|w| w.set_algomode0(6)); | ||
| 386 | let iv1r = T::regs().csgcmccmr(7).read() - 1; | ||
| 387 | T::regs().init(1).ivrr().write_value(iv1r); | ||
| 388 | T::regs().cr().modify(|w| w.set_crypen(true)); | ||
| 389 | } | ||
| 390 | 634 | ||
| 391 | let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; | 635 | let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; |
| 392 | let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; | 636 | let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; |
| 393 | last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); | 637 | last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]); |
| 394 | let mut index = 0; | 638 | let mut index = 0; |
| 395 | let end_index = block_size; | 639 | let end_index = C::BLOCK_SIZE; |
| 396 | // Write block in | 640 | // Write block in |
| 397 | while index < end_index { | 641 | while index < end_index { |
| 398 | let mut in_word: [u8; 4] = [0; 4]; | 642 | let mut in_word: [u8; 4] = [0; 4]; |
| @@ -401,7 +645,7 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 401 | index += 4; | 645 | index += 4; |
| 402 | } | 646 | } |
| 403 | let mut index = 0; | 647 | let mut index = 0; |
| 404 | let end_index = block_size; | 648 | let end_index = C::BLOCK_SIZE; |
| 405 | // Block until there is output to read. | 649 | // Block until there is output to read. |
| 406 | while !T::regs().sr().read().ofne() {} | 650 | while !T::regs().sr().read().ofne() {} |
| 407 | // Read block out | 651 | // Read block out |
| @@ -416,41 +660,19 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 416 | output[output_len - last_block_remainder..output_len] | 660 | output[output_len - last_block_remainder..output_len] |
| 417 | .copy_from_slice(&intermediate_data[0..last_block_remainder]); | 661 | .copy_from_slice(&intermediate_data[0..last_block_remainder]); |
| 418 | 662 | ||
| 419 | if ctx.mode == Mode::GCM && ctx.dir == Direction::Encrypt { | 663 | ctx.cipher.post_final_block(&T::regs(), ctx.dir, &intermediate_data); |
| 420 | //Handle special GCM partial block process. | ||
| 421 | T::regs().cr().modify(|w| w.set_crypen(false)); | ||
| 422 | T::regs().cr().write(|w| w.set_algomode3(true)); | ||
| 423 | T::regs().cr().write(|w| w.set_algomode0(0)); | ||
| 424 | T::regs().init(1).ivrr().write_value(2); | ||
| 425 | T::regs().cr().modify(|w| w.set_crypen(true)); | ||
| 426 | T::regs().cr().modify(|w| w.set_gcm_ccmph(3)); | ||
| 427 | let mut index = 0; | ||
| 428 | let end_index = block_size; | ||
| 429 | while index < end_index { | ||
| 430 | let mut in_word: [u8; 4] = [0; 4]; | ||
| 431 | in_word.copy_from_slice(&intermediate_data[index..index + 4]); | ||
| 432 | T::regs().din().write_value(u32::from_ne_bytes(in_word)); | ||
| 433 | index += 4; | ||
| 434 | } | ||
| 435 | for _ in 0..4 { | ||
| 436 | T::regs().dout().read(); | ||
| 437 | } | ||
| 438 | } | ||
| 439 | } | 664 | } |
| 440 | 665 | ||
| 441 | ctx.payload_len += input.len() as u64; | 666 | ctx.payload_len += input.len() as u64; |
| 442 | } | 667 | } |
| 443 | 668 | ||
| 444 | /// This function only needs to be called for GCM, CCM, and GMAC modes to | 669 | /// This function only needs to be called for GCM, CCM, and GMAC modes to |
| 445 | /// generate an authentication tag. Calling this function on any other mode | 670 | /// generate an authentication tag. |
| 446 | /// does nothing except consumes the context. A buffer for the authentication | 671 | pub fn finish_blocking<'c, C: Cipher<'c> + CipherSized + CipherAuthenticated>( |
| 447 | /// tag must be supplied. | 672 | &self, |
| 448 | pub fn finish_blocking(&self, mut ctx: Context, tag: &mut [u8; 16]) { | 673 | mut ctx: Context<'c, C>, |
| 449 | // Just consume the context if called for any other mode. | 674 | tag: &mut [u8; 16], |
| 450 | if (ctx.mode != Mode::GCM) || (ctx.mode != Mode::CCM) || (ctx.mode != Mode::GMAC) { | 675 | ) { |
| 451 | return; | ||
| 452 | } | ||
| 453 | |||
| 454 | self.load_context(&mut ctx); | 676 | self.load_context(&mut ctx); |
| 455 | 677 | ||
| 456 | T::regs().cr().modify(|w| w.set_crypen(false)); | 678 | T::regs().cr().modify(|w| w.set_crypen(false)); |
| @@ -477,17 +699,6 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 477 | T::regs().cr().modify(|w| w.set_crypen(false)); | 699 | T::regs().cr().modify(|w| w.set_crypen(false)); |
| 478 | } | 700 | } |
| 479 | 701 | ||
| 480 | fn prepare_key(&self, ctx: &Context) { | ||
| 481 | if ctx.algo == Algorithm::AES && ctx.dir == Direction::Decrypt { | ||
| 482 | if (ctx.mode == Mode::ECB) || (ctx.mode == Mode::CBC) { | ||
| 483 | T::regs().cr().modify(|w| w.set_algomode0(7)); | ||
| 484 | T::regs().cr().modify(|w| w.set_algomode3(false)); | ||
| 485 | T::regs().cr().modify(|w| w.set_crypen(true)); | ||
| 486 | while T::regs().sr().read().busy() {} | ||
| 487 | } | ||
| 488 | } | ||
| 489 | } | ||
| 490 | |||
| 491 | fn load_key(&self, key: &[u8]) { | 702 | fn load_key(&self, key: &[u8]) { |
| 492 | // Load the key into the registers. | 703 | // Load the key into the registers. |
| 493 | let mut keyidx = 0; | 704 | let mut keyidx = 0; |
| @@ -524,7 +735,7 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 524 | T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword)); | 735 | T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword)); |
| 525 | } | 736 | } |
| 526 | 737 | ||
| 527 | fn store_context(&self, ctx: &mut Context) { | 738 | fn store_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &mut Context<'c, C>) { |
| 528 | // Wait for data block processing to finish. | 739 | // Wait for data block processing to finish. |
| 529 | while !T::regs().sr().read().ifem() {} | 740 | while !T::regs().sr().read().ifem() {} |
| 530 | while T::regs().sr().read().ofne() {} | 741 | while T::regs().sr().read().ofne() {} |
| @@ -545,7 +756,7 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 545 | } | 756 | } |
| 546 | } | 757 | } |
| 547 | 758 | ||
| 548 | fn load_context(&self, ctx: &Context) { | 759 | fn load_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &Context<'c, C>) { |
| 549 | // Reload state registers. | 760 | // Reload state registers. |
| 550 | T::regs().cr().write(|w| w.0 = ctx.cr); | 761 | T::regs().cr().write(|w| w.0 = ctx.cr); |
| 551 | T::regs().init(0).ivlr().write_value(ctx.iv[0]); | 762 | T::regs().init(0).ivlr().write_value(ctx.iv[0]); |
| @@ -556,10 +767,10 @@ impl<'d, T: Instance> Cryp<'d, T> { | |||
| 556 | T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]); | 767 | T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]); |
| 557 | T::regs().csgcmr(i).write_value(ctx.csgcm[i]); | 768 | T::regs().csgcmr(i).write_value(ctx.csgcm[i]); |
| 558 | } | 769 | } |
| 559 | self.load_key(ctx.key); | 770 | self.load_key(ctx.cipher.key()); |
| 560 | 771 | ||
| 561 | // Prepare key if applicable. | 772 | // Prepare key if applicable. |
| 562 | self.prepare_key(ctx); | 773 | ctx.cipher.prepare_key(&T::regs()); |
| 563 | T::regs().cr().write(|w| w.0 = ctx.cr); | 774 | T::regs().cr().write(|w| w.0 = ctx.cr); |
| 564 | 775 | ||
| 565 | // Enable crypto processor. | 776 | // Enable crypto processor. |
