aboutsummaryrefslogtreecommitdiff
path: root/embassy-stm32/src/cryp
diff options
context:
space:
mode:
authorCaleb Garrett <[email protected]>2024-03-12 12:01:14 -0400
committerCaleb Garrett <[email protected]>2024-03-12 12:01:14 -0400
commit61050a16d5f02a7db718c6e39c811e6e434b032b (patch)
tree4a4ade0865a373b84f5725f827304c75412c37e8 /embassy-stm32/src/cryp
parent6e9e8eeb5f6458833b28a08e7480b2630107d79c (diff)
Add CRYP DMA support. Updated example.
Diffstat (limited to 'embassy-stm32/src/cryp')
-rw-r--r--embassy-stm32/src/cryp/mod.rs613
1 files changed, 586 insertions, 27 deletions
diff --git a/embassy-stm32/src/cryp/mod.rs b/embassy-stm32/src/cryp/mod.rs
index 12353baa0..1a601533d 100644
--- a/embassy-stm32/src/cryp/mod.rs
+++ b/embassy-stm32/src/cryp/mod.rs
@@ -2,12 +2,14 @@
2#[cfg(any(cryp_v2, cryp_v3))] 2#[cfg(any(cryp_v2, cryp_v3))]
3use core::cmp::min; 3use core::cmp::min;
4use core::marker::PhantomData; 4use core::marker::PhantomData;
5use core::ptr;
5 6
6use embassy_hal_internal::{into_ref, PeripheralRef}; 7use embassy_hal_internal::{into_ref, PeripheralRef};
7use embassy_sync::waitqueue::AtomicWaker; 8use embassy_sync::waitqueue::AtomicWaker;
8 9
10use crate::dma::{NoDma, Priority, Transfer, TransferOptions};
9use crate::interrupt::typelevel::Interrupt; 11use crate::interrupt::typelevel::Interrupt;
10use crate::{dma::NoDma, interrupt, pac, peripherals, Peripheral}; 12use crate::{interrupt, pac, peripherals, Peripheral};
11 13
12const DES_BLOCK_SIZE: usize = 8; // 64 bits 14const DES_BLOCK_SIZE: usize = 8; // 64 bits
13const AES_BLOCK_SIZE: usize = 16; // 128 bits 15const AES_BLOCK_SIZE: usize = 16; // 128 bits
@@ -55,18 +57,25 @@ pub trait Cipher<'c> {
55 fn prepare_key(&self, _p: &pac::cryp::Cryp) {} 57 fn prepare_key(&self, _p: &pac::cryp::Cryp) {}
56 58
57 /// Performs any cipher-specific initialization. 59 /// Performs any cipher-specific initialization.
58 fn init_phase<T: Instance, D>(&self, _p: &pac::cryp::Cryp, _cryp: &Cryp<T, D>) {} 60 fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, _p: &pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {}
61
62 /// Performs any cipher-specific initialization.
63 async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, _p: &pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>)
64 where
65 DmaIn: crate::cryp::DmaIn<T>,
66 DmaOut: crate::cryp::DmaOut<T>,
67 {}
59 68
60 /// Called prior to processing the last data block for cipher-specific operations. 69 /// Called prior to processing the last data block for cipher-specific operations.
61 fn pre_final_block(&self, _p: &pac::cryp::Cryp, _dir: Direction, _padding_len: usize) -> [u32; 4] { 70 fn pre_final(&self, _p: &pac::cryp::Cryp, _dir: Direction, _padding_len: usize) -> [u32; 4] {
62 return [0; 4]; 71 return [0; 4];
63 } 72 }
64 73
65 /// Called after processing the last data block for cipher-specific operations. 74 /// Called after processing the last data block for cipher-specific operations.
66 fn post_final_block<T: Instance, D>( 75 fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
67 &self, 76 &self,
68 _p: &pac::cryp::Cryp, 77 _p: &pac::cryp::Cryp,
69 _cryp: &Cryp<T, D>, 78 _cryp: &Cryp<T, DmaIn, DmaOut>,
70 _dir: Direction, 79 _dir: Direction,
71 _int_data: &mut [u8; AES_BLOCK_SIZE], 80 _int_data: &mut [u8; AES_BLOCK_SIZE],
72 _temp1: [u32; 4], 81 _temp1: [u32; 4],
@@ -74,6 +83,21 @@ pub trait Cipher<'c> {
74 ) { 83 ) {
75 } 84 }
76 85
86 /// Called after processing the last data block for cipher-specific operations.
87 async fn post_final<T: Instance, DmaIn, DmaOut>(
88 &self,
89 _p: &pac::cryp::Cryp,
90 _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
91 _dir: Direction,
92 _int_data: &mut [u8; AES_BLOCK_SIZE],
93 _temp1: [u32; 4],
94 _padding_mask: [u8; 16],
95 )
96 where
97 DmaIn: crate::cryp::DmaIn<T>,
98 DmaOut: crate::cryp::DmaOut<T>,
99 {}
100
77 /// Called prior to processing the first associated data block for cipher-specific operations. 101 /// Called prior to processing the first associated data block for cipher-specific operations.
78 fn get_header_block(&self) -> &[u8] { 102 fn get_header_block(&self) -> &[u8] {
79 return [0; 0].as_slice(); 103 return [0; 0].as_slice();
@@ -449,14 +473,20 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> {
449 p.cr().modify(|w| w.set_algomode3(true)); 473 p.cr().modify(|w| w.set_algomode3(true));
450 } 474 }
451 475
452 fn init_phase<T: Instance, D>(&self, p: &pac::cryp::Cryp, _cryp: &Cryp<T, D>) { 476 fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {
477 p.cr().modify(|w| w.set_gcm_ccmph(0));
478 p.cr().modify(|w| w.set_crypen(true));
479 while p.cr().read().crypen() {}
480 }
481
482 async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>) {
453 p.cr().modify(|w| w.set_gcm_ccmph(0)); 483 p.cr().modify(|w| w.set_gcm_ccmph(0));
454 p.cr().modify(|w| w.set_crypen(true)); 484 p.cr().modify(|w| w.set_crypen(true));
455 while p.cr().read().crypen() {} 485 while p.cr().read().crypen() {}
456 } 486 }
457 487
458 #[cfg(cryp_v2)] 488 #[cfg(cryp_v2)]
459 fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] { 489 fn pre_final(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
460 //Handle special GCM partial block process. 490 //Handle special GCM partial block process.
461 if dir == Direction::Encrypt { 491 if dir == Direction::Encrypt {
462 p.cr().modify(|w| w.set_crypen(false)); 492 p.cr().modify(|w| w.set_crypen(false));
@@ -477,10 +507,10 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> {
477 } 507 }
478 508
479 #[cfg(cryp_v2)] 509 #[cfg(cryp_v2)]
480 fn post_final_block<T: Instance, D>( 510 fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
481 &self, 511 &self,
482 p: &pac::cryp::Cryp, 512 p: &pac::cryp::Cryp,
483 cryp: &Cryp<T, D>, 513 cryp: &Cryp<T, DmaIn, DmaOut>,
484 dir: Direction, 514 dir: Direction,
485 int_data: &mut [u8; AES_BLOCK_SIZE], 515 int_data: &mut [u8; AES_BLOCK_SIZE],
486 _temp1: [u32; 4], 516 _temp1: [u32; 4],
@@ -501,6 +531,43 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> {
501 cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data); 531 cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
502 } 532 }
503 } 533 }
534
535 #[cfg(cryp_v2)]
536 async fn post_final<T: Instance, DmaIn, DmaOut>(
537 &self,
538 p: &pac::cryp::Cryp,
539 cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
540 dir: Direction,
541 int_data: &mut [u8; AES_BLOCK_SIZE],
542 _temp1: [u32; 4],
543 padding_mask: [u8; AES_BLOCK_SIZE],
544 )
545 where
546 DmaIn: crate::cryp::DmaIn<T>,
547 DmaOut: crate::cryp::DmaOut<T>,
548 {
549
550 if dir == Direction::Encrypt {
551 // Handle special GCM partial block process.
552 p.cr().modify(|w| w.set_crypen(false));
553 p.cr().modify(|w| w.set_algomode3(true));
554 p.cr().modify(|w| w.set_algomode0(0));
555 for i in 0..AES_BLOCK_SIZE {
556 int_data[i] = int_data[i] & padding_mask[i];
557 }
558 p.cr().modify(|w| w.set_crypen(true));
559 p.cr().modify(|w| w.set_gcm_ccmph(3));
560
561 let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
562
563 let read = Cryp::<T, DmaIn, DmaOut>::read_bytes(&mut cryp.outdma, Self::BLOCK_SIZE, &mut out_data);
564 let write = Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, int_data);
565
566 embassy_futures::join::join(read, write).await;
567
568 int_data.copy_from_slice(&out_data);
569 }
570 }
504} 571}
505 572
506#[cfg(any(cryp_v2, cryp_v3))] 573#[cfg(any(cryp_v2, cryp_v3))]
@@ -549,14 +616,20 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> {
549 p.cr().modify(|w| w.set_algomode3(true)); 616 p.cr().modify(|w| w.set_algomode3(true));
550 } 617 }
551 618
552 fn init_phase<T: Instance, D>(&self, p: &pac::cryp::Cryp, _cryp: &Cryp<T, D>) { 619 fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {
620 p.cr().modify(|w| w.set_gcm_ccmph(0));
621 p.cr().modify(|w| w.set_crypen(true));
622 while p.cr().read().crypen() {}
623 }
624
625 async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>) {
553 p.cr().modify(|w| w.set_gcm_ccmph(0)); 626 p.cr().modify(|w| w.set_gcm_ccmph(0));
554 p.cr().modify(|w| w.set_crypen(true)); 627 p.cr().modify(|w| w.set_crypen(true));
555 while p.cr().read().crypen() {} 628 while p.cr().read().crypen() {}
556 } 629 }
557 630
558 #[cfg(cryp_v2)] 631 #[cfg(cryp_v2)]
559 fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] { 632 fn pre_final(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
560 //Handle special GCM partial block process. 633 //Handle special GCM partial block process.
561 if dir == Direction::Encrypt { 634 if dir == Direction::Encrypt {
562 p.cr().modify(|w| w.set_crypen(false)); 635 p.cr().modify(|w| w.set_crypen(false));
@@ -577,10 +650,10 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> {
577 } 650 }
578 651
579 #[cfg(cryp_v2)] 652 #[cfg(cryp_v2)]
580 fn post_final_block<T: Instance, D>( 653 fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
581 &self, 654 &self,
582 p: &pac::cryp::Cryp, 655 p: &pac::cryp::Cryp,
583 cryp: &Cryp<T, D>, 656 cryp: &Cryp<T, DmaIn, DmaOut>,
584 dir: Direction, 657 dir: Direction,
585 int_data: &mut [u8; AES_BLOCK_SIZE], 658 int_data: &mut [u8; AES_BLOCK_SIZE],
586 _temp1: [u32; 4], 659 _temp1: [u32; 4],
@@ -601,6 +674,41 @@ impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> {
601 cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data); 674 cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
602 } 675 }
603 } 676 }
677
678 #[cfg(cryp_v2)]
679 async fn post_final<T: Instance, DmaIn, DmaOut>(
680 &self,
681 p: &pac::cryp::Cryp,
682 cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
683 dir: Direction,
684 int_data: &mut [u8; AES_BLOCK_SIZE],
685 _temp1: [u32; 4],
686 padding_mask: [u8; AES_BLOCK_SIZE],
687 )
688 where
689 DmaIn: crate::cryp::DmaIn<T>,
690 DmaOut: crate::cryp::DmaOut<T>,
691 {
692
693 if dir == Direction::Encrypt {
694 // Handle special GCM partial block process.
695 p.cr().modify(|w| w.set_crypen(false));
696 p.cr().modify(|w| w.set_algomode3(true));
697 p.cr().modify(|w| w.set_algomode0(0));
698 for i in 0..AES_BLOCK_SIZE {
699 int_data[i] = int_data[i] & padding_mask[i];
700 }
701 p.cr().modify(|w| w.set_crypen(true));
702 p.cr().modify(|w| w.set_gcm_ccmph(3));
703
704 let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
705
706 let read = Cryp::<T, DmaIn, DmaOut>::read_bytes(&mut cryp.outdma, Self::BLOCK_SIZE, &mut out_data);
707 let write = Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, int_data);
708
709 embassy_futures::join::join(read, write).await;
710 }
711 }
604} 712}
605 713
606#[cfg(any(cryp_v2, cryp_v3))] 714#[cfg(any(cryp_v2, cryp_v3))]
@@ -707,7 +815,7 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip
707 p.cr().modify(|w| w.set_algomode3(true)); 815 p.cr().modify(|w| w.set_algomode3(true));
708 } 816 }
709 817
710 fn init_phase<T: Instance, D>(&self, p: &pac::cryp::Cryp, cryp: &Cryp<T, D>) { 818 fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, cryp: &Cryp<T, DmaIn, DmaOut>) {
711 p.cr().modify(|w| w.set_gcm_ccmph(0)); 819 p.cr().modify(|w| w.set_gcm_ccmph(0));
712 820
713 cryp.write_bytes_blocking(Self::BLOCK_SIZE, &self.block0); 821 cryp.write_bytes_blocking(Self::BLOCK_SIZE, &self.block0);
@@ -716,12 +824,25 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip
716 while p.cr().read().crypen() {} 824 while p.cr().read().crypen() {}
717 } 825 }
718 826
827 async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: &pac::cryp::Cryp, cryp: &mut Cryp<'_, T, DmaIn, DmaOut>)
828 where
829 DmaIn: crate::cryp::DmaIn<T>,
830 DmaOut: crate::cryp::DmaOut<T>,
831 {
832 p.cr().modify(|w| w.set_gcm_ccmph(0));
833
834 Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, &self.block0).await;
835
836 p.cr().modify(|w| w.set_crypen(true));
837 while p.cr().read().crypen() {}
838 }
839
719 fn get_header_block(&self) -> &[u8] { 840 fn get_header_block(&self) -> &[u8] {
720 return &self.aad_header[0..self.aad_header_len]; 841 return &self.aad_header[0..self.aad_header_len];
721 } 842 }
722 843
723 #[cfg(cryp_v2)] 844 #[cfg(cryp_v2)]
724 fn pre_final_block(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] { 845 fn pre_final(&self, p: &pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
725 //Handle special CCM partial block process. 846 //Handle special CCM partial block process.
726 let mut temp1 = [0; 4]; 847 let mut temp1 = [0; 4];
727 if dir == Direction::Decrypt { 848 if dir == Direction::Decrypt {
@@ -747,10 +868,10 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip
747 } 868 }
748 869
749 #[cfg(cryp_v2)] 870 #[cfg(cryp_v2)]
750 fn post_final_block<T: Instance, D>( 871 fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
751 &self, 872 &self,
752 p: &pac::cryp::Cryp, 873 p: &pac::cryp::Cryp,
753 cryp: &Cryp<T, D>, 874 cryp: &Cryp<T, DmaIn, DmaOut>,
754 dir: Direction, 875 dir: Direction,
755 int_data: &mut [u8; AES_BLOCK_SIZE], 876 int_data: &mut [u8; AES_BLOCK_SIZE],
756 temp1: [u32; 4], 877 temp1: [u32; 4],
@@ -782,6 +903,47 @@ impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cip
782 cryp.write_words_blocking(Self::BLOCK_SIZE, &in_data); 903 cryp.write_words_blocking(Self::BLOCK_SIZE, &in_data);
783 } 904 }
784 } 905 }
906
907 #[cfg(cryp_v2)]
908 async fn post_final<T: Instance, DmaIn, DmaOut>(
909 &self,
910 p: &pac::cryp::Cryp,
911 cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
912 dir: Direction,
913 int_data: &mut [u8; AES_BLOCK_SIZE],
914 temp1: [u32; 4],
915 padding_mask: [u8; 16],
916 )
917 where
918 DmaIn: crate::cryp::DmaIn<T>,
919 DmaOut: crate::cryp::DmaOut<T>,
920 {
921 if dir == Direction::Decrypt {
922 //Handle special CCM partial block process.
923 let mut temp2 = [0; 4];
924 temp2[0] = p.csgcmccmr(0).read().swap_bytes();
925 temp2[1] = p.csgcmccmr(1).read().swap_bytes();
926 temp2[2] = p.csgcmccmr(2).read().swap_bytes();
927 temp2[3] = p.csgcmccmr(3).read().swap_bytes();
928 p.cr().modify(|w| w.set_algomode3(true));
929 p.cr().modify(|w| w.set_algomode0(1));
930 p.cr().modify(|w| w.set_gcm_ccmph(3));
931 // Header phase
932 p.cr().modify(|w| w.set_gcm_ccmph(1));
933 for i in 0..AES_BLOCK_SIZE {
934 int_data[i] = int_data[i] & padding_mask[i];
935 }
936 let mut in_data: [u32; 4] = [0; 4];
937 for i in 0..in_data.len() {
938 let mut int_bytes: [u8; 4] = [0; 4];
939 int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]);
940 let int_word = u32::from_le_bytes(int_bytes);
941 in_data[i] = int_word;
942 in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i];
943 }
944 Cryp::<T, DmaIn, DmaOut>::write_words(&mut cryp.indma, Self::BLOCK_SIZE, &in_data).await;
945 }
946 }
785} 947}
786 948
787#[cfg(any(cryp_v2, cryp_v3))] 949#[cfg(any(cryp_v2, cryp_v3))]
@@ -849,18 +1011,18 @@ pub enum Direction {
849} 1011}
850 1012
851/// Crypto Accelerator Driver 1013/// Crypto Accelerator Driver
852pub struct Cryp<'d, T: Instance, D = NoDma> { 1014pub struct Cryp<'d, T: Instance, DmaIn = NoDma, DmaOut = NoDma> {
853 _peripheral: PeripheralRef<'d, T>, 1015 _peripheral: PeripheralRef<'d, T>,
854 indma: PeripheralRef<'d, D>, 1016 indma: PeripheralRef<'d, DmaIn>,
855 outdma: PeripheralRef<'d, D>, 1017 outdma: PeripheralRef<'d, DmaOut>,
856} 1018}
857 1019
858impl<'d, T: Instance, D> Cryp<'d, T, D> { 1020impl<'d, T: Instance, DmaIn, DmaOut> Cryp<'d, T, DmaIn, DmaOut> {
859 /// Create a new CRYP driver. 1021 /// Create a new CRYP driver.
860 pub fn new( 1022 pub fn new(
861 peri: impl Peripheral<P = T> + 'd, 1023 peri: impl Peripheral<P = T> + 'd,
862 indma: impl Peripheral<P = D> + 'd, 1024 indma: impl Peripheral<P = DmaIn> + 'd,
863 outdma: impl Peripheral<P = D> + 'd, 1025 outdma: impl Peripheral<P = DmaOut> + 'd,
864 _irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd, 1026 _irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd,
865 ) -> Self { 1027 ) -> Self {
866 T::enable_and_reset(); 1028 T::enable_and_reset();
@@ -881,7 +1043,89 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
881 /// Key size must be 128, 192, or 256 bits. 1043 /// Key size must be 128, 192, or 256 bits.
882 /// Initialization vector must only be supplied if necessary. 1044 /// Initialization vector must only be supplied if necessary.
883 /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode. 1045 /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode.
884 pub fn start<'c, C: Cipher<'c> + CipherSized + IVSized>(&self, cipher: &'c C, dir: Direction) -> Context<'c, C> { 1046 pub fn start_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>(&self, cipher: &'c C, dir: Direction) -> Context<'c, C> {
1047 let mut ctx: Context<'c, C> = Context {
1048 dir,
1049 last_block_processed: false,
1050 cr: 0,
1051 iv: [0; 4],
1052 csgcmccm: [0; 8],
1053 csgcm: [0; 8],
1054 aad_complete: false,
1055 header_len: 0,
1056 payload_len: 0,
1057 cipher: cipher,
1058 phantom_data: PhantomData,
1059 header_processed: false,
1060 aad_buffer: [0; 16],
1061 aad_buffer_len: 0,
1062 };
1063
1064 T::regs().cr().modify(|w| w.set_crypen(false));
1065
1066 let key = ctx.cipher.key();
1067
1068 if key.len() == (128 / 8) {
1069 T::regs().cr().modify(|w| w.set_keysize(0));
1070 } else if key.len() == (192 / 8) {
1071 T::regs().cr().modify(|w| w.set_keysize(1));
1072 } else if key.len() == (256 / 8) {
1073 T::regs().cr().modify(|w| w.set_keysize(2));
1074 }
1075
1076 self.load_key(key);
1077
1078 // Set data type to 8-bit. This will match software implementations.
1079 T::regs().cr().modify(|w| w.set_datatype(2));
1080
1081 ctx.cipher.prepare_key(&T::regs());
1082
1083 ctx.cipher.set_algomode(&T::regs());
1084
1085 // Set encrypt/decrypt
1086 if dir == Direction::Encrypt {
1087 T::regs().cr().modify(|w| w.set_algodir(false));
1088 } else {
1089 T::regs().cr().modify(|w| w.set_algodir(true));
1090 }
1091
1092 // Load the IV into the registers.
1093 let iv = ctx.cipher.iv();
1094 let mut full_iv: [u8; 16] = [0; 16];
1095 full_iv[0..iv.len()].copy_from_slice(iv);
1096 let mut iv_idx = 0;
1097 let mut iv_word: [u8; 4] = [0; 4];
1098 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
1099 iv_idx += 4;
1100 T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
1101 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
1102 iv_idx += 4;
1103 T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
1104 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
1105 iv_idx += 4;
1106 T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
1107 iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
1108 T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));
1109
1110 // Flush in/out FIFOs
1111 T::regs().cr().modify(|w| w.fflush());
1112
1113 ctx.cipher.init_phase_blocking(&T::regs(), self);
1114
1115 self.store_context(&mut ctx);
1116
1117 ctx
1118 }
1119
1120 /// Start a new cipher operation.
1121 /// Key size must be 128, 192, or 256 bits.
1122 /// Initialization vector must only be supplied if necessary.
1123 /// Panics if there is any mismatch in parameters, such as an incorrect IV length or invalid mode.
1124 pub async fn start<'c, C: Cipher<'c> + CipherSized + IVSized>(&mut self, cipher: &'c C, dir: Direction) -> Context<'c, C>
1125 where
1126 DmaIn: crate::cryp::DmaIn<T>,
1127 DmaOut: crate::cryp::DmaOut<T>,
1128 {
885 let mut ctx: Context<'c, C> = Context { 1129 let mut ctx: Context<'c, C> = Context {
886 dir, 1130 dir,
887 last_block_processed: false, 1131 last_block_processed: false,
@@ -948,7 +1192,7 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
948 // Flush in/out FIFOs 1192 // Flush in/out FIFOs
949 T::regs().cr().modify(|w| w.fflush()); 1193 T::regs().cr().modify(|w| w.fflush());
950 1194
951 ctx.cipher.init_phase(&T::regs(), self); 1195 ctx.cipher.init_phase(&T::regs(), self).await;
952 1196
953 self.store_context(&mut ctx); 1197 self.store_context(&mut ctx);
954 1198
@@ -1053,6 +1297,107 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
1053 self.store_context(ctx); 1297 self.store_context(ctx);
1054 } 1298 }
1055 1299
1300 #[cfg(any(cryp_v2, cryp_v3))]
1301 /// Controls the header phase of cipher processing.
1302 /// This function is only valid for GCM, CCM, and GMAC modes.
1303 /// It only needs to be called if using one of these modes and there is associated data.
1304 /// All AAD must be supplied to this function prior to starting the payload phase with `payload_blocking`.
1305 /// The AAD must be supplied in multiples of the block size (128 bits), except when supplying the last block.
1306 /// When supplying the last block of AAD, `last_aad_block` must be `true`.
1307 pub async fn aad<
1308 'c,
1309 const TAG_SIZE: usize,
1310 C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
1311 >(
1312 &mut self,
1313 ctx: &mut Context<'c, C>,
1314 aad: &[u8],
1315 last_aad_block: bool,
1316 )
1317 where
1318 DmaIn: crate::cryp::DmaIn<T>,
1319 DmaOut: crate::cryp::DmaOut<T>,
1320 {
1321 self.load_context(ctx);
1322
1323 // Perform checks for correctness.
1324 if ctx.aad_complete {
1325 panic!("Cannot update AAD after starting payload!")
1326 }
1327
1328 ctx.header_len += aad.len() as u64;
1329
1330 // Header phase
1331 T::regs().cr().modify(|w| w.set_crypen(false));
1332 T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
1333 T::regs().cr().modify(|w| w.set_crypen(true));
1334
1335 // First write the header B1 block if not yet written.
1336 if !ctx.header_processed {
1337 ctx.header_processed = true;
1338 let header = ctx.cipher.get_header_block();
1339 ctx.aad_buffer[0..header.len()].copy_from_slice(header);
1340 ctx.aad_buffer_len += header.len();
1341 }
1342
1343 // Fill the header block to make a full block.
1344 let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len);
1345 ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]);
1346 ctx.aad_buffer_len += len_to_copy;
1347 ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
1348 let mut aad_len_remaining = aad.len() - len_to_copy;
1349
1350 if ctx.aad_buffer_len < C::BLOCK_SIZE {
1351 // The buffer isn't full and this is the last buffer, so process it as is (already padded).
1352 if last_aad_block {
1353 Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
1354 assert_eq!(T::regs().sr().read().ifem(), true);
1355
1356 // Switch to payload phase.
1357 ctx.aad_complete = true;
1358 T::regs().cr().modify(|w| w.set_crypen(false));
1359 T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
1360 T::regs().cr().modify(|w| w.fflush());
1361 } else {
1362 // Just return because we don't yet have a full block to process.
1363 return;
1364 }
1365 } else {
1366 // Load the full block from the buffer.
1367 Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
1368 assert_eq!(T::regs().sr().read().ifem(), true);
1369 }
1370
1371 // Handle a partial block that is passed in.
1372 ctx.aad_buffer_len = 0;
1373 let leftovers = aad_len_remaining % C::BLOCK_SIZE;
1374 ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]);
1375 ctx.aad_buffer_len += leftovers;
1376 ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
1377 aad_len_remaining -= leftovers;
1378 assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0);
1379
1380 // Load full data blocks into core.
1381 let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE;
1382 let start_index = len_to_copy;
1383 let end_index = start_index + (C::BLOCK_SIZE * num_full_blocks);
1384 Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &aad[start_index..end_index]).await;
1385
1386 if last_aad_block {
1387 if leftovers > 0 {
1388 Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
1389 assert_eq!(T::regs().sr().read().ifem(), true);
1390 }
1391 // Switch to payload phase.
1392 ctx.aad_complete = true;
1393 T::regs().cr().modify(|w| w.set_crypen(false));
1394 T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
1395 T::regs().cr().modify(|w| w.fflush());
1396 }
1397
1398 self.store_context(ctx);
1399 }
1400
1056 /// Performs encryption/decryption on the provided context. 1401 /// Performs encryption/decryption on the provided context.
1057 /// The context determines algorithm, mode, and state of the crypto accelerator. 1402 /// The context determines algorithm, mode, and state of the crypto accelerator.
1058 /// When the last piece of data is supplied, `last_block` should be `true`. 1403 /// When the last piece of data is supplied, `last_block` should be `true`.
@@ -1118,7 +1463,7 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
1118 // Handle the final block, which is incomplete. 1463 // Handle the final block, which is incomplete.
1119 if last_block_remainder > 0 { 1464 if last_block_remainder > 0 {
1120 let padding_len = C::BLOCK_SIZE - last_block_remainder; 1465 let padding_len = C::BLOCK_SIZE - last_block_remainder;
1121 let temp1 = ctx.cipher.pre_final_block(&T::regs(), ctx.dir, padding_len); 1466 let temp1 = ctx.cipher.pre_final(&T::regs(), ctx.dir, padding_len);
1122 1467
1123 let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; 1468 let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
1124 let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE]; 1469 let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
@@ -1134,7 +1479,102 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
1134 let mut mask: [u8; 16] = [0; 16]; 1479 let mut mask: [u8; 16] = [0; 16];
1135 mask[..last_block_remainder].fill(0xFF); 1480 mask[..last_block_remainder].fill(0xFF);
1136 ctx.cipher 1481 ctx.cipher
1137 .post_final_block(&T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask); 1482 .post_final_blocking(&T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask);
1483 }
1484
1485 ctx.payload_len += input.len() as u64;
1486
1487 self.store_context(ctx);
1488 }
1489
1490 /// Performs encryption/decryption on the provided context.
1491 /// The context determines algorithm, mode, and state of the crypto accelerator.
1492 /// When the last piece of data is supplied, `last_block` should be `true`.
1493 /// This function panics under various mismatches of parameters.
1494 /// Input and output buffer lengths must match.
1495 /// Data must be a multiple of block size (128-bits for AES, 64-bits for DES) for CBC and ECB modes.
1496 /// Padding or ciphertext stealing must be managed by the application for these modes.
1497 /// Data must also be a multiple of block size unless `last_block` is `true`.
1498 pub async fn payload<'c, C: Cipher<'c> + CipherSized + IVSized>(
1499 &mut self,
1500 ctx: &mut Context<'c, C>,
1501 input: &[u8],
1502 output: &mut [u8],
1503 last_block: bool,
1504 )
1505 where
1506 DmaIn: crate::cryp::DmaIn<T>,
1507 DmaOut: crate::cryp::DmaOut<T>,
1508 {
1509 self.load_context(ctx);
1510
1511 let last_block_remainder = input.len() % C::BLOCK_SIZE;
1512
1513 // Perform checks for correctness.
1514 if !ctx.aad_complete && ctx.header_len > 0 {
1515 panic!("Additional associated data must be processed first!");
1516 } else if !ctx.aad_complete {
1517 #[cfg(any(cryp_v2, cryp_v3))]
1518 {
1519 ctx.aad_complete = true;
1520 T::regs().cr().modify(|w| w.set_crypen(false));
1521 T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
1522 T::regs().cr().modify(|w| w.fflush());
1523 T::regs().cr().modify(|w| w.set_crypen(true));
1524 }
1525 }
1526 if ctx.last_block_processed {
1527 panic!("The last block has already been processed!");
1528 }
1529 if input.len() > output.len() {
1530 panic!("Output buffer length must match input length.");
1531 }
1532 if !last_block {
1533 if last_block_remainder != 0 {
1534 panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE);
1535 }
1536 }
1537 if C::REQUIRES_PADDING {
1538 if last_block_remainder != 0 {
1539 panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE);
1540 }
1541 }
1542 if last_block {
1543 ctx.last_block_processed = true;
1544 }
1545
1546 // Load data into core, block by block.
1547 let num_full_blocks = input.len() / C::BLOCK_SIZE;
1548 for block in 0..num_full_blocks {
1549 let index = block * C::BLOCK_SIZE;
1550 // Read block out
 1551 let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut output[index..index + C::BLOCK_SIZE]);
 1552 // Write block in
 1553 let write = Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &input[index..index + C::BLOCK_SIZE]);
1554 embassy_futures::join::join(read, write).await;
1555 }
1556
1557 // Handle the final block, which is incomplete.
1558 if last_block_remainder > 0 {
1559 let padding_len = C::BLOCK_SIZE - last_block_remainder;
1560 let temp1 = ctx.cipher.pre_final(&T::regs(), ctx.dir, padding_len);
1561
1562 let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
1563 let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
1564 last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]);
1565 let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut intermediate_data);
1566 let write = Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &last_block);
1567 embassy_futures::join::join(read, write).await;
1568
1569 // Handle the last block depending on mode.
1570 let output_len = output.len();
1571 output[output_len - last_block_remainder..output_len]
1572 .copy_from_slice(&intermediate_data[0..last_block_remainder]);
1573
1574 let mut mask: [u8; 16] = [0; 16];
1575 mask[..last_block_remainder].fill(0xFF);
1576 ctx.cipher
1577 .post_final(&T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask).await;
1138 } 1578 }
1139 1579
1140 ctx.payload_len += input.len() as u64; 1580 ctx.payload_len += input.len() as u64;
@@ -1188,6 +1628,50 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
1188 tag 1628 tag
1189 } 1629 }
1190 1630
1631 #[cfg(any(cryp_v2, cryp_v3))]
1632 /// This function only needs to be called for GCM, CCM, and GMAC modes to
1633 /// generate an authentication tag.
1634 pub async fn finish<'c, const TAG_SIZE: usize, C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>>(&mut self, mut ctx: Context<'c, C>) -> [u8; TAG_SIZE]
1635 where
1636 DmaIn: crate::cryp::DmaIn<T>,
1637 DmaOut: crate::cryp::DmaOut<T>,
1638 {
1639 self.load_context(&mut ctx);
1640
1641 T::regs().cr().modify(|w| w.set_crypen(false));
1642 T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
1643 T::regs().cr().modify(|w| w.set_crypen(true));
1644
1645 let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32;
1646 let headerlen2: u32 = (ctx.header_len * 8) as u32;
1647 let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32;
1648 let payloadlen2: u32 = (ctx.payload_len * 8) as u32;
1649
1650 #[cfg(cryp_v2)]
1651 let footer: [u32; 4] = [
1652 headerlen1.swap_bytes(),
1653 headerlen2.swap_bytes(),
1654 payloadlen1.swap_bytes(),
1655 payloadlen2.swap_bytes(),
1656 ];
1657 #[cfg(cryp_v3)]
1658 let footer: [u32; 4] = [headerlen1, headerlen2, payloadlen1, payloadlen2];
1659
1660 let write = Self::write_words(&mut self.indma, C::BLOCK_SIZE, &footer);
1661
1662 let mut full_tag: [u8; 16] = [0; 16];
1663 let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut full_tag);
1664
1665 embassy_futures::join::join(read, write).await;
1666
1667 let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE];
1668 tag.copy_from_slice(&full_tag[0..TAG_SIZE]);
1669
1670 T::regs().cr().modify(|w| w.set_crypen(false));
1671
1672 tag
1673 }
1674
1191 fn load_key(&self, key: &[u8]) { 1675 fn load_key(&self, key: &[u8]) {
1192 // Load the key into the registers. 1676 // Load the key into the registers.
1193 let mut keyidx = 0; 1677 let mut keyidx = 0;
@@ -1288,6 +1772,30 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
1288 } 1772 }
1289 } 1773 }
1290 1774
1775 async fn write_bytes(dma: &mut PeripheralRef<'_, DmaIn>, block_size: usize, blocks: &[u8])
1776 where
1777 DmaIn: crate::cryp::DmaIn<T>,
1778 {
1779 if blocks.len() == 0 {
1780 return;
1781 }
1782 // Ensure input is a multiple of block size.
1783 assert_eq!(blocks.len() % block_size, 0);
1784 // Configure DMA to transfer input to crypto core.
1785 let dma_request = dma.request();
1786 let dst_ptr = T::regs().din().as_ptr();
1787 let num_words = blocks.len() / 4;
1788 let src_ptr = ptr::slice_from_raw_parts(blocks.as_ptr().cast(), num_words);
1789 let options = TransferOptions {
1790 priority: Priority::High,
1791 ..Default::default()
1792 };
1793 let dma_transfer = unsafe { Transfer::new_write_raw(dma, dma_request, src_ptr, dst_ptr, options) };
1794 T::regs().dmacr().modify(|w| w.set_dien(true));
1795 // Wait for the transfer to complete.
1796 dma_transfer.await;
1797 }
1798
1291 fn write_words_blocking(&self, block_size: usize, blocks: &[u32]) { 1799 fn write_words_blocking(&self, block_size: usize, blocks: &[u32]) {
1292 assert_eq!((blocks.len() * 4) % block_size, 0); 1800 assert_eq!((blocks.len() * 4) % block_size, 0);
1293 let mut byte_counter: usize = 0; 1801 let mut byte_counter: usize = 0;
@@ -1301,6 +1809,30 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
1301 } 1809 }
1302 } 1810 }
1303 1811
1812 async fn write_words(dma: &mut PeripheralRef<'_, DmaIn>, block_size: usize, blocks: &[u32])
1813 where
1814 DmaIn: crate::cryp::DmaIn<T>,
1815 {
1816 if blocks.len() == 0 {
1817 return;
1818 }
1819 // Ensure input is a multiple of block size.
1820 assert_eq!((blocks.len() * 4) % block_size, 0);
1821 // Configure DMA to transfer input to crypto core.
1822 let dma_request = dma.request();
1823 let dst_ptr = T::regs().din().as_ptr();
1824 let num_words = blocks.len();
1825 let src_ptr = ptr::slice_from_raw_parts(blocks.as_ptr().cast(), num_words);
1826 let options = TransferOptions {
1827 priority: Priority::High,
1828 ..Default::default()
1829 };
1830 let dma_transfer = unsafe { Transfer::new_write_raw(dma, dma_request, src_ptr, dst_ptr, options) };
1831 T::regs().dmacr().modify(|w| w.set_dien(true));
1832 // Wait for the transfer to complete.
1833 dma_transfer.await;
1834 }
1835
1304 fn read_bytes_blocking(&self, block_size: usize, blocks: &mut [u8]) { 1836 fn read_bytes_blocking(&self, block_size: usize, blocks: &mut [u8]) {
1305 // Block until there is output to read. 1837 // Block until there is output to read.
1306 while !T::regs().sr().read().ofne() {} 1838 while !T::regs().sr().read().ofne() {}
@@ -1315,6 +1847,30 @@ impl<'d, T: Instance, D> Cryp<'d, T, D> {
1315 index += 4; 1847 index += 4;
1316 } 1848 }
1317 } 1849 }
1850
1851 async fn read_bytes(dma: &mut PeripheralRef<'_, DmaOut>, block_size: usize, blocks: &mut [u8])
1852 where
1853 DmaOut: crate::cryp::DmaOut<T>,
1854 {
1855 if blocks.len() == 0 {
1856 return;
1857 }
1858 // Ensure input is a multiple of block size.
1859 assert_eq!(blocks.len() % block_size, 0);
1860 // Configure DMA to get output from crypto core.
1861 let dma_request = dma.request();
1862 let src_ptr = T::regs().dout().as_ptr();
1863 let num_words = blocks.len() / 4;
1864 let dst_ptr = ptr::slice_from_raw_parts_mut(blocks.as_mut_ptr().cast(), num_words);
1865 let options = TransferOptions {
1866 priority: Priority::VeryHigh,
1867 ..Default::default()
1868 };
1869 let dma_transfer = unsafe { Transfer::new_read_raw(dma, dma_request, src_ptr, dst_ptr, options) };
1870 T::regs().dmacr().modify(|w| w.set_doen(true));
1871 // Wait for the transfer to complete.
1872 dma_transfer.await;
1873 }
1318} 1874}
1319 1875
1320pub(crate) mod sealed { 1876pub(crate) mod sealed {
@@ -1344,3 +1900,6 @@ foreach_interrupt!(
1344 } 1900 }
1345 }; 1901 };
1346); 1902);
1903
// Generate the `DmaIn`/`DmaOut` marker traits binding DMA channels to a CRYP
// `Instance` — these are the bounds used by the DMA-based methods above
// (`where DmaIn: crate::cryp::DmaIn<T>`, `DmaOut: crate::cryp::DmaOut<T>`).
dma_trait!(DmaIn, Instance);
dma_trait!(DmaOut, Instance);