embassy_stm32/cryp/mod.rs

//! Crypto Accelerator (CRYP)
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
use core::cmp::min;
use core::marker::PhantomData;
use core::ptr;

use embassy_hal_internal::{into_ref, PeripheralRef};
use embassy_sync::waitqueue::AtomicWaker;

use crate::dma::{NoDma, Transfer, TransferOptions};
use crate::interrupt::typelevel::Interrupt;
use crate::{interrupt, pac, peripherals, rcc, Peripheral};

const DES_BLOCK_SIZE: usize = 8; // 64 bits
const AES_BLOCK_SIZE: usize = 16; // 128 bits

static CRYP_WAKER: AtomicWaker = AtomicWaker::new();

/// CRYP interrupt handler.
pub struct InterruptHandler<T: Instance> {
    _phantom: PhantomData<T>,
}

impl<T: Instance> interrupt::typelevel::Handler<T::Interrupt> for InterruptHandler<T> {
    unsafe fn on_interrupt() {
        let bits = T::regs().misr().read();
        if bits.inmis() {
            T::regs().imscr().modify(|w| w.set_inim(false));
            CRYP_WAKER.wake();
        }
        if bits.outmis() {
            T::regs().imscr().modify(|w| w.set_outim(false));
            CRYP_WAKER.wake();
        }
    }
}

/// This trait encapsulates all cipher-specific behavior.
pub trait Cipher<'c> {
    /// Processing block size. Determined by the processor and the algorithm.
    const BLOCK_SIZE: usize;

    /// Indicates whether the cipher requires the application to provide padding.
    /// If `true`, no partial blocks will be accepted (a panic will occur).
    const REQUIRES_PADDING: bool = false;

    /// Returns the symmetric key.
    fn key(&self) -> &[u8];

    /// Returns the initialization vector.
    fn iv(&self) -> &[u8];

    /// Sets the processor algorithm mode according to the associated cipher.
    fn set_algomode(&self, p: pac::cryp::Cryp);

    /// Performs any key preparation within the processor, if necessary.
    fn prepare_key(&self, _p: pac::cryp::Cryp) {}

    /// Performs any cipher-specific initialization.
    fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, _p: pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {}

    /// Performs any cipher-specific initialization.
    async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, _p: pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>)
    where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
    }

    /// Called prior to processing the last data block for cipher-specific operations.
    fn pre_final(&self, _p: pac::cryp::Cryp, _dir: Direction, _padding_len: usize) -> [u32; 4] {
        return [0; 4];
    }

    /// Called after processing the last data block for cipher-specific operations.
    fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
        &self,
        _p: pac::cryp::Cryp,
        _cryp: &Cryp<T, DmaIn, DmaOut>,
        _dir: Direction,
        _int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        _padding_mask: [u8; 16],
    ) {
    }

    /// Called after processing the last data block for cipher-specific operations.
    async fn post_final<T: Instance, DmaIn, DmaOut>(
        &self,
        _p: pac::cryp::Cryp,
        _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
        _dir: Direction,
        _int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        _padding_mask: [u8; 16],
    ) where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
    }

    /// Returns the AAD header block as required by the cipher.
    fn get_header_block(&self) -> &[u8] {
        return [0; 0].as_slice();
    }
}

/// This trait enables restriction of ciphers to specific key sizes.
pub trait CipherSized {}

/// This trait enables restriction of initialization vectors to sizes compatible with a cipher mode.
pub trait IVSized {}

/// This trait enables restriction of a header phase to authenticated ciphers only.
pub trait CipherAuthenticated<const TAG_SIZE: usize> {
    /// Defines the authentication tag size.
    const TAG_SIZE: usize = TAG_SIZE;
}

/// TDES-ECB Cipher Mode
pub struct TdesEcb<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> TdesEcb<'c, KEY_SIZE> {
    /// Constructs a new TDES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        return Self { key: key, iv: &[0; 0] };
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(0));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(0));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for TdesEcb<'c, { 112 / 8 }> {}
impl<'c> CipherSized for TdesEcb<'c, { 168 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for TdesEcb<'c, KEY_SIZE> {}

/// TDES-CBC Cipher Mode
pub struct TdesCbc<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 8],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> TdesCbc<'c, KEY_SIZE> {
    /// Constructs a new TDES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self {
        return Self { key: key, iv: iv };
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for TdesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(1));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(1));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for TdesCbc<'c, { 112 / 8 }> {}
impl<'c> CipherSized for TdesCbc<'c, { 168 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for TdesCbc<'c, KEY_SIZE> {}

/// DES-ECB Cipher Mode
pub struct DesEcb<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> DesEcb<'c, KEY_SIZE> {
    /// Constructs a new DES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        return Self { key: key, iv: &[0; 0] };
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(2));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(2));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for DesEcb<'c, { 56 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for DesEcb<'c, KEY_SIZE> {}

/// DES-CBC Cipher Mode
pub struct DesCbc<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 8],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> DesCbc<'c, KEY_SIZE> {
    /// Constructs a new DES-CBC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 8]) -> Self {
        return Self { key: key, iv: iv };
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for DesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = DES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(3));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(3));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for DesCbc<'c, { 56 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for DesCbc<'c, KEY_SIZE> {}

/// AES-ECB Cipher Mode
pub struct AesEcb<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 0],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesEcb<'c, KEY_SIZE> {
    /// Constructs a new AES-ECB cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE]) -> Self {
        return Self { key: key, iv: &[0; 0] };
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesEcb<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn prepare_key(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(7));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(7));
            p.cr().modify(|w| w.set_algomode3(false));
        }
        p.cr().modify(|w| w.set_crypen(true));
        while p.sr().read().busy() {}
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(2));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(2));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for AesEcb<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesEcb<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesEcb<'c, { 256 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for AesEcb<'c, KEY_SIZE> {}

/// AES-CBC Cipher Mode
pub struct AesCbc<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesCbc<'c, KEY_SIZE> {
    /// Constructs a new AES-CBC cipher for a cryptographic operation.
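    ///
    /// A minimal construction sketch (the key and IV values here are
    /// placeholders, not recommendations):
    ///
    /// ```ignore
    /// let key: [u8; 16] = [0u8; 16]; // 128-bit key
    /// let iv: [u8; 16] = [0u8; 16]; // one full AES block
    /// let cipher = AesCbc::new(&key, &iv);
    /// ```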
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self {
        return Self { key: key, iv: iv };
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCbc<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;
    const REQUIRES_PADDING: bool = true;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn prepare_key(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(7));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(7));
            p.cr().modify(|w| w.set_algomode3(false));
        }
        p.cr().modify(|w| w.set_crypen(true));
        while p.sr().read().busy() {}
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(5));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(5));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for AesCbc<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesCbc<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesCbc<'c, { 256 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for AesCbc<'c, KEY_SIZE> {}

/// AES-CTR Cipher Mode
pub struct AesCtr<'c, const KEY_SIZE: usize> {
    iv: &'c [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

impl<'c, const KEY_SIZE: usize> AesCtr<'c, KEY_SIZE> {
    /// Constructs a new AES-CTR cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 16]) -> Self {
        return Self { key: key, iv: iv };
    }
}

impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesCtr<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &'c [u8] {
        self.iv
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        #[cfg(cryp_v1)]
        {
            p.cr().modify(|w| w.set_algomode(6));
        }
        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
        {
            p.cr().modify(|w| w.set_algomode0(6));
            p.cr().modify(|w| w.set_algomode3(false));
        }
    }
}

impl<'c> CipherSized for AesCtr<'c, { 128 / 8 }> {}
impl<'c> CipherSized for AesCtr<'c, { 192 / 8 }> {}
impl<'c> CipherSized for AesCtr<'c, { 256 / 8 }> {}
impl<'c, const KEY_SIZE: usize> IVSized for AesCtr<'c, KEY_SIZE> {}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
/// AES-GCM Cipher Mode
pub struct AesGcm<'c, const KEY_SIZE: usize> {
    iv: [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> AesGcm<'c, KEY_SIZE> {
    /// Constructs a new AES-GCM cipher for a cryptographic operation.
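    ///
    /// The 96-bit IV is zero-padded internally and the trailing counter byte
    /// is initialized by the driver. A minimal construction sketch
    /// (placeholder key and nonce values):
    ///
    /// ```ignore
    /// let key: [u8; 16] = [0u8; 16]; // 128-bit key
    /// let nonce: [u8; 12] = [0u8; 12]; // 96-bit GCM nonce
    /// let cipher = AesGcm::new(&key, &nonce);
    /// ```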
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self {
        let mut new_gcm = Self { key: key, iv: [0; 16] };
        new_gcm.iv[..12].copy_from_slice(iv);
        new_gcm.iv[15] = 2;
        new_gcm
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGcm<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.iv.as_slice()
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        p.cr().modify(|w| w.set_algomode0(0));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        if dir == Direction::Encrypt {
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            let iv1r = p.csgcmccmr(7).read() - 1;
            p.init(1).ivrr().write_value(iv1r);
            p.cr().modify(|w| w.set_crypen(true));
        }
        [0; 4]
    }

    #[cfg(any(cryp_v3, cryp_v4))]
    fn pre_final(&self, p: pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &Cryp<T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            cryp.write_bytes_blocking(Self::BLOCK_SIZE, int_data);
            cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance, DmaIn, DmaOut>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];

            let read = Cryp::<T, DmaIn, DmaOut>::read_bytes(&mut cryp.outdma, Self::BLOCK_SIZE, &mut out_data);
            let write = Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, int_data);

            embassy_futures::join::join(read, write).await;

            int_data.copy_from_slice(&out_data);
        }
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGcm<'c, { 128 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGcm<'c, { 192 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGcm<'c, { 256 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGcm<'c, KEY_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> IVSized for AesGcm<'c, KEY_SIZE> {}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
/// AES-GMAC Cipher Mode
pub struct AesGmac<'c, const KEY_SIZE: usize> {
    iv: [u8; 16],
    key: &'c [u8; KEY_SIZE],
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> AesGmac<'c, KEY_SIZE> {
    /// Constructs a new AES-GMAC cipher for a cryptographic operation.
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; 12]) -> Self {
        let mut new_gmac = Self { key: key, iv: [0; 16] };
        new_gmac.iv[..12].copy_from_slice(iv);
        new_gmac.iv[15] = 2;
        new_gmac
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> Cipher<'c> for AesGmac<'c, KEY_SIZE> {
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.iv.as_slice()
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        p.cr().modify(|w| w.set_algomode0(0));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: pac::cryp::Cryp, _cryp: &Cryp<T, DmaIn, DmaOut>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: pac::cryp::Cryp, _cryp: &mut Cryp<'_, T, DmaIn, DmaOut>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));
        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        if dir == Direction::Encrypt {
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            let iv1r = p.csgcmccmr(7).read() - 1;
            p.init(1).ivrr().write_value(iv1r);
            p.cr().modify(|w| w.set_crypen(true));
        }
        [0; 4]
    }

    #[cfg(any(cryp_v3, cryp_v4))]
    fn pre_final(&self, p: pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        // Handle special GCM partial block process.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &Cryp<T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            cryp.write_bytes_blocking(Self::BLOCK_SIZE, int_data);
            cryp.read_bytes_blocking(Self::BLOCK_SIZE, int_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance, DmaIn, DmaOut>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        _temp1: [u32; 4],
        padding_mask: [u8; AES_BLOCK_SIZE],
    ) where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        if dir == Direction::Encrypt {
            // Handle special GCM partial block process.
            p.cr().modify(|w| w.set_crypen(false));
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(0));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            p.cr().modify(|w| w.set_crypen(true));
            p.cr().modify(|w| w.set_gcm_ccmph(3));

            let mut out_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];

            let read = Cryp::<T, DmaIn, DmaOut>::read_bytes(&mut cryp.outdma, Self::BLOCK_SIZE, &mut out_data);
            let write = Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, int_data);

            embassy_futures::join::join(read, write).await;
        }
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGmac<'c, { 128 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGmac<'c, { 192 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c> CipherSized for AesGmac<'c, { 256 / 8 }> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> CipherAuthenticated<16> for AesGmac<'c, KEY_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize> IVSized for AesGmac<'c, KEY_SIZE> {}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
/// AES-CCM Cipher Mode
pub struct AesCcm<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> {
    key: &'c [u8; KEY_SIZE],
    aad_header: [u8; 6],
    aad_header_len: usize,
    block0: [u8; 16],
    ctr: [u8; 16],
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE> {
    /// Constructs a new AES-CCM cipher for a cryptographic operation.
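    ///
    /// `aad_len` and `payload_len` must be the exact byte lengths that will
    /// later be fed to the AAD and payload phases. A minimal construction
    /// sketch (placeholder key, nonce, and lengths):
    ///
    /// ```ignore
    /// let key: [u8; 16] = [0u8; 16]; // 128-bit key
    /// let nonce: [u8; 13] = [0u8; 13]; // CCM nonce, 7 to 13 bytes
    /// // 8-byte tag, with 16 bytes of AAD and 64 bytes of payload to follow.
    /// let cipher: AesCcm<16, 8, 13> = AesCcm::new(&key, &nonce, 16, 64);
    /// ```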
    pub fn new(key: &'c [u8; KEY_SIZE], iv: &'c [u8; IV_SIZE], aad_len: usize, payload_len: usize) -> Self {
        // Encode the AAD length prefix: two bytes for lengths below 0xFF00,
        // otherwise the 0xFFFE marker followed by a 4-byte big-endian length.
        let mut aad_header: [u8; 6] = [0; 6];
        let mut aad_header_len = 0;
        let mut block0: [u8; 16] = [0; 16];
        if aad_len != 0 {
            if aad_len < 65280 {
                aad_header[0] = (aad_len >> 8) as u8 & 0xFF;
                aad_header[1] = aad_len as u8 & 0xFF;
                aad_header_len = 2;
            } else {
                aad_header[0] = 0xFF;
                aad_header[1] = 0xFE;
                let aad_len_bytes: [u8; 4] = (aad_len as u32).to_be_bytes();
                aad_header[2] = aad_len_bytes[0];
                aad_header[3] = aad_len_bytes[1];
                aad_header[4] = aad_len_bytes[2];
                aad_header[5] = aad_len_bytes[3];
                aad_header_len = 6;
            }
        }
        let total_aad_len = aad_header_len + aad_len;
        let mut aad_padding_len = 16 - (total_aad_len % 16);
        if aad_padding_len == 16 {
            aad_padding_len = 0;
        }
        aad_header_len += aad_padding_len;
        let total_aad_len_padded = aad_header_len + aad_len;
        // Build the B0 block: flags byte (AAD present, tag size, nonce size),
        // then the nonce, then the big-endian message length.
        if total_aad_len_padded > 0 {
            block0[0] = 0x40;
        }
        block0[0] |= ((((TAG_SIZE as u8) - 2) >> 1) & 0x07) << 3;
        block0[0] |= ((15 - (iv.len() as u8)) - 1) & 0x07;
        block0[1..1 + iv.len()].copy_from_slice(iv);
        let payload_len_bytes: [u8; 4] = (payload_len as u32).to_be_bytes();
        if iv.len() <= 11 {
            block0[12] = payload_len_bytes[0];
        } else if payload_len_bytes[0] > 0 {
            panic!("Message is too large for given IV size.");
        }
        if iv.len() <= 12 {
            block0[13] = payload_len_bytes[1];
        } else if payload_len_bytes[1] > 0 {
            panic!("Message is too large for given IV size.");
        }
        block0[14] = payload_len_bytes[2];
        block0[15] = payload_len_bytes[3];
        // Build the initial counter block: flags, nonce, counter set to 1.
        let mut ctr: [u8; 16] = [0; 16];
        ctr[0] = block0[0] & 0x07;
        ctr[1..1 + iv.len()].copy_from_slice(&block0[1..1 + iv.len()]);
        ctr[15] = 0x01;

        return Self {
            key: key,
            aad_header: aad_header,
            aad_header_len: aad_header_len,
            block0: block0,
            ctr: ctr,
        };
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize, const IV_SIZE: usize> Cipher<'c>
    for AesCcm<'c, KEY_SIZE, TAG_SIZE, IV_SIZE>
{
    const BLOCK_SIZE: usize = AES_BLOCK_SIZE;

    fn key(&self) -> &'c [u8] {
        self.key
    }

    fn iv(&self) -> &[u8] {
        self.ctr.as_slice()
    }

    fn set_algomode(&self, p: pac::cryp::Cryp) {
        p.cr().modify(|w| w.set_algomode0(1));
        p.cr().modify(|w| w.set_algomode3(true));
    }

    fn init_phase_blocking<T: Instance, DmaIn, DmaOut>(&self, p: pac::cryp::Cryp, cryp: &Cryp<T, DmaIn, DmaOut>) {
        p.cr().modify(|w| w.set_gcm_ccmph(0));

        cryp.write_bytes_blocking(Self::BLOCK_SIZE, &self.block0);

        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    async fn init_phase<T: Instance, DmaIn, DmaOut>(&self, p: pac::cryp::Cryp, cryp: &mut Cryp<'_, T, DmaIn, DmaOut>)
    where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        p.cr().modify(|w| w.set_gcm_ccmph(0));

        Cryp::<T, DmaIn, DmaOut>::write_bytes(&mut cryp.indma, Self::BLOCK_SIZE, &self.block0).await;

        p.cr().modify(|w| w.set_crypen(true));
        while p.cr().read().crypen() {}
    }

    fn get_header_block(&self) -> &[u8] {
        return &self.aad_header[0..self.aad_header_len];
    }

    #[cfg(cryp_v2)]
    fn pre_final(&self, p: pac::cryp::Cryp, dir: Direction, _padding_len: usize) -> [u32; 4] {
        // Handle special CCM partial block process.
        let mut temp1 = [0; 4];
        if dir == Direction::Decrypt {
            p.cr().modify(|w| w.set_crypen(false));
            let iv1temp = p.init(1).ivrr().read();
            temp1[0] = p.csgcmccmr(0).read().swap_bytes();
            temp1[1] = p.csgcmccmr(1).read().swap_bytes();
            temp1[2] = p.csgcmccmr(2).read().swap_bytes();
            temp1[3] = p.csgcmccmr(3).read().swap_bytes();
            p.init(1).ivrr().write_value(iv1temp);
            p.cr().modify(|w| w.set_algomode3(false));
            p.cr().modify(|w| w.set_algomode0(6));
            p.cr().modify(|w| w.set_crypen(true));
        }
        return temp1;
    }

    #[cfg(any(cryp_v3, cryp_v4))]
    fn pre_final(&self, p: pac::cryp::Cryp, _dir: Direction, padding_len: usize) -> [u32; 4] {
        // Handle special CCM partial block process.
        p.cr().modify(|w| w.set_npblb(padding_len as u8));
        [0; 4]
    }

    #[cfg(cryp_v2)]
    fn post_final_blocking<T: Instance, DmaIn, DmaOut>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &Cryp<T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        temp1: [u32; 4],
        padding_mask: [u8; 16],
    ) {
        if dir == Direction::Decrypt {
            // Handle special CCM partial block process.
            let mut temp2 = [0; 4];
            temp2[0] = p.csgcmccmr(0).read().swap_bytes();
            temp2[1] = p.csgcmccmr(1).read().swap_bytes();
            temp2[2] = p.csgcmccmr(2).read().swap_bytes();
            temp2[3] = p.csgcmccmr(3).read().swap_bytes();
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(1));
            p.cr().modify(|w| w.set_gcm_ccmph(3));
            // Header phase
            p.cr().modify(|w| w.set_gcm_ccmph(1));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            let mut in_data: [u32; 4] = [0; 4];
            for i in 0..in_data.len() {
                let mut int_bytes: [u8; 4] = [0; 4];
                int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]);
                let int_word = u32::from_le_bytes(int_bytes);
                in_data[i] = int_word;
                in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i];
            }
            cryp.write_words_blocking(Self::BLOCK_SIZE, &in_data);
        }
    }

    #[cfg(cryp_v2)]
    async fn post_final<T: Instance, DmaIn, DmaOut>(
        &self,
        p: pac::cryp::Cryp,
        cryp: &mut Cryp<'_, T, DmaIn, DmaOut>,
        dir: Direction,
        int_data: &mut [u8; AES_BLOCK_SIZE],
        temp1: [u32; 4],
        padding_mask: [u8; 16],
    ) where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        if dir == Direction::Decrypt {
            // Handle special CCM partial block process.
            let mut temp2 = [0; 4];
            temp2[0] = p.csgcmccmr(0).read().swap_bytes();
            temp2[1] = p.csgcmccmr(1).read().swap_bytes();
            temp2[2] = p.csgcmccmr(2).read().swap_bytes();
            temp2[3] = p.csgcmccmr(3).read().swap_bytes();
            p.cr().modify(|w| w.set_algomode3(true));
            p.cr().modify(|w| w.set_algomode0(1));
            p.cr().modify(|w| w.set_gcm_ccmph(3));
            // Header phase
            p.cr().modify(|w| w.set_gcm_ccmph(1));
            for i in 0..AES_BLOCK_SIZE {
                int_data[i] = int_data[i] & padding_mask[i];
            }
            let mut in_data: [u32; 4] = [0; 4];
            for i in 0..in_data.len() {
                let mut int_bytes: [u8; 4] = [0; 4];
                int_bytes.copy_from_slice(&int_data[(i * 4)..(i * 4) + 4]);
                let int_word = u32::from_le_bytes(int_bytes);
                in_data[i] = int_word;
                in_data[i] = in_data[i] ^ temp1[i] ^ temp2[i];
            }
            Cryp::<T, DmaIn, DmaOut>::write_words(&mut cryp.indma, Self::BLOCK_SIZE, &in_data).await;
        }
    }
}

#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 128 / 8 }, TAG_SIZE, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 192 / 8 }, TAG_SIZE, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const TAG_SIZE: usize, const IV_SIZE: usize> CipherSized for AesCcm<'c, { 256 / 8 }, TAG_SIZE, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<4> for AesCcm<'c, KEY_SIZE, 4, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<6> for AesCcm<'c, KEY_SIZE, 6, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<8> for AesCcm<'c, KEY_SIZE, 8, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<10> for AesCcm<'c, KEY_SIZE, 10, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<12> for AesCcm<'c, KEY_SIZE, 12, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<14> for AesCcm<'c, KEY_SIZE, 14, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const IV_SIZE: usize> CipherAuthenticated<16> for AesCcm<'c, KEY_SIZE, 16, IV_SIZE> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 7> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 8> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 9> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 10> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 11> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 12> {}
#[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
impl<'c, const KEY_SIZE: usize, const TAG_SIZE: usize> IVSized for AesCcm<'c, KEY_SIZE, TAG_SIZE, 13> {}

#[allow(dead_code)]
/// Holds the state information for a cipher operation.
/// Allows suspending/resuming of cipher operations.
pub struct Context<'c, C: Cipher<'c> + CipherSized> {
    phantom_data: PhantomData<&'c C>,
    cipher: &'c C,
    dir: Direction,
    last_block_processed: bool,
    header_processed: bool,
    aad_complete: bool,
    cr: u32,
    iv: [u32; 4],
    csgcmccm: [u32; 8],
    csgcm: [u32; 8],
    header_len: u64,
    payload_len: u64,
    aad_buffer: [u8; 16],
    aad_buffer_len: usize,
}

/// Selects whether the crypto processor operates in encryption or decryption mode.
#[derive(PartialEq, Clone, Copy)]
pub enum Direction {
    /// Encryption mode
    Encrypt,
    /// Decryption mode
    Decrypt,
}

/// Crypto Accelerator Driver
pub struct Cryp<'d, T: Instance, DmaIn = NoDma, DmaOut = NoDma> {
    _peripheral: PeripheralRef<'d, T>,
    indma: PeripheralRef<'d, DmaIn>,
    outdma: PeripheralRef<'d, DmaOut>,
}

impl<'d, T: Instance, DmaIn, DmaOut> Cryp<'d, T, DmaIn, DmaOut> {
    /// Create a new CRYP driver.
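    ///
    /// A minimal construction sketch. The peripheral and DMA channel names
    /// below are placeholders that depend on the specific chip:
    ///
    /// ```ignore
    /// use embassy_stm32::{bind_interrupts, cryp, peripherals};
    ///
    /// bind_interrupts!(struct Irqs {
    ///     CRYP => cryp::InterruptHandler<peripherals::CRYP>;
    /// });
    ///
    /// let p = embassy_stm32::init(Default::default());
    /// let mut cryp = cryp::Cryp::new(p.CRYP, p.DMA2_CH6, p.DMA2_CH5, Irqs);
    /// ```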
    pub fn new(
        peri: impl Peripheral<P = T> + 'd,
        indma: impl Peripheral<P = DmaIn> + 'd,
        outdma: impl Peripheral<P = DmaOut> + 'd,
        _irq: impl interrupt::typelevel::Binding<T::Interrupt, InterruptHandler<T>> + 'd,
    ) -> Self {
        rcc::enable_and_reset::<T>();
        into_ref!(peri, indma, outdma);
        let instance = Self {
            _peripheral: peri,
            indma: indma,
            outdma: outdma,
        };

        T::Interrupt::unpend();
        unsafe { T::Interrupt::enable() };

        instance
    }

    /// Start a new encrypt or decrypt operation for the given cipher.
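    ///
    /// The returned [`Context`] carries the operation state and is passed to
    /// the AAD and payload phases. A blocking AES-GCM encryption sketch
    /// (buffer contents are placeholders):
    ///
    /// ```ignore
    /// let cipher = AesGcm::new(&key, &nonce);
    /// let mut ctx = cryp.start_blocking(&cipher, Direction::Encrypt);
    /// cryp.aad_blocking(&mut ctx, &aad, true);
    /// cryp.payload_blocking(&mut ctx, &plaintext, &mut ciphertext, true);
    /// ```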
    pub fn start_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>(
        &self,
        cipher: &'c C,
        dir: Direction,
    ) -> Context<'c, C> {
        let mut ctx: Context<'c, C> = Context {
            dir,
            last_block_processed: false,
            cr: 0,
            iv: [0; 4],
            csgcmccm: [0; 8],
            csgcm: [0; 8],
            aad_complete: false,
            header_len: 0,
            payload_len: 0,
            cipher: cipher,
            phantom_data: PhantomData,
            header_processed: false,
            aad_buffer: [0; 16],
            aad_buffer_len: 0,
        };

        T::regs().cr().modify(|w| w.set_crypen(false));

        let key = ctx.cipher.key();

        if key.len() == (128 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(0));
        } else if key.len() == (192 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(1));
        } else if key.len() == (256 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(2));
        }

        self.load_key(key);

        // Set data type to 8-bit. This will match software implementations.
        T::regs().cr().modify(|w| w.set_datatype(2));

        ctx.cipher.prepare_key(T::regs());

        ctx.cipher.set_algomode(T::regs());

        // Set encrypt/decrypt
        if dir == Direction::Encrypt {
            T::regs().cr().modify(|w| w.set_algodir(false));
        } else {
            T::regs().cr().modify(|w| w.set_algodir(true));
        }

        // Load the IV into the registers.
        let iv = ctx.cipher.iv();
        let mut full_iv: [u8; 16] = [0; 16];
        full_iv[0..iv.len()].copy_from_slice(iv);
        let mut iv_idx = 0;
        let mut iv_word: [u8; 4] = [0; 4];
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));

        // Flush in/out FIFOs
        T::regs().cr().modify(|w| w.fflush());

        ctx.cipher.init_phase_blocking(T::regs(), self);

        self.store_context(&mut ctx);

        ctx
    }

    /// Start a new encrypt or decrypt operation for the given cipher.
    pub async fn start<'c, C: Cipher<'c> + CipherSized + IVSized>(
        &mut self,
        cipher: &'c C,
        dir: Direction,
    ) -> Context<'c, C>
    where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        let mut ctx: Context<'c, C> = Context {
            dir,
            last_block_processed: false,
            cr: 0,
            iv: [0; 4],
            csgcmccm: [0; 8],
            csgcm: [0; 8],
            aad_complete: false,
            header_len: 0,
            payload_len: 0,
            cipher: cipher,
            phantom_data: PhantomData,
            header_processed: false,
            aad_buffer: [0; 16],
            aad_buffer_len: 0,
        };

        T::regs().cr().modify(|w| w.set_crypen(false));

        let key = ctx.cipher.key();

        if key.len() == (128 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(0));
        } else if key.len() == (192 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(1));
        } else if key.len() == (256 / 8) {
            T::regs().cr().modify(|w| w.set_keysize(2));
        }

        self.load_key(key);

        // Set data type to 8-bit. This will match software implementations.
        T::regs().cr().modify(|w| w.set_datatype(2));

        ctx.cipher.prepare_key(T::regs());

        ctx.cipher.set_algomode(T::regs());

        // Set encrypt/decrypt
        if dir == Direction::Encrypt {
            T::regs().cr().modify(|w| w.set_algodir(false));
        } else {
            T::regs().cr().modify(|w| w.set_algodir(true));
        }

        // Load the IV into the registers.
        let iv = ctx.cipher.iv();
        let mut full_iv: [u8; 16] = [0; 16];
        full_iv[0..iv.len()].copy_from_slice(iv);
        let mut iv_idx = 0;
        let mut iv_word: [u8; 4] = [0; 4];
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(0).ivrr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        iv_idx += 4;
        T::regs().init(1).ivlr().write_value(u32::from_be_bytes(iv_word));
        iv_word.copy_from_slice(&full_iv[iv_idx..iv_idx + 4]);
        T::regs().init(1).ivrr().write_value(u32::from_be_bytes(iv_word));

        // Flush in/out FIFOs
        T::regs().cr().modify(|w| w.fflush());

        ctx.cipher.init_phase(T::regs(), self).await;

        self.store_context(&mut ctx);

        ctx
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    /// Controls the header phase of cipher processing.
    /// This function is only valid for authenticated ciphers including GCM, CCM, and GMAC.
    /// All additional associated data (AAD) must be supplied to this function prior to starting the payload phase with `payload_blocking`.
    /// The AAD must be supplied in multiples of the block size (128 bits for AES, 64 bits for DES), except when supplying the last block.
    /// When supplying the last block of AAD, `last_aad_block` must be `true`.
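    ///
    /// A sketch of feeding AAD in two pieces (lengths are placeholders;
    /// every call except the final one must supply whole blocks):
    ///
    /// ```ignore
    /// cryp.aad_blocking(&mut ctx, &aad[..16], false); // one full AES block
    /// cryp.aad_blocking(&mut ctx, &aad[16..], true); // final, may be partial
    /// ```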
    pub fn aad_blocking<
        'c,
        const TAG_SIZE: usize,
        C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
    >(
        &self,
        ctx: &mut Context<'c, C>,
        aad: &[u8],
        last_aad_block: bool,
    ) {
        self.load_context(ctx);

        // Perform checks for correctness.
        if ctx.aad_complete {
            panic!("Cannot update AAD after starting payload!")
        }

        ctx.header_len += aad.len() as u64;

        // Header phase
        T::regs().cr().modify(|w| w.set_crypen(false));
        T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
        T::regs().cr().modify(|w| w.set_crypen(true));

        // First write the header B1 block if not yet written.
        if !ctx.header_processed {
            ctx.header_processed = true;
            let header = ctx.cipher.get_header_block();
            ctx.aad_buffer[0..header.len()].copy_from_slice(header);
            ctx.aad_buffer_len += header.len();
        }

        // Fill the header block to make a full block.
        let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len);
        ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]);
        ctx.aad_buffer_len += len_to_copy;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        let mut aad_len_remaining = aad.len() - len_to_copy;

        if ctx.aad_buffer_len < C::BLOCK_SIZE {
            // The buffer isn't full and this is the last buffer, so process it as is (already padded).
            if last_aad_block {
                self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
                // Block until input FIFO is empty.
                while !T::regs().sr().read().ifem() {}

                // Switch to payload phase.
                ctx.aad_complete = true;
                T::regs().cr().modify(|w| w.set_crypen(false));
                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
                T::regs().cr().modify(|w| w.fflush());
            } else {
                // Just return because we don't yet have a full block to process.
                return;
            }
        } else {
            // Load the full block from the buffer.
            self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
            // Block until input FIFO is empty.
            while !T::regs().sr().read().ifem() {}
        }

        // Handle a partial block that is passed in.
        ctx.aad_buffer_len = 0;
        let leftovers = aad_len_remaining % C::BLOCK_SIZE;
        ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]);
        ctx.aad_buffer_len += leftovers;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        aad_len_remaining -= leftovers;
        assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0);

        // Load full data blocks into core.
        let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE;
        let start_index = len_to_copy;
        let end_index = start_index + (C::BLOCK_SIZE * num_full_blocks);
        self.write_bytes_blocking(C::BLOCK_SIZE, &aad[start_index..end_index]);

        if last_aad_block {
            if leftovers > 0 {
                self.write_bytes_blocking(C::BLOCK_SIZE, &ctx.aad_buffer);
            }
            // Switch to payload phase.
            ctx.aad_complete = true;
            T::regs().cr().modify(|w| w.set_crypen(false));
            T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
            T::regs().cr().modify(|w| w.fflush());
        }

        self.store_context(ctx);
    }

    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
    /// Controls the header phase of cipher processing.
    /// This function is only valid for authenticated ciphers including GCM, CCM, and GMAC.
    /// All additional associated data (AAD) must be supplied to this function prior to starting the payload phase with `payload`.
    /// The AAD must be supplied in multiples of the block size (128 bits for AES, 64 bits for DES), except when supplying the last block.
    /// When supplying the last block of AAD, `last_aad_block` must be `true`.
    pub async fn aad<'c, const TAG_SIZE: usize, C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>>(
        &mut self,
        ctx: &mut Context<'c, C>,
        aad: &[u8],
        last_aad_block: bool,
    ) where
        DmaIn: crate::cryp::DmaIn<T>,
        DmaOut: crate::cryp::DmaOut<T>,
    {
        self.load_context(ctx);

        // Perform checks for correctness.
        if ctx.aad_complete {
            panic!("Cannot update AAD after starting payload!")
        }

        ctx.header_len += aad.len() as u64;

        // Header phase
        T::regs().cr().modify(|w| w.set_crypen(false));
        T::regs().cr().modify(|w| w.set_gcm_ccmph(1));
        T::regs().cr().modify(|w| w.set_crypen(true));

        // First write the header B1 block if not yet written.
        if !ctx.header_processed {
            ctx.header_processed = true;
            let header = ctx.cipher.get_header_block();
            ctx.aad_buffer[0..header.len()].copy_from_slice(header);
            ctx.aad_buffer_len += header.len();
        }

        // Fill the header block to make a full block.
        let len_to_copy = min(aad.len(), C::BLOCK_SIZE - ctx.aad_buffer_len);
        ctx.aad_buffer[ctx.aad_buffer_len..ctx.aad_buffer_len + len_to_copy].copy_from_slice(&aad[..len_to_copy]);
        ctx.aad_buffer_len += len_to_copy;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        let mut aad_len_remaining = aad.len() - len_to_copy;

        if ctx.aad_buffer_len < C::BLOCK_SIZE {
            // The buffer isn't full and this is the last buffer, so process it as is (already padded).
            if last_aad_block {
                Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
                assert_eq!(T::regs().sr().read().ifem(), true);

                // Switch to payload phase.
                ctx.aad_complete = true;
                T::regs().cr().modify(|w| w.set_crypen(false));
                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
                T::regs().cr().modify(|w| w.fflush());
            } else {
                // Just return because we don't yet have a full block to process.
                return;
            }
        } else {
            // Load the full block from the buffer.
            Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
            assert_eq!(T::regs().sr().read().ifem(), true);
        }

        // Handle a partial block that is passed in.
        ctx.aad_buffer_len = 0;
        let leftovers = aad_len_remaining % C::BLOCK_SIZE;
        ctx.aad_buffer[..leftovers].copy_from_slice(&aad[aad.len() - leftovers..aad.len()]);
        ctx.aad_buffer_len += leftovers;
        ctx.aad_buffer[ctx.aad_buffer_len..].fill(0);
        aad_len_remaining -= leftovers;
        assert_eq!(aad_len_remaining % C::BLOCK_SIZE, 0);

        // Load full data blocks into core.
        let num_full_blocks = aad_len_remaining / C::BLOCK_SIZE;
        let start_index = len_to_copy;
        let end_index = start_index + (C::BLOCK_SIZE * num_full_blocks);
        Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &aad[start_index..end_index]).await;

        if last_aad_block {
            if leftovers > 0 {
                Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &ctx.aad_buffer).await;
                assert_eq!(T::regs().sr().read().ifem(), true);
            }
            // Switch to payload phase.
            ctx.aad_complete = true;
            T::regs().cr().modify(|w| w.set_crypen(false));
            T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
            T::regs().cr().modify(|w| w.fflush());
        }

        self.store_context(ctx);
    }

    /// Performs encryption/decryption on the provided context.
    /// The context determines algorithm, mode, and state of the crypto accelerator.
    /// When the last piece of data is supplied, `last_block` should be `true`.
    /// This function panics under various mismatches of parameters.
    /// Output buffer must be at least as long as the input buffer.
    /// Data must be a multiple of block size (128 bits for AES, 64 bits for DES) for CBC and ECB modes.
    /// Padding or ciphertext stealing must be managed by the application for these modes.
    /// Data must also be a multiple of block size unless `last_block` is `true`.
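    ///
    /// A sketch of streaming a payload through in chunks (chunk sizes are
    /// placeholders; every chunk except the final one must be whole blocks):
    ///
    /// ```ignore
    /// cryp.payload_blocking(&mut ctx, &pt[..32], &mut ct[..32], false);
    /// cryp.payload_blocking(&mut ctx, &pt[32..], &mut ct[32..], true);
    /// ```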
1400    pub fn payload_blocking<'c, C: Cipher<'c> + CipherSized + IVSized>(
1401        &self,
1402        ctx: &mut Context<'c, C>,
1403        input: &[u8],
1404        output: &mut [u8],
1405        last_block: bool,
1406    ) {
1407        self.load_context(ctx);
1408
1409        let last_block_remainder = input.len() % C::BLOCK_SIZE;
1410
1411        // Perform checks for correctness.
1412        if !ctx.aad_complete && ctx.header_len > 0 {
1413            panic!("Additional associated data must be processed first!");
1414        } else if !ctx.aad_complete {
1415            #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1416            {
1417                ctx.aad_complete = true;
1418                T::regs().cr().modify(|w| w.set_crypen(false));
1419                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
1420                T::regs().cr().modify(|w| w.fflush());
1421                T::regs().cr().modify(|w| w.set_crypen(true));
1422            }
1423        }
1424        if ctx.last_block_processed {
1425            panic!("The last block has already been processed!");
1426        }
1427        if input.len() > output.len() {
1428            panic!("Output buffer length must match input length.");
1429        }
1430        if !last_block {
1431            if last_block_remainder != 0 {
1432                panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE);
1433            }
1434        }
1435        if C::REQUIRES_PADDING {
1436            if last_block_remainder != 0 {
1437                panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE);
1438            }
1439        }
1440        if last_block {
1441            ctx.last_block_processed = true;
1442        }
1443
1444        // Load data into core, block by block.
1445        let num_full_blocks = input.len() / C::BLOCK_SIZE;
1446        for block in 0..num_full_blocks {
1447            let index = block * C::BLOCK_SIZE;
1448            // Write block in
1449            self.write_bytes_blocking(C::BLOCK_SIZE, &input[index..index + C::BLOCK_SIZE]);
1450            // Read block out
1451            self.read_bytes_blocking(C::BLOCK_SIZE, &mut output[index..index + C::BLOCK_SIZE]);
1452        }
1453
1454        // Handle the final block, which is incomplete.
1455        if last_block_remainder > 0 {
1456            let padding_len = C::BLOCK_SIZE - last_block_remainder;
1457            let temp1 = ctx.cipher.pre_final(T::regs(), ctx.dir, padding_len);
1458
1459            let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
1460            let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
1461            last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]);
1462            self.write_bytes_blocking(C::BLOCK_SIZE, &last_block);
1463            self.read_bytes_blocking(C::BLOCK_SIZE, &mut intermediate_data);
1464
1465            // Handle the last block depending on mode.
1466            let output_len = output.len();
1467            output[output_len - last_block_remainder..output_len]
1468                .copy_from_slice(&intermediate_data[0..last_block_remainder]);
1469
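            // Mark which bytes of the final block are valid (not padding) for post-processing.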
1470            let mut mask: [u8; 16] = [0; 16];
1471            mask[..last_block_remainder].fill(0xFF);
1472            ctx.cipher
1473                .post_final_blocking(T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask);
1474        }
1475
1476        ctx.payload_len += input.len() as u64;
1477
1478        self.store_context(ctx);
1479    }
1480
1481    /// Performs encryption/decryption on the provided context.
1482    /// The context determines the algorithm, mode, and state of the crypto accelerator.
1483    /// When the last piece of data is supplied, `last_block` must be `true`.
1484    /// This function panics if any of the following requirements is violated:
1485    /// the output buffer must be at least as long as the input buffer;
1486    /// in CBC and ECB modes, data must be a multiple of the block size (128 bits for AES, 64 bits for DES),
1487    /// with padding or ciphertext stealing managed by the application;
1488    /// in all other cases, data must be a multiple of the block size unless `last_block` is `true`.
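    ///
    /// # Example (sketch)
    ///
    /// The DMA-driven counterpart of [`Self::payload_blocking`], assuming a
    /// context created by the corresponding async `start` method:
    ///
    /// ```ignore
    /// cryp.payload(&mut ctx, &plaintext, &mut ciphertext, true).await;
    /// let tag: [u8; 16] = cryp.finish(ctx).await;
    /// ```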
1489    pub async fn payload<'c, C: Cipher<'c> + CipherSized + IVSized>(
1490        &mut self,
1491        ctx: &mut Context<'c, C>,
1492        input: &[u8],
1493        output: &mut [u8],
1494        last_block: bool,
1495    ) where
1496        DmaIn: crate::cryp::DmaIn<T>,
1497        DmaOut: crate::cryp::DmaOut<T>,
1498    {
1499        self.load_context(ctx);
1500
1501        let last_block_remainder = input.len() % C::BLOCK_SIZE;
1502
1503        // Perform checks for correctness.
1504        if !ctx.aad_complete && ctx.header_len > 0 {
1505            panic!("Additional associated data must be processed first!");
1506        } else if !ctx.aad_complete {
1507            #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1508            {
1509                ctx.aad_complete = true;
1510                T::regs().cr().modify(|w| w.set_crypen(false));
1511                T::regs().cr().modify(|w| w.set_gcm_ccmph(2));
1512                T::regs().cr().modify(|w| w.fflush());
1513                T::regs().cr().modify(|w| w.set_crypen(true));
1514            }
1515        }
1516        if ctx.last_block_processed {
1517            panic!("The last block has already been processed!");
1518        }
1519        if input.len() > output.len() {
1520            panic!("Output buffer length must match input length.");
1521        }
1522        if !last_block {
1523            if last_block_remainder != 0 {
1524                panic!("Input length must be a multiple of {} bytes.", C::BLOCK_SIZE);
1525            }
1526        }
1527        if C::REQUIRES_PADDING {
1528            if last_block_remainder != 0 {
1529                panic!("Input must be a multiple of {} bytes in ECB and CBC modes. Consider padding or ciphertext stealing.", C::BLOCK_SIZE);
1530            }
1531        }
1532        if last_block {
1533            ctx.last_block_processed = true;
1534        }
1535
1536        // Load data into core, block by block.
1537        let num_full_blocks = input.len() / C::BLOCK_SIZE;
1538        for block in 0..num_full_blocks {
1539            let index = block * C::BLOCK_SIZE;
1540            // Read block out
1541            let read = Self::read_bytes(
1542                &mut self.outdma,
1543                C::BLOCK_SIZE,
1544                &mut output[index..index + C::BLOCK_SIZE],
1545            );
1546            // Write block in
1547            let write = Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &input[index..index + C::BLOCK_SIZE]);
1548            embassy_futures::join::join(read, write).await;
1549        }
1550
1551        // Handle a final partial block, if present; it is zero-padded to a full block for processing.
1552        if last_block_remainder > 0 {
1553            let padding_len = C::BLOCK_SIZE - last_block_remainder;
1554            let temp1 = ctx.cipher.pre_final(T::regs(), ctx.dir, padding_len);
1555
1556            let mut intermediate_data: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
1557            let mut last_block: [u8; AES_BLOCK_SIZE] = [0; AES_BLOCK_SIZE];
1558            last_block[..last_block_remainder].copy_from_slice(&input[input.len() - last_block_remainder..input.len()]);
1559            let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut intermediate_data);
1560            let write = Self::write_bytes(&mut self.indma, C::BLOCK_SIZE, &last_block);
1561            embassy_futures::join::join(read, write).await;
1562
1563            // Handle the last block depending on mode.
1564            let output_len = output.len();
1565            output[output_len - last_block_remainder..output_len]
1566                .copy_from_slice(&intermediate_data[0..last_block_remainder]);
1567
1568            let mut mask: [u8; 16] = [0; 16];
1569            mask[..last_block_remainder].fill(0xFF);
1570            ctx.cipher
1571                .post_final(T::regs(), self, ctx.dir, &mut intermediate_data, temp1, mask)
1572                .await;
1573        }
1574
1575        ctx.payload_len += input.len() as u64;
1576
1577        self.store_context(ctx);
1578    }
1579
1580    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1581    /// Generates an authentication tag for authenticated ciphers including GCM, CCM, and GMAC.
1582    /// Called after all data has been encrypted/decrypted by `payload_blocking`.
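    ///
    /// A sketch of tag checking after a decryption pass; `expected_tag` is an
    /// assumed value, and real code should compare tags in constant time:
    ///
    /// ```ignore
    /// let tag: [u8; 16] = cryp.finish_blocking(ctx);
    /// assert_eq!(tag, expected_tag);
    /// ```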
1583    pub fn finish_blocking<
1584        'c,
1585        const TAG_SIZE: usize,
1586        C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
1587    >(
1588        &self,
1589        mut ctx: Context<'c, C>,
1590    ) -> [u8; TAG_SIZE] {
1591        self.load_context(&mut ctx);
1592
1593        T::regs().cr().modify(|w| w.set_crypen(false));
1594        T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
1595        T::regs().cr().modify(|w| w.set_crypen(true));
1596
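        // The final block of the GCM/CCM tag computation encodes the AAD and
        // payload lengths in bits, each as a 64-bit value split into two
        // 32-bit words.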
1597        let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32;
1598        let headerlen2: u32 = (ctx.header_len * 8) as u32;
1599        let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32;
1600        let payloadlen2: u32 = (ctx.payload_len * 8) as u32;
1601
1602        #[cfg(cryp_v2)]
1603        let footer: [u32; 4] = [
1604            headerlen1.swap_bytes(),
1605            headerlen2.swap_bytes(),
1606            payloadlen1.swap_bytes(),
1607            payloadlen2.swap_bytes(),
1608        ];
1609        #[cfg(any(cryp_v3, cryp_v4))]
1610        let footer: [u32; 4] = [headerlen1, headerlen2, payloadlen1, payloadlen2];
1611
1612        self.write_words_blocking(C::BLOCK_SIZE, &footer);
1613
1614        while !T::regs().sr().read().ofne() {}
1615
1616        let mut full_tag: [u8; 16] = [0; 16];
1617        self.read_bytes_blocking(C::BLOCK_SIZE, &mut full_tag);
1618        let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE];
1619        tag.copy_from_slice(&full_tag[0..TAG_SIZE]);
1620
1621        T::regs().cr().modify(|w| w.set_crypen(false));
1622
1623        tag
1624    }
1625
1626    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1627    /// Generates an authentication tag for authenticated ciphers including GCM, CCM, and GMAC.
1628    /// Called after all data has been encrypted/decrypted by `payload`.
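    ///
    /// Note that this method consumes the context, so no further data can be
    /// processed with it afterwards:
    ///
    /// ```ignore
    /// let tag: [u8; 16] = cryp.finish(ctx).await;
    /// ```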
1629    pub async fn finish<
1630        'c,
1631        const TAG_SIZE: usize,
1632        C: Cipher<'c> + CipherSized + IVSized + CipherAuthenticated<TAG_SIZE>,
1633    >(
1634        &mut self,
1635        mut ctx: Context<'c, C>,
1636    ) -> [u8; TAG_SIZE]
1637    where
1638        DmaIn: crate::cryp::DmaIn<T>,
1639        DmaOut: crate::cryp::DmaOut<T>,
1640    {
1641        self.load_context(&mut ctx);
1642
1643        T::regs().cr().modify(|w| w.set_crypen(false));
1644        T::regs().cr().modify(|w| w.set_gcm_ccmph(3));
1645        T::regs().cr().modify(|w| w.set_crypen(true));
1646
1647        let headerlen1: u32 = ((ctx.header_len * 8) >> 32) as u32;
1648        let headerlen2: u32 = (ctx.header_len * 8) as u32;
1649        let payloadlen1: u32 = ((ctx.payload_len * 8) >> 32) as u32;
1650        let payloadlen2: u32 = (ctx.payload_len * 8) as u32;
1651
1652        #[cfg(cryp_v2)]
1653        let footer: [u32; 4] = [
1654            headerlen1.swap_bytes(),
1655            headerlen2.swap_bytes(),
1656            payloadlen1.swap_bytes(),
1657            payloadlen2.swap_bytes(),
1658        ];
1659        #[cfg(any(cryp_v3, cryp_v4))]
1660        let footer: [u32; 4] = [headerlen1, headerlen2, payloadlen1, payloadlen2];
1661
1662        let write = Self::write_words(&mut self.indma, C::BLOCK_SIZE, &footer);
1663
1664        let mut full_tag: [u8; 16] = [0; 16];
1665        let read = Self::read_bytes(&mut self.outdma, C::BLOCK_SIZE, &mut full_tag);
1666
1667        embassy_futures::join::join(read, write).await;
1668
1669        let mut tag: [u8; TAG_SIZE] = [0; TAG_SIZE];
1670        tag.copy_from_slice(&full_tag[0..TAG_SIZE]);
1671
1672        T::regs().cr().modify(|w| w.set_crypen(false));
1673
1674        tag
1675    }
1676
1677    fn load_key(&self, key: &[u8]) {
1678        // Load the key into the registers.
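        // Keys load most-significant word first: a 256-bit key fills K0..K3,
        // a 192-bit key K1..K3, a 128-bit key K2..K3, and a 64-bit DES key
        // K3 only. A short final segment is zero-padded.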
1679        let mut keyidx = 0;
1680        let mut keyword: [u8; 4] = [0; 4];
1681        let keylen = key.len() * 8;
1682        if keylen > 192 {
1683            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
1684            keyidx += 4;
1685            T::regs().key(0).klr().write_value(u32::from_be_bytes(keyword));
1686            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
1687            keyidx += 4;
1688            T::regs().key(0).krr().write_value(u32::from_be_bytes(keyword));
1689        }
1690        if keylen > 128 {
1691            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
1692            keyidx += 4;
1693            T::regs().key(1).klr().write_value(u32::from_be_bytes(keyword));
1694            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
1695            keyidx += 4;
1696            T::regs().key(1).krr().write_value(u32::from_be_bytes(keyword));
1697        }
1698        if keylen > 64 {
1699            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
1700            keyidx += 4;
1701            T::regs().key(2).klr().write_value(u32::from_be_bytes(keyword));
1702            keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
1703            keyidx += 4;
1704            T::regs().key(2).krr().write_value(u32::from_be_bytes(keyword));
1705        }
1706        keyword.copy_from_slice(&key[keyidx..keyidx + 4]);
1707        keyidx += 4;
1708        T::regs().key(3).klr().write_value(u32::from_be_bytes(keyword));
1709        keyword = [0; 4];
1710        keyword[0..key.len() - keyidx].copy_from_slice(&key[keyidx..key.len()]);
1711        T::regs().key(3).krr().write_value(u32::from_be_bytes(keyword));
1712    }
1713
1714    fn store_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &mut Context<'c, C>) {
1715        // Wait for data block processing to finish.
1716        while !T::regs().sr().read().ifem() {}
1717        while T::regs().sr().read().ofne() {}
1718        while T::regs().sr().read().busy() {}
1719
1720        // Disable crypto processor.
1721        T::regs().cr().modify(|w| w.set_crypen(false));
1722
1723        // Save the peripheral state.
1724        ctx.cr = T::regs().cr().read().0;
1725        ctx.iv[0] = T::regs().init(0).ivlr().read();
1726        ctx.iv[1] = T::regs().init(0).ivrr().read();
1727        ctx.iv[2] = T::regs().init(1).ivlr().read();
1728        ctx.iv[3] = T::regs().init(1).ivrr().read();
1729
1730        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1731        for i in 0..8 {
1732            ctx.csgcmccm[i] = T::regs().csgcmccmr(i).read();
1733            ctx.csgcm[i] = T::regs().csgcmr(i).read();
1734        }
1735    }
1736
1737    fn load_context<'c, C: Cipher<'c> + CipherSized>(&self, ctx: &Context<'c, C>) {
1738        // Reload state registers.
1739        T::regs().cr().write(|w| w.0 = ctx.cr);
1740        T::regs().init(0).ivlr().write_value(ctx.iv[0]);
1741        T::regs().init(0).ivrr().write_value(ctx.iv[1]);
1742        T::regs().init(1).ivlr().write_value(ctx.iv[2]);
1743        T::regs().init(1).ivrr().write_value(ctx.iv[3]);
1744
1745        #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1746        for i in 0..8 {
1747            T::regs().csgcmccmr(i).write_value(ctx.csgcmccm[i]);
1748            T::regs().csgcmr(i).write_value(ctx.csgcm[i]);
1749        }
1750        self.load_key(ctx.cipher.key());
1751
1752        // Prepare key if applicable.
1753        ctx.cipher.prepare_key(T::regs());
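        // Restore CR, since key preparation may have changed the algorithm mode.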
1754        T::regs().cr().write(|w| w.0 = ctx.cr);
1755
1756        // Enable crypto processor.
1757        T::regs().cr().modify(|w| w.set_crypen(true));
1758    }
1759
1760    fn write_bytes_blocking(&self, block_size: usize, blocks: &[u8]) {
1761        // Ensure input is a multiple of block size.
1762        assert_eq!(blocks.len() % block_size, 0);
1763        let mut index = 0;
1764        let end_index = blocks.len();
1765        while index < end_index {
1766            let mut in_word: [u8; 4] = [0; 4];
1767            in_word.copy_from_slice(&blocks[index..index + 4]);
1768            T::regs().din().write_value(u32::from_ne_bytes(in_word));
1769            index += 4;
1770            if index % block_size == 0 {
1771                // Block until input FIFO is empty.
1772                while !T::regs().sr().read().ifem() {}
1773            }
1774        }
1775    }
1776
1777    async fn write_bytes(dma: &mut PeripheralRef<'_, DmaIn>, block_size: usize, blocks: &[u8])
1778    where
1779        DmaIn: crate::cryp::DmaIn<T>,
1780    {
1781        if blocks.is_empty() {
1782            return;
1783        }
1784        // Ensure input is a multiple of block size.
1785        assert_eq!(blocks.len() % block_size, 0);
1786        // Configure DMA to transfer input to crypto core.
1787        let dma_request = dma.request();
1788        let dst_ptr = T::regs().din().as_ptr();
1789        let num_words = blocks.len() / 4;
1790        let src_ptr = ptr::slice_from_raw_parts(blocks.as_ptr().cast(), num_words);
1791        let options = TransferOptions {
1792            #[cfg(not(gpdma))]
1793            priority: crate::dma::Priority::High,
1794            ..Default::default()
1795        };
1796        let dma_transfer = unsafe { Transfer::new_write_raw(dma, dma_request, src_ptr, dst_ptr, options) };
1797        T::regs().dmacr().modify(|w| w.set_dien(true));
1798        // Wait for the transfer to complete.
1799        dma_transfer.await;
1800    }
1801
1802    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1803    fn write_words_blocking(&self, block_size: usize, blocks: &[u32]) {
1804        assert_eq!((blocks.len() * 4) % block_size, 0);
1805        let mut byte_counter: usize = 0;
1806        for word in blocks {
1807            T::regs().din().write_value(*word);
1808            byte_counter += 4;
1809            if byte_counter % block_size == 0 {
1810                // Block until input FIFO is empty.
1811                while !T::regs().sr().read().ifem() {}
1812            }
1813        }
1814    }
1815
1816    #[cfg(any(cryp_v2, cryp_v3, cryp_v4))]
1817    async fn write_words(dma: &mut PeripheralRef<'_, DmaIn>, block_size: usize, blocks: &[u32])
1818    where
1819        DmaIn: crate::cryp::DmaIn<T>,
1820    {
1821        if blocks.is_empty() {
1822            return;
1823        }
1824        // Ensure input is a multiple of block size.
1825        assert_eq!((blocks.len() * 4) % block_size, 0);
1826        // Configure DMA to transfer input to crypto core.
1827        let dma_request = dma.request();
1828        let dst_ptr = T::regs().din().as_ptr();
1829        let num_words = blocks.len();
1830        let src_ptr = ptr::slice_from_raw_parts(blocks.as_ptr().cast(), num_words);
1831        let options = TransferOptions {
1832            #[cfg(not(gpdma))]
1833            priority: crate::dma::Priority::High,
1834            ..Default::default()
1835        };
1836        let dma_transfer = unsafe { Transfer::new_write_raw(dma, dma_request, src_ptr, dst_ptr, options) };
1837        T::regs().dmacr().modify(|w| w.set_dien(true));
1838        // Wait for the transfer to complete.
1839        dma_transfer.await;
1840    }
1841
1842    fn read_bytes_blocking(&self, block_size: usize, blocks: &mut [u8]) {
1843        // Block until there is output to read.
1844        while !T::regs().sr().read().ofne() {}
1845        // Ensure output is a multiple of block size.
1846        assert_eq!(blocks.len() % block_size, 0);
1847        // Read block out
1848        let mut index = 0;
1849        let end_index = blocks.len();
1850        while index < end_index {
1851            let out_word: u32 = T::regs().dout().read();
1852            blocks[index..index + 4].copy_from_slice(u32::to_ne_bytes(out_word).as_slice());
1853            index += 4;
1854        }
1855    }
1856
1857    async fn read_bytes(dma: &mut PeripheralRef<'_, DmaOut>, block_size: usize, blocks: &mut [u8])
1858    where
1859        DmaOut: crate::cryp::DmaOut<T>,
1860    {
1861        if blocks.is_empty() {
1862            return;
1863        }
1864        // Ensure the output buffer is a multiple of the block size.
1865        assert_eq!(blocks.len() % block_size, 0);
1866        // Configure DMA to get output from crypto core.
1867        let dma_request = dma.request();
1868        let src_ptr = T::regs().dout().as_ptr();
1869        let num_words = blocks.len() / 4;
1870        let dst_ptr = ptr::slice_from_raw_parts_mut(blocks.as_mut_ptr().cast(), num_words);
1871        let options = TransferOptions {
1872            #[cfg(not(gpdma))]
1873            priority: crate::dma::Priority::VeryHigh,
1874            ..Default::default()
1875        };
1876        let dma_transfer = unsafe { Transfer::new_read_raw(dma, dma_request, src_ptr, dst_ptr, options) };
1877        T::regs().dmacr().modify(|w| w.set_doen(true));
1878        // Wait for the transfer to complete.
1879        dma_transfer.await;
1880    }
1881}
1882
1883trait SealedInstance {
1884    fn regs() -> pac::cryp::Cryp;
1885}
1886
1887/// CRYP instance trait.
1888#[allow(private_bounds)]
1889pub trait Instance: SealedInstance + Peripheral<P = Self> + crate::rcc::RccPeripheral + 'static + Send {
1890    /// Interrupt for this CRYP instance.
1891    type Interrupt: interrupt::typelevel::Interrupt;
1892}
1893
1894foreach_interrupt!(
1895    ($inst:ident, cryp, CRYP, GLOBAL, $irq:ident) => {
1896        impl Instance for peripherals::$inst {
1897            type Interrupt = crate::interrupt::typelevel::$irq;
1898        }
1899
1900        impl SealedInstance for peripherals::$inst {
1901            fn regs() -> crate::pac::cryp::Cryp {
1902                crate::pac::$inst
1903            }
1904        }
1905    };
1906);
1907
1908dma_trait!(DmaIn, Instance);
1909dma_trait!(DmaOut, Instance);