use core::future::{poll_fn, Future};
use core::pin::Pin;
use core::sync::atomic::{fence, AtomicUsize, Ordering};
use core::task::{Context, Poll, Waker};

use embassy_hal_internal::{into_ref, Peripheral, PeripheralRef};
use embassy_sync::waitqueue::AtomicWaker;

use super::ringbuffer::{DmaCtrl, Error, ReadableDmaRingBuffer, WritableDmaRingBuffer};
use super::word::{Word, WordSize};
use super::{AnyChannel, Channel, Dir, Request, STATE};
use crate::interrupt::typelevel::Interrupt;
use crate::{interrupt, pac};

pub(crate) struct ChannelInfo {
    pub(crate) dma: DmaInfo,
    pub(crate) num: usize,
    #[cfg(feature = "_dual-core")]
    pub(crate) irq: pac::Interrupt,
    #[cfg(dmamux)]
    pub(crate) dmamux: super::DmamuxInfo,
}

#[derive(Clone, Copy)]
pub(crate) enum DmaInfo {
    #[cfg(dma)]
    Dma(pac::dma::Dma),
    #[cfg(bdma)]
    Bdma(pac::bdma::Dma),
}

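/// Options for a DMA transfer.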
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[non_exhaustive]
pub struct TransferOptions {
    /// Peripheral burst transfer configuration.
    #[cfg(dma)]
    pub pburst: Burst,
    /// Memory burst transfer configuration.
    #[cfg(dma)]
    pub mburst: Burst,
    /// Flow control configuration.
    #[cfg(dma)]
    pub flow_ctrl: FlowControl,
    /// FIFO threshold for DMA FIFO mode. If `None`, direct mode is used.
    #[cfg(dma)]
    pub fifo_threshold: Option<FifoThreshold>,
    /// Request priority level.
    pub priority: Priority,
    /// Enable circular DMA.
    pub circular: bool,
    /// Enable the half transfer interrupt.
    pub half_transfer_ir: bool,
    /// Enable the transfer complete interrupt.
    pub complete_transfer_ir: bool,
}

impl Default for TransferOptions {
    fn default() -> Self {
        Self {
            #[cfg(dma)]
            pburst: Burst::Single,
            #[cfg(dma)]
            mburst: Burst::Single,
            #[cfg(dma)]
            flow_ctrl: FlowControl::Dma,
            #[cfg(dma)]
            fifo_threshold: None,
            priority: Priority::VeryHigh,
            circular: false,
            half_transfer_ir: false,
            complete_transfer_ir: true,
        }
    }
}

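/// DMA request priority.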
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum Priority {
    /// Low priority.
    Low,
    /// Medium priority.
    Medium,
    /// High priority.
    High,
    /// Very high priority.
    VeryHigh,
}

#[cfg(dma)]
impl From<Priority> for pac::dma::vals::Pl {
    fn from(value: Priority) -> Self {
        match value {
            Priority::Low => pac::dma::vals::Pl::LOW,
            Priority::Medium => pac::dma::vals::Pl::MEDIUM,
            Priority::High => pac::dma::vals::Pl::HIGH,
            Priority::VeryHigh => pac::dma::vals::Pl::VERY_HIGH,
        }
    }
}

#[cfg(bdma)]
impl From<Priority> for pac::bdma::vals::Pl {
    fn from(value: Priority) -> Self {
        match value {
            Priority::Low => pac::bdma::vals::Pl::LOW,
            Priority::Medium => pac::bdma::vals::Pl::MEDIUM,
            Priority::High => pac::bdma::vals::Pl::HIGH,
            Priority::VeryHigh => pac::bdma::vals::Pl::VERY_HIGH,
        }
    }
}

#[cfg(dma)]
pub use dma_only::*;
#[cfg(dma)]
mod dma_only {
    use pac::dma::vals;

    use super::*;

    impl From<WordSize> for vals::Size {
        fn from(raw: WordSize) -> Self {
            match raw {
                WordSize::OneByte => Self::BITS8,
                WordSize::TwoBytes => Self::BITS16,
                WordSize::FourBytes => Self::BITS32,
            }
        }
    }

    impl From<Dir> for vals::Dir {
        fn from(raw: Dir) -> Self {
            match raw {
                Dir::MemoryToPeripheral => Self::MEMORY_TO_PERIPHERAL,
                Dir::PeripheralToMemory => Self::PERIPHERAL_TO_MEMORY,
            }
        }
    }

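    /// DMA transfer burst setting.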
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    #[cfg_attr(feature = "defmt", derive(defmt::Format))]
    pub enum Burst {
        /// Single transfer.
        Single,
        /// Incremental burst of 4 beats.
        Incr4,
        /// Incremental burst of 8 beats.
        Incr8,
        /// Incremental burst of 16 beats.
        Incr16,
    }

    impl From<Burst> for vals::Burst {
        fn from(burst: Burst) -> Self {
            match burst {
                Burst::Single => vals::Burst::SINGLE,
                Burst::Incr4 => vals::Burst::INCR4,
                Burst::Incr8 => vals::Burst::INCR8,
                Burst::Incr16 => vals::Burst::INCR16,
            }
        }
    }

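    /// DMA flow control setting.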
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    #[cfg_attr(feature = "defmt", derive(defmt::Format))]
    pub enum FlowControl {
        /// Flow control by the DMA controller.
        Dma,
        /// Flow control by the peripheral.
        Peripheral,
    }

    impl From<FlowControl> for vals::Pfctrl {
        fn from(flow: FlowControl) -> Self {
            match flow {
                FlowControl::Dma => vals::Pfctrl::DMA,
                FlowControl::Peripheral => vals::Pfctrl::PERIPHERAL,
            }
        }
    }

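    /// DMA FIFO threshold.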
    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
    #[cfg_attr(feature = "defmt", derive(defmt::Format))]
    pub enum FifoThreshold {
        /// 1/4 full FIFO.
        Quarter,
        /// 1/2 full FIFO.
        Half,
        /// 3/4 full FIFO.
        ThreeQuarters,
        /// Full FIFO.
        Full,
    }

    impl From<FifoThreshold> for vals::Fth {
        fn from(value: FifoThreshold) -> Self {
            match value {
                FifoThreshold::Quarter => vals::Fth::QUARTER,
                FifoThreshold::Half => vals::Fth::HALF,
                FifoThreshold::ThreeQuarters => vals::Fth::THREE_QUARTERS,
                FifoThreshold::Full => vals::Fth::FULL,
            }
        }
    }
}

#[cfg(bdma)]
mod bdma_only {
    use pac::bdma::vals;

    use super::*;

    impl From<WordSize> for vals::Size {
        fn from(raw: WordSize) -> Self {
            match raw {
                WordSize::OneByte => Self::BITS8,
                WordSize::TwoBytes => Self::BITS16,
                WordSize::FourBytes => Self::BITS32,
            }
        }
    }

    impl From<Dir> for vals::Dir {
        fn from(raw: Dir) -> Self {
            match raw {
                Dir::MemoryToPeripheral => Self::FROM_MEMORY,
                Dir::PeripheralToMemory => Self::FROM_PERIPHERAL,
            }
        }
    }
}

pub(crate) struct ChannelState {
    waker: AtomicWaker,
    complete_count: AtomicUsize,
}

impl ChannelState {
    pub(crate) const NEW: Self = Self {
        waker: AtomicWaker::new(),
        complete_count: AtomicUsize::new(0),
    };
}

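// Sets the priority of every DMA/BDMA interrupt and enables it (on dual-core parts the IRQ is
// instead enabled when a channel is first configured), then runs the generated per-instance init.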
pub(crate) unsafe fn init(
    cs: critical_section::CriticalSection,
    #[cfg(dma)] dma_priority: interrupt::Priority,
    #[cfg(bdma)] bdma_priority: interrupt::Priority,
) {
    foreach_interrupt! {
        ($peri:ident, dma, $block:ident, $signal_name:ident, $irq:ident) => {
            crate::interrupt::typelevel::$irq::set_priority_with_cs(cs, dma_priority);
            #[cfg(not(feature = "_dual-core"))]
            crate::interrupt::typelevel::$irq::enable();
        };
        ($peri:ident, bdma, $block:ident, $signal_name:ident, $irq:ident) => {
            crate::interrupt::typelevel::$irq::set_priority_with_cs(cs, bdma_priority);
            #[cfg(not(feature = "_dual-core"))]
            crate::interrupt::typelevel::$irq::enable();
        };
    }
    crate::_generated::init_dma();
    crate::_generated::init_bdma();
}

impl AnyChannel {
    pub(crate) unsafe fn on_irq(&self) {
        let info = self.info();
        let state = &STATE[self.id as usize];
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => {
                let cr = r.st(info.num).cr();
                let isr = r.isr(info.num / 4).read();

                if isr.teif(info.num % 4) {
                    panic!("DMA: error on DMA@{:08x} channel {}", r.as_ptr() as u32, info.num);
                }

                if isr.htif(info.num % 4) && cr.read().htie() {
                    // Acknowledge the half transfer interrupt.
                    r.ifcr(info.num / 4).write(|w| w.set_htif(info.num % 4, true));
                } else if isr.tcif(info.num % 4) && cr.read().tcie() {
                    // Acknowledge the transfer complete interrupt and count the completed transfer.
                    r.ifcr(info.num / 4).write(|w| w.set_tcif(info.num % 4, true));
                    state.complete_count.fetch_add(1, Ordering::Release);
                } else {
                    return;
                }
                state.waker.wake();
            }
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => {
                let isr = r.isr().read();
                let cr = r.ch(info.num).cr();

                if isr.teif(info.num) {
                    panic!("DMA: error on BDMA@{:08x} channel {}", r.as_ptr() as u32, info.num);
                }

                if isr.htif(info.num) && cr.read().htie() {
                    // Acknowledge the half transfer interrupt.
                    r.ifcr().write(|w| w.set_htif(info.num, true));
                } else if isr.tcif(info.num) && cr.read().tcie() {
                    // Acknowledge the transfer complete interrupt and count the completed transfer.
                    r.ifcr().write(|w| w.set_tcif(info.num, true));

                    #[cfg(not(armv6m))]
                    state.complete_count.fetch_add(1, Ordering::Release);
                    // armv6m (Cortex-M0/M0+) has no atomic read-modify-write, so use a critical section instead.
                    #[cfg(armv6m)]
                    critical_section::with(|_| {
                        let x = state.complete_count.load(Ordering::Relaxed);
                        state.complete_count.store(x + 1, Ordering::Release);
                    })
                } else {
                    return;
                }

                state.waker.wake();
            }
        }
    }

    unsafe fn configure(
        &self,
        _request: Request,
        dir: Dir,
        peri_addr: *const u32,
        mem_addr: *mut u32,
        mem_len: usize,
        incr_mem: bool,
        data_size: WordSize,
        options: TransferOptions,
    ) {
        let info = self.info();
        #[cfg(feature = "_dual-core")]
        {
            use embassy_hal_internal::interrupt::InterruptExt as _;
            info.irq.enable();
        }

        #[cfg(dmamux)]
        super::dmamux::configure_dmamux(&info.dmamux, _request);

        assert!(mem_len > 0 && mem_len <= 0xFFFF);

        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => {
                let ch = r.st(info.num);

                // Ensure previous memory writes are visible to the DMA before reconfiguring the channel.
                fence(Ordering::SeqCst);

                self.clear_irqs();

                ch.par().write_value(peri_addr as u32);
                ch.m0ar().write_value(mem_addr as u32);
                ch.ndtr().write_value(pac::dma::regs::Ndtr(mem_len as _));
                ch.fcr().write(|w| {
                    if let Some(fth) = options.fifo_threshold {
                        // FIFO mode
                        w.set_dmdis(pac::dma::vals::Dmdis::DISABLED);
                        w.set_fth(fth.into());
                    } else {
                        // Direct mode
                        w.set_dmdis(pac::dma::vals::Dmdis::ENABLED);
                    }
                });
                ch.cr().write(|w| {
                    w.set_dir(dir.into());
                    w.set_msize(data_size.into());
                    w.set_psize(data_size.into());
                    w.set_pl(options.priority.into());
                    w.set_minc(incr_mem);
                    w.set_pinc(false);
                    w.set_teie(true);
                    w.set_htie(options.half_transfer_ir);
                    w.set_tcie(options.complete_transfer_ir);
                    w.set_circ(options.circular);
                    #[cfg(dma_v1)]
                    w.set_trbuff(true);
                    #[cfg(dma_v2)]
                    w.set_chsel(_request);
                    w.set_pburst(options.pburst.into());
                    w.set_mburst(options.mburst.into());
                    w.set_pfctrl(options.flow_ctrl.into());
                    // Configure only; the channel is enabled later by `start()`.
                    w.set_en(false);
                });
            }
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => {
                #[cfg(bdma_v2)]
                critical_section::with(|_| r.cselr().modify(|w| w.set_cs(info.num, _request)));

                let state: &ChannelState = &STATE[self.id as usize];
                let ch = r.ch(info.num);

                state.complete_count.store(0, Ordering::Release);
                self.clear_irqs();

                ch.par().write_value(peri_addr as u32);
                ch.mar().write_value(mem_addr as u32);
                ch.ndtr().write(|w| w.set_ndt(mem_len as u16));
                ch.cr().write(|w| {
                    w.set_psize(data_size.into());
                    w.set_msize(data_size.into());
                    w.set_minc(incr_mem);
                    w.set_dir(dir.into());
                    w.set_teie(true);
                    w.set_tcie(options.complete_transfer_ir);
                    w.set_htie(options.half_transfer_ir);
                    w.set_circ(options.circular);
                    w.set_pl(options.priority.into());
                    // Configure only; the channel is enabled later by `start()`.
                    w.set_en(false);
                });
            }
        }
    }

    fn start(&self) {
        let info = self.info();
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => {
                let ch = r.st(info.num);
                ch.cr().modify(|w| w.set_en(true))
            }
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => {
                let ch = r.ch(info.num);
                ch.cr().modify(|w| w.set_en(true));
            }
        }
    }

    fn clear_irqs(&self) {
        let info = self.info();
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => {
                let isrn = info.num / 4;
                let isrbit = info.num % 4;

                r.ifcr(isrn).write(|w| {
                    w.set_htif(isrbit, true);
                    w.set_tcif(isrbit, true);
                    w.set_teif(isrbit, true);
                });
            }
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => {
                r.ifcr().write(|w| {
                    w.set_htif(info.num, true);
                    w.set_tcif(info.num, true);
                    w.set_teif(info.num, true);
                });
            }
        }
    }

    fn request_stop(&self) {
        let info = self.info();
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => {
                r.st(info.num).cr().write(|w| {
                    // Disable the channel (EN is left cleared), but keep TEIE/TCIE set so the irqs still fire.
                    w.set_teie(true);
                    w.set_tcie(true);
                });
            }
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => {
                r.ch(info.num).cr().write(|w| {
                    // Disable the channel (EN is left cleared), but keep TEIE/TCIE set so the irqs still fire.
                    w.set_teie(true);
                    w.set_tcie(true);
                });
            }
        }
    }

    fn request_pause(&self) {
        let info = self.info();
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => {
                r.st(info.num).cr().modify(|w| {
                    // Clear only EN, keeping the rest of the channel configuration intact.
                    w.set_en(false);
                });
            }
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => {
                r.ch(info.num).cr().modify(|w| {
                    // Clear only EN, keeping the rest of the channel configuration intact.
                    w.set_en(false);
                });
            }
        }
    }

    fn is_running(&self) -> bool {
        let info = self.info();
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => r.st(info.num).cr().read().en(),
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => {
                // BDMA leaves EN set after a non-circular transfer completes, so also check
                // whether a transfer-complete interrupt has already been counted.
                let state: &ChannelState = &STATE[self.id as usize];
                let ch = r.ch(info.num);
                let en = ch.cr().read().en();
                let circular = ch.cr().read().circ();
                let tcif = state.complete_count.load(Ordering::Acquire) != 0;
                en && (circular || !tcif)
            }
        }
    }

    fn get_remaining_transfers(&self) -> u16 {
        let info = self.info();
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(r) => r.st(info.num).ndtr().read().ndt(),
            #[cfg(bdma)]
            DmaInfo::Bdma(r) => r.ch(info.num).ndtr().read().ndt(),
        }
    }

    fn disable_circular_mode(&self) {
        let info = self.info();
        match self.info().dma {
            #[cfg(dma)]
            DmaInfo::Dma(regs) => regs.st(info.num).cr().modify(|w| {
                w.set_circ(false);
            }),
            #[cfg(bdma)]
            DmaInfo::Bdma(regs) => regs.ch(info.num).cr().modify(|w| {
                w.set_circ(false);
            }),
        }
    }

    fn poll_stop(&self) -> Poll<()> {
        use core::sync::atomic::compiler_fence;
        compiler_fence(Ordering::SeqCst);

        if !self.is_running() {
            Poll::Ready(())
        } else {
            Poll::Pending
        }
    }
}

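/// DMA transfer.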
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Transfer<'a> {
    channel: PeripheralRef<'a, AnyChannel>,
}

impl<'a> Transfer<'a> {
    /// Create a new read DMA transfer (peripheral to memory).
    pub unsafe fn new_read<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        peri_addr: *mut W,
        buf: &'a mut [W],
        options: TransferOptions,
    ) -> Self {
        Self::new_read_raw(channel, request, peri_addr, buf, options)
    }

    /// Create a new read DMA transfer (peripheral to memory), using raw pointers.
    pub unsafe fn new_read_raw<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        peri_addr: *mut W,
        buf: *mut [W],
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::PeripheralToMemory,
            peri_addr as *const u32,
            buf as *mut W as *mut u32,
            buf.len(),
            true,
            W::size(),
            options,
        )
    }

    /// Create a new write DMA transfer (memory to peripheral).
    pub unsafe fn new_write<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        buf: &'a [W],
        peri_addr: *mut W,
        options: TransferOptions,
    ) -> Self {
        Self::new_write_raw(channel, request, buf, peri_addr, options)
    }

    /// Create a new write DMA transfer (memory to peripheral), using raw pointers.
    pub unsafe fn new_write_raw<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        buf: *const [W],
        peri_addr: *mut W,
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::MemoryToPeripheral,
            peri_addr as *const u32,
            buf as *const W as *mut u32,
            buf.len(),
            true,
            W::size(),
            options,
        )
    }

    /// Create a new write DMA transfer (memory to peripheral), writing the same value repeatedly.
    pub unsafe fn new_write_repeated<W: Word>(
        channel: impl Peripheral<P = impl Channel> + 'a,
        request: Request,
        repeated: &'a W,
        count: usize,
        peri_addr: *mut W,
        options: TransferOptions,
    ) -> Self {
        into_ref!(channel);

        Self::new_inner(
            channel.map_into(),
            request,
            Dir::MemoryToPeripheral,
            peri_addr as *const u32,
            repeated as *const W as *mut u32,
            count,
            false,
            W::size(),
            options,
        )
    }

    unsafe fn new_inner(
        channel: PeripheralRef<'a, AnyChannel>,
        _request: Request,
        dir: Dir,
        peri_addr: *const u32,
        mem_addr: *mut u32,
        mem_len: usize,
        incr_mem: bool,
        data_size: WordSize,
        options: TransferOptions,
    ) -> Self {
        assert!(mem_len > 0 && mem_len <= 0xFFFF);

        channel.configure(
            _request, dir, peri_addr, mem_addr, mem_len, incr_mem, data_size, options,
        );
        channel.start();

        Self { channel }
    }

    /// Request the transfer to stop. The channel configuration is not preserved.
    ///
    /// The transfer doesn't stop immediately; wait until [`is_running`](Self::is_running) returns false.
    pub fn request_stop(&mut self) {
        self.channel.request_stop()
    }

    /// Request the transfer to pause, keeping the channel configuration.
    ///
    /// The transfer doesn't stop immediately; wait until [`is_running`](Self::is_running) returns false.
    pub fn request_pause(&mut self) {
        self.channel.request_pause()
    }

    /// Return whether this transfer is still running.
    pub fn is_running(&mut self) -> bool {
        self.channel.is_running()
    }

    /// Get the number of remaining word transfers on the channel (the hardware NDTR counter).
    pub fn get_remaining_transfers(&self) -> u16 {
        self.channel.get_remaining_transfers()
    }

    /// Block until the transfer finishes.
    pub fn blocking_wait(mut self) {
        while self.is_running() {}

        // Ensure the DMA's memory accesses are visible before handing the buffer back to the caller.
        fence(Ordering::SeqCst);

        // The transfer already finished; skip `Drop`, which would request a stop and busy-wait again.
        core::mem::forget(self);
    }
}

impl<'a> Drop for Transfer<'a> {
    fn drop(&mut self) {
        self.request_stop();
        while self.is_running() {}

        fence(Ordering::SeqCst);
    }
}

impl<'a> Unpin for Transfer<'a> {}
impl<'a> Future for Transfer<'a> {
    type Output = ();
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let state: &ChannelState = &STATE[self.channel.id as usize];

        state.waker.register(cx.waker());

        if self.is_running() {
            Poll::Pending
        } else {
            Poll::Ready(())
        }
    }
}

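// Adapter exposing an `AnyChannel` to the shared ring buffer implementation via the `DmaCtrl` trait.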
struct DmaCtrlImpl<'a>(PeripheralRef<'a, AnyChannel>);

impl<'a> DmaCtrl for DmaCtrlImpl<'a> {
    fn get_remaining_transfers(&self) -> usize {
        self.0.get_remaining_transfers() as _
    }

    fn reset_complete_count(&mut self) -> usize {
        let state = &STATE[self.0.id as usize];
        #[cfg(not(armv6m))]
        return state.complete_count.swap(0, Ordering::AcqRel);
        #[cfg(armv6m)]
        return critical_section::with(|_| {
            let x = state.complete_count.load(Ordering::Acquire);
            state.complete_count.store(0, Ordering::Release);
            x
        });
    }

    fn set_waker(&mut self, waker: &Waker) {
        STATE[self.0.id as usize].waker.register(waker);
    }
}

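/// Ring buffer for receiving data using DMA circular mode.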
pub struct ReadableRingBuffer<'a, W: Word> {
    channel: PeripheralRef<'a, AnyChannel>,
    ringbuf: ReadableDmaRingBuffer<'a, W>,
}

impl<'a, W: Word> ReadableRingBuffer<'a, W> {
    /// Create a new ring buffer.
    pub unsafe fn new(
        channel: impl Peripheral<P = impl Channel> + 'a,
        _request: Request,
        peri_addr: *mut W,
        buffer: &'a mut [W],
        mut options: TransferOptions,
    ) -> Self {
        into_ref!(channel);
        let channel: PeripheralRef<'a, AnyChannel> = channel.map_into();

        let buffer_ptr = buffer.as_mut_ptr();
        let len = buffer.len();
        let dir = Dir::PeripheralToMemory;
        let data_size = W::size();

        // The ring buffer always runs the DMA in circular mode, with half and complete
        // transfer interrupts enabled so it can track the DMA's write position.
        options.half_transfer_ir = true;
        options.complete_transfer_ir = true;
        options.circular = true;

        channel.configure(
            _request,
            dir,
            peri_addr as *mut u32,
            buffer_ptr as *mut u32,
            len,
            true,
            data_size,
            options,
        );

        Self {
            channel,
            ringbuf: ReadableDmaRingBuffer::new(buffer),
        }
    }

    /// Start the DMA transfer feeding this ring buffer.
    ///
    /// You must call this after creating the ring buffer for it to work.
    pub fn start(&mut self) {
        self.channel.start();
    }

    /// Clear all data in the ring buffer.
    pub fn clear(&mut self) {
        self.ringbuf.reset(&mut DmaCtrlImpl(self.channel.reborrow()));
    }

    /// Read elements from the ring buffer.
    ///
    /// Returns the number of elements read and the number still available in the buffer.
    pub fn read(&mut self, buf: &mut [W]) -> Result<(usize, usize), Error> {
        self.ringbuf.read(&mut DmaCtrlImpl(self.channel.reborrow()), buf)
    }

    /// Read exactly `buffer.len()` elements from the ring buffer, waiting until enough data is available.
    pub async fn read_exact(&mut self, buffer: &mut [W]) -> Result<usize, Error> {
        self.ringbuf
            .read_exact(&mut DmaCtrlImpl(self.channel.reborrow()), buffer)
            .await
    }

    /// The current number of elements in the ring buffer.
    pub fn len(&mut self) -> Result<usize, Error> {
        Ok(self.ringbuf.len(&mut DmaCtrlImpl(self.channel.reborrow()))?)
    }

    /// The capacity of the ring buffer.
    pub const fn capacity(&self) -> usize {
        self.ringbuf.cap()
    }

    /// Register a waker to be woken on half transfer and transfer complete interrupts.
    pub fn set_waker(&mut self, waker: &Waker) {
        DmaCtrlImpl(self.channel.reborrow()).set_waker(waker);
    }

    /// Request the DMA to stop. The channel configuration is not preserved.
    ///
    /// The transfer doesn't stop immediately; wait until [`is_running`](Self::is_running) returns false.
    pub fn request_stop(&mut self) {
        self.channel.request_stop()
    }

    /// Request the DMA to pause, keeping the channel configuration.
    /// Call [`start`](Self::start) again to resume.
    ///
    /// The transfer doesn't stop immediately; wait until [`is_running`](Self::is_running) returns false.
    pub fn request_pause(&mut self) {
        self.channel.request_pause()
    }

    /// Return whether the DMA is still running.
    pub fn is_running(&mut self) -> bool {
        self.channel.is_running()
    }

    /// Stop the DMA transfer and await until it has finished.
    ///
    /// This disables circular mode so the transfer stops once it reaches the end of the buffer.
    pub async fn stop(&mut self) {
        self.channel.disable_circular_mode();
        poll_fn(|cx| {
            self.set_waker(cx.waker());
            self.channel.poll_stop()
        })
        .await
    }
}

impl<'a, W: Word> Drop for ReadableRingBuffer<'a, W> {
    fn drop(&mut self) {
        self.request_stop();
        while self.is_running() {}

        fence(Ordering::SeqCst);
    }
}

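/// Ring buffer for writing data using DMA circular mode.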
pub struct WritableRingBuffer<'a, W: Word> {
    channel: PeripheralRef<'a, AnyChannel>,
    ringbuf: WritableDmaRingBuffer<'a, W>,
}

impl<'a, W: Word> WritableRingBuffer<'a, W> {
    /// Create a new ring buffer.
    pub unsafe fn new(
        channel: impl Peripheral<P = impl Channel> + 'a,
        _request: Request,
        peri_addr: *mut W,
        buffer: &'a mut [W],
        mut options: TransferOptions,
    ) -> Self {
        into_ref!(channel);
        let channel: PeripheralRef<'a, AnyChannel> = channel.map_into();

        let len = buffer.len();
        let dir = Dir::MemoryToPeripheral;
        let data_size = W::size();
        let buffer_ptr = buffer.as_mut_ptr();

        // The ring buffer always runs the DMA in circular mode, with half and complete
        // transfer interrupts enabled so it can track the DMA's read position.
        options.half_transfer_ir = true;
        options.complete_transfer_ir = true;
        options.circular = true;

        channel.configure(
            _request,
            dir,
            peri_addr as *mut u32,
            buffer_ptr as *mut u32,
            len,
            true,
            data_size,
            options,
        );

        Self {
            channel,
            ringbuf: WritableDmaRingBuffer::new(buffer),
        }
    }

    /// Start the DMA transfer draining this ring buffer.
    ///
    /// You must call this after creating the ring buffer for it to work.
    pub fn start(&mut self) {
        self.channel.start();
    }

    /// Clear all data in the ring buffer.
    pub fn clear(&mut self) {
        self.ringbuf.reset(&mut DmaCtrlImpl(self.channel.reborrow()));
    }

    /// Write elements directly to the raw buffer, for example to pre-fill it before starting the DMA.
    pub fn write_immediate(&mut self, buf: &[W]) -> Result<(usize, usize), Error> {
        self.ringbuf.write_immediate(buf)
    }

    /// Write elements to the ring buffer.
    ///
    /// Returns the number of elements written and the space remaining in the buffer.
    pub fn write(&mut self, buf: &[W]) -> Result<(usize, usize), Error> {
        self.ringbuf.write(&mut DmaCtrlImpl(self.channel.reborrow()), buf)
    }

    /// Write exactly `buffer.len()` elements to the ring buffer, waiting until enough space is free.
    pub async fn write_exact(&mut self, buffer: &[W]) -> Result<usize, Error> {
        self.ringbuf
            .write_exact(&mut DmaCtrlImpl(self.channel.reborrow()), buffer)
            .await
    }

    /// Wait for any ring buffer write error.
    pub async fn wait_write_error(&mut self) -> Result<usize, Error> {
        self.ringbuf
            .wait_write_error(&mut DmaCtrlImpl(self.channel.reborrow()))
            .await
    }

    /// The current number of elements in the ring buffer.
    pub fn len(&mut self) -> Result<usize, Error> {
        Ok(self.ringbuf.len(&mut DmaCtrlImpl(self.channel.reborrow()))?)
    }

    /// The capacity of the ring buffer.
    pub const fn capacity(&self) -> usize {
        self.ringbuf.cap()
    }

    /// Register a waker to be woken on half transfer and transfer complete interrupts.
    pub fn set_waker(&mut self, waker: &Waker) {
        DmaCtrlImpl(self.channel.reborrow()).set_waker(waker);
    }

    /// Request the DMA to stop. The channel configuration is not preserved.
    ///
    /// The transfer doesn't stop immediately; wait until [`is_running`](Self::is_running) returns false.
    pub fn request_stop(&mut self) {
        self.channel.request_stop()
    }

    /// Request the DMA to pause, keeping the channel configuration.
    /// Call [`start`](Self::start) again to resume.
    ///
    /// The transfer doesn't stop immediately; wait until [`is_running`](Self::is_running) returns false.
    pub fn request_pause(&mut self) {
        self.channel.request_pause()
    }

    /// Return whether the DMA is still running.
    pub fn is_running(&mut self) -> bool {
        self.channel.is_running()
    }

    /// Stop the DMA transfer and await until it has finished.
    ///
    /// This disables circular mode so the transfer stops once it reaches the end of the buffer.
    pub async fn stop(&mut self) {
        self.channel.disable_circular_mode();
        poll_fn(|cx| {
            self.set_waker(cx.waker());
            self.channel.poll_stop()
        })
        .await
    }
}

impl<'a, W: Word> Drop for WritableRingBuffer<'a, W> {
    fn drop(&mut self) {
        self.request_stop();
        while self.is_running() {}

        fence(Ordering::SeqCst);
    }
}