diff options
| author | Dario Nieuwenhuis <[email protected]> | 2023-04-18 16:16:33 +0200 |
|---|---|---|
| committer | Dario Nieuwenhuis <[email protected]> | 2023-04-18 16:41:24 +0200 |
| commit | efc70debb3bbf7fb7e9b1a23a42e5db149de8ed6 (patch) | |
| tree | 26fd40195c0d9e5d546b78de697e42c3c36f4789 /embassy-stm32/src/dcmi.rs | |
| parent | 173c65b5430e57548cc747f0387dd001e30b1ac1 (diff) | |
stm32/dma: add double buffered mode for DMA, update DCMI.
Diffstat (limited to 'embassy-stm32/src/dcmi.rs')
| -rw-r--r-- | embassy-stm32/src/dcmi.rs | 37 |
1 file changed, 24 insertions(+), 13 deletions(-)
diff --git a/embassy-stm32/src/dcmi.rs b/embassy-stm32/src/dcmi.rs index 0b34553cf..c19be86c6 100644 --- a/embassy-stm32/src/dcmi.rs +++ b/embassy-stm32/src/dcmi.rs | |||
| @@ -434,9 +434,13 @@ where | |||
| 434 | result | 434 | result |
| 435 | } | 435 | } |
| 436 | 436 | ||
| 437 | #[cfg(not(dma))] | ||
| 437 | async fn capture_giant(&mut self, _buffer: &mut [u32]) -> Result<(), Error> { | 438 | async fn capture_giant(&mut self, _buffer: &mut [u32]) -> Result<(), Error> { |
| 438 | todo!() | 439 | panic!("capturing to buffers larger than 0xffff is only supported on DMA for now, not on BDMA or GPDMA."); |
| 439 | /* | 440 | } |
| 441 | |||
| 442 | #[cfg(dma)] | ||
| 443 | async fn capture_giant(&mut self, buffer: &mut [u32]) -> Result<(), Error> { | ||
| 440 | use crate::dma::TransferOptions; | 444 | use crate::dma::TransferOptions; |
| 441 | 445 | ||
| 442 | let data_len = buffer.len(); | 446 | let data_len = buffer.len(); |
| @@ -460,16 +464,24 @@ where | |||
| 460 | let r = self.inner.regs(); | 464 | let r = self.inner.regs(); |
| 461 | let src = r.dr().ptr() as *mut u32; | 465 | let src = r.dr().ptr() as *mut u32; |
| 462 | 466 | ||
| 463 | unsafe { | 467 | let mut transfer = unsafe { |
| 464 | channel.start_double_buffered_read(request, src, m0ar, m1ar, chunk_size, TransferOptions::default()); | 468 | crate::dma::DoubleBuffered::new_read( |
| 465 | } | 469 | &mut self.dma, |
| 470 | request, | ||
| 471 | src, | ||
| 472 | m0ar, | ||
| 473 | m1ar, | ||
| 474 | chunk_size, | ||
| 475 | TransferOptions::default(), | ||
| 476 | ) | ||
| 477 | }; | ||
| 466 | 478 | ||
| 467 | let mut last_chunk_set_for_transfer = false; | 479 | let mut last_chunk_set_for_transfer = false; |
| 468 | let mut buffer0_last_accessible = false; | 480 | let mut buffer0_last_accessible = false; |
| 469 | let dma_result = poll_fn(|cx| { | 481 | let dma_result = poll_fn(|cx| { |
| 470 | channel.set_waker(cx.waker()); | 482 | transfer.set_waker(cx.waker()); |
| 471 | 483 | ||
| 472 | let buffer0_currently_accessible = unsafe { channel.is_buffer0_accessible() }; | 484 | let buffer0_currently_accessible = transfer.is_buffer0_accessible(); |
| 473 | 485 | ||
| 474 | // check if the accessible buffer changed since last poll | 486 | // check if the accessible buffer changed since last poll |
| 475 | if buffer0_last_accessible == buffer0_currently_accessible { | 487 | if buffer0_last_accessible == buffer0_currently_accessible { |
| @@ -480,21 +492,21 @@ where | |||
| 480 | if remaining_chunks != 0 { | 492 | if remaining_chunks != 0 { |
| 481 | if remaining_chunks % 2 == 0 && buffer0_currently_accessible { | 493 | if remaining_chunks % 2 == 0 && buffer0_currently_accessible { |
| 482 | m0ar = unsafe { m0ar.add(2 * chunk_size) }; | 494 | m0ar = unsafe { m0ar.add(2 * chunk_size) }; |
| 483 | unsafe { channel.set_buffer0(m0ar) } | 495 | unsafe { transfer.set_buffer0(m0ar) } |
| 484 | remaining_chunks -= 1; | 496 | remaining_chunks -= 1; |
| 485 | } else if !buffer0_currently_accessible { | 497 | } else if !buffer0_currently_accessible { |
| 486 | m1ar = unsafe { m1ar.add(2 * chunk_size) }; | 498 | m1ar = unsafe { m1ar.add(2 * chunk_size) }; |
| 487 | unsafe { channel.set_buffer1(m1ar) }; | 499 | unsafe { transfer.set_buffer1(m1ar) }; |
| 488 | remaining_chunks -= 1; | 500 | remaining_chunks -= 1; |
| 489 | } | 501 | } |
| 490 | } else { | 502 | } else { |
| 491 | if buffer0_currently_accessible { | 503 | if buffer0_currently_accessible { |
| 492 | unsafe { channel.set_buffer0(buffer.as_mut_ptr()) } | 504 | unsafe { transfer.set_buffer0(buffer.as_mut_ptr()) } |
| 493 | } else { | 505 | } else { |
| 494 | unsafe { channel.set_buffer1(buffer.as_mut_ptr()) } | 506 | unsafe { transfer.set_buffer1(buffer.as_mut_ptr()) } |
| 495 | } | 507 | } |
| 496 | if last_chunk_set_for_transfer { | 508 | if last_chunk_set_for_transfer { |
| 497 | channel.request_stop(); | 509 | transfer.request_stop(); |
| 498 | return Poll::Ready(()); | 510 | return Poll::Ready(()); |
| 499 | } | 511 | } |
| 500 | last_chunk_set_for_transfer = true; | 512 | last_chunk_set_for_transfer = true; |
| @@ -542,7 +554,6 @@ where | |||
| 542 | unsafe { Self::toggle(false) }; | 554 | unsafe { Self::toggle(false) }; |
| 543 | 555 | ||
| 544 | result | 556 | result |
| 545 | */ | ||
| 546 | } | 557 | } |
| 547 | } | 558 | } |
| 548 | 559 | ||
