1 /*
2 * Copyright (c) 2022 ASR Microelectronics (Shanghai) Co., Ltd. All rights reserved.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include "duet_dma.h"
17
/* Per-channel completion callbacks, dispatched from DMA_IRQHandler; set via duet_dma_callback_register(). */
duet_dma_callback_func g_duet_dma_callback_handler[DMA_MAX_CHAN_NUM] = {0};
19
duet_dma_ctrl_block_init(void)20 Chan_Cfg_TypeDef *duet_dma_ctrl_block_init(void)
21 {
22 return (Chan_Cfg_TypeDef *)(0X4000C000);
23 }
24
duet_dma_init(void)25 void duet_dma_init(void)
26 {
27 uint32_t tmp_value = 0;
28 DMA_HANDSHAKE_CFG0 = 0XFFFFFFFF;
29 DMA_HANDSHAKE_CFG1 = 0XFFFFFFFF;
30 // OPEN DMA CLOCK
31 tmp_value = REG_RD(PERI_CLK_EN_REG0) & (~DMA_CLK_EN);
32 REG_WR(PERI_CLK_EN_REG0, (tmp_value | (DMA_CLK_EN)));
33 // open DMA interrupt
34 tmp_value = REG_RD(DUTE_IRQ_EN_REG) & (~DMA_IRQ_BIT);
35 REG_WR(DUTE_IRQ_EN_REG, (tmp_value | (DMA_IRQ_BIT)));
36 }
37
/**
 * Clear the pending DMA interrupt for one channel.
 *
 * Pulses the channel's clear bit: set it to 1, then back to 0.
 * NOTE(review): the |= / &= read-modify-write assumes DMA_INT_CLR reads
 * back the last value written (not pending status) -- confirm in the TRM.
 *
 * @param chan_idx channel whose interrupt flag is cleared
 */
void duet_dma_interrupt_clear(uint32_t chan_idx)
{
    uint32_t bit = 1U << chan_idx;
    DMA_INT_CLR |= bit;
    DMA_INT_CLR &= ~bit;
}
43
duet_dma_get_interrupt_status(uint32_t chan_idx)44 ITstatus duet_dma_get_interrupt_status(uint32_t chan_idx)
45 {
46 if (DMA_INT_STAT & (1 << chan_idx)) {
47 return SET;
48 } else {
49 return RESET;
50 }
51 }
52
/**
 * Unmask (ENABLE) or mask (otherwise) the interrupt of one DMA channel.
 *
 * @param chan_idx  channel to configure
 * @param new_state ENABLE to unmask the interrupt, anything else to mask it
 */
void duet_dma_interrupt_config(uint32_t chan_idx, uint8_t new_state)
{
    uint32_t bit = 1U << chan_idx;
    if (new_state != ENABLE) {
        DMA_INT_MASK &= ~bit;   /* mask the channel interrupt */
    } else {
        DMA_INT_MASK |= bit;    /* write 1 to unmask */
    }
}
61
/**
 * Enable (ENABLE) or disable (otherwise) one DMA channel via the
 * channel-enable set/clear registers.
 *
 * @param chan_idx  channel to control
 * @param new_state ENABLE to enable the channel, anything else to disable
 */
void duet_dma_channel_cmd(uint32_t chan_idx, uint8_t new_state)
{
    uint32_t bit = 1U << chan_idx;
    if (new_state != ENABLE) {
        DMA->CHAN_EN_CLR |= bit;
    } else {
        DMA->CHAN_EN_SET |= bit;
    }
}
70
/**
 * Select the alternate (ENABLE) or primary (otherwise) descriptor for one
 * DMA channel via the primary/alternate set/clear registers.
 *
 * @param chan_idx  channel to control
 * @param new_state ENABLE to select the alternate descriptor, anything
 *                  else to select the primary descriptor
 */
void duet_dma_alt_channel_cmd(uint32_t chan_idx, uint8_t new_state)
{
    uint32_t bit = 1U << chan_idx;
    if (new_state != ENABLE) {
        DMA->CHAN_PRI_ALT_CLR |= bit;
    } else {
        DMA->CHAN_PRI_ALT_SET |= bit;
    }
}
79
/**
 * Issue a software DMA request for one channel (used e.g. for
 * memory-to-memory transfers that have no peripheral handshake).
 *
 * @param chan_idx channel to trigger
 */
void duet_dma_generate_sw_req(uint32_t chan_idx)
{
    uint32_t bit = 1U << chan_idx;
    DMA->CHAN_SW_REQ |= bit;
}
/**
 * Make one channel wait on its peripheral request line (sets the
 * channel's bit in the wait-on-request register).
 *
 * @param chan_idx channel to configure
 */
void duet_dma_generate_perip_req(uint32_t chan_idx)
{
    uint32_t bit = 1U << chan_idx;
    DMA_WAIT_ON_REQ |= bit;
}
88
/**
 * Start a byte-wise memory-to-memory DMA transfer on the given channel.
 *
 * Programs the channel's primary descriptor for an auto-request transfer
 * and kicks it off with a software request. Completion is reported via
 * the DMA interrupt (see DMA_IRQHandler and the registered callback).
 *
 * @param chan_num DMA channel to use
 * @param mem_src  source buffer (must stay valid until the transfer ends)
 * @param mem_dst  destination buffer
 * @param len      number of bytes to copy; 0 is rejected (it would
 *                 underflow n_minus_1 and form out-of-bounds end pointers)
 */
void duet_dma_mem2mem(uint8_t chan_num, uint8_t *mem_src, uint8_t *mem_dst, uint16_t len)
{
    if ((len == 0) || (mem_src == NULL) || (mem_dst == NULL)) {
        return; /* nothing to transfer / invalid arguments */
    }

    Chan_Cfg_TypeDef *pChan_Cfg_Align = duet_dma_ctrl_block_init();
    Chan_Ctl_Data_TypeDef ch_ctl_data;

    ch_ctl_data.cycle_ctl = DMA_OP_MODE_AUTO_REQ;
    ch_ctl_data.n_minus_1 = len - 1;            /* hardware counts len-1 */
    ch_ctl_data.R_pow = 1;                      /* presumably 2^R_pow transfers per arbitration -- confirm */
    ch_ctl_data.src_inc = DMA_SRC_ADDR_INC_BYTE;
    ch_ctl_data.dst_inc = DMA_DST_ADDR_INC_BYTE;
    ch_ctl_data.src_size = DMA_SRC_DATA_WIDTH_BYTE;
    ch_ctl_data.dst_size = DMA_DST_DATA_WIDTH_BYTE;

    /* Descriptors hold inclusive end addresses, not start addresses. */
    (pChan_Cfg_Align + chan_num)->chan_ctr = ch_ctl_data;
    (pChan_Cfg_Align + chan_num)->chan_src_end_ptr = (uint32_t)&mem_src[len - 1];
    (pChan_Cfg_Align + chan_num)->chan_dst_end_ptr = (uint32_t)&mem_dst[len - 1];

    DMA->CFG |= 0x1;                                /* global DMA enable */
    DMA->CHAN_PRI_ALT_CLR |= (1U << chan_num);      /* use primary descriptor */
    DMA->CTL_BASE_PTR = (uint32_t)pChan_Cfg_Align;  /* descriptor table base */
    DMA->CHAN_EN_SET |= (1U << chan_num);           /* enable the channel */
    NVIC_EnableIRQ(DMA_IRQn);
    DMA_INT_MASK |= (1U << chan_num);               /* unmask channel interrupt */
    /* Memory-to-memory has no peripheral handshake: trigger by software. */
    DMA->CHAN_SW_REQ |= (1U << chan_num);
}
121
/**
 * Start a DMA transfer from a UART RX data register into memory.
 *
 * Channel mapping (fixed by the handshake wiring in this driver):
 * UART0 -> channel 1, UART1 -> channel 3, UART2 -> channel 5.
 *
 * @param uart_idx UART instance (0..2); other values are ignored
 * @param data     destination buffer (must stay valid until completion)
 * @param len      number of bytes to receive; 0 is rejected (it would
 *                 underflow n_minus_1 and form a bogus end pointer)
 */
void duet_dma_uart_rx(uint8_t uart_idx, uint8_t *data, uint16_t len)
{
    uint8_t dma_chan;
    UART_TypeDef *UARTx;

    if ((len == 0) || (data == NULL)) {
        return; /* invalid arguments */
    }

    switch (uart_idx) {
    case 0:
        dma_chan = 1; /* uart0 rx channel */
        UARTx = UART0;
        break;
    case 1:
        dma_chan = 3; /* uart1 rx channel */
        UARTx = UART1;
        break;
    case 2:
        dma_chan = 5; /* uart2 rx channel */
        UARTx = UART2;
        break;
    default:
        return; /* unsupported UART instance */
    }

    Chan_Cfg_TypeDef *pChan_Cfg_Align = duet_dma_ctrl_block_init();
    Chan_Ctl_Data_TypeDef ch_ctl_data;

    ch_ctl_data.cycle_ctl = DMA_OP_MODE_BASIC;
    ch_ctl_data.n_minus_1 = len - 1;              /* hardware counts len-1 */
    ch_ctl_data.R_pow = 1;
    ch_ctl_data.src_inc = DMA_SRC_ADDR_INC_FIX;   /* fixed: UART data register */
    ch_ctl_data.dst_inc = DMA_DST_ADDR_INC_BYTE;
    ch_ctl_data.src_size = DMA_SRC_DATA_WIDTH_BYTE;
    ch_ctl_data.dst_size = DMA_DST_DATA_WIDTH_BYTE;

    /* Descriptors hold inclusive end addresses. */
    (pChan_Cfg_Align + dma_chan)->chan_ctr = ch_ctl_data;
    (pChan_Cfg_Align + dma_chan)->chan_src_end_ptr = (uint32_t)&(UARTx->DR);
    (pChan_Cfg_Align + dma_chan)->chan_dst_end_ptr = (uint32_t)(data + len - 1);

    NVIC_EnableIRQ(DMA_IRQn);
    DMA_WAIT_ON_REQ |= (1U << dma_chan);            /* wait on peripheral request */
    DMA->CFG |= 0x1;                                /* global DMA enable */
    DMA_INT_MASK |= (1U << dma_chan);               /* unmask channel interrupt */
    DMA->CHAN_PRI_ALT_CLR |= (1U << dma_chan);      /* use primary descriptor */
    DMA->CTL_BASE_PTR = (uint32_t)pChan_Cfg_Align;  /* descriptor table base */
    DMA->CHAN_EN_SET |= (1U << dma_chan);           /* enable channel */
}
169
/**
 * Start a DMA transfer from memory into a UART TX data register.
 *
 * Channel mapping (fixed by the handshake wiring in this driver):
 * UART0 -> channel 0, UART1 -> channel 2, UART2 -> channel 4.
 *
 * @param uart_idx UART instance (0..2); other values are ignored
 * @param data     source buffer (must stay valid until completion)
 * @param len      number of bytes to send; 0 is rejected (it would
 *                 underflow n_minus_1 and form a bogus end pointer)
 */
void duet_dma_uart_tx(uint8_t uart_idx, uint8_t *data, uint16_t len)
{
    uint8_t dma_chan;
    UART_TypeDef *UARTx;

    if ((len == 0) || (data == NULL)) {
        return; /* invalid arguments */
    }

    switch (uart_idx) {
    case 0:
        dma_chan = 0; /* uart0 tx channel */
        UARTx = UART0;
        break;
    case 1:
        dma_chan = 2; /* uart1 tx channel */
        UARTx = UART1;
        break;
    case 2:
        dma_chan = 4; /* uart2 tx channel */
        UARTx = UART2;
        break;
    default:
        return; /* unsupported UART instance */
    }

    Chan_Cfg_TypeDef *pChan_Cfg_Align = duet_dma_ctrl_block_init();
    Chan_Ctl_Data_TypeDef ch_ctl_data;

    ch_ctl_data.cycle_ctl = DMA_OP_MODE_BASIC;
    ch_ctl_data.n_minus_1 = len - 1;              /* hardware counts len-1 */
    ch_ctl_data.R_pow = 2;
    ch_ctl_data.src_inc = DMA_SRC_ADDR_INC_BYTE;
    ch_ctl_data.dst_inc = DMA_DST_ADDR_INC_FIX;   /* fixed: UART data register */
    ch_ctl_data.src_size = DMA_SRC_DATA_WIDTH_BYTE;
    ch_ctl_data.dst_size = DMA_DST_DATA_WIDTH_BYTE;

    /* Descriptors hold inclusive end addresses. */
    (pChan_Cfg_Align + dma_chan)->chan_ctr = ch_ctl_data;
    (pChan_Cfg_Align + dma_chan)->chan_src_end_ptr = (uint32_t)(data + len - 1);
    (pChan_Cfg_Align + dma_chan)->chan_dst_end_ptr = (uint32_t)&(UARTx->DR);

    NVIC_EnableIRQ(DMA_IRQn);
    DMA->CFG |= 0x1;                                /* global DMA enable */
    DMA_INT_MASK |= (1U << dma_chan);               /* unmask channel interrupt */
    DMA->CHAN_PRI_ALT_CLR |= (1U << dma_chan);      /* use primary descriptor */
    DMA->CTL_BASE_PTR = (uint32_t)pChan_Cfg_Align;  /* descriptor table base */
    DMA->CHAN_EN_SET |= (1U << dma_chan);           /* enable channel */
}
215
/**
 * Start a DMA transfer from memory into an SPI TX data register.
 *
 * Channel mapping (fixed by the handshake wiring in this driver):
 * SPI0 -> channel 6, SPI1 -> channel 8, SPI2 -> channel 10.
 *
 * @param ssp_idx SPI instance (0..2); other values are ignored
 * @param data    source buffer (must stay valid until completion)
 * @param len     number of bytes to send; 0 is rejected (it would
 *                underflow n_minus_1 and form a bogus end pointer)
 */
void duet_dma_spi_tx(uint8_t ssp_idx, uint8_t *data, uint16_t len)
{
    uint8_t dma_chan;
    SPI_TypeDef *SPIx;

    if ((len == 0) || (data == NULL)) {
        return; /* invalid arguments */
    }

    switch (ssp_idx) {
    case 0:
        dma_chan = 6;  /* SPI0 tx channel */
        SPIx = SPI0;
        break;
    case 1:
        dma_chan = 8;  /* SPI1 tx channel */
        SPIx = SPI1;
        break;
    case 2:
        dma_chan = 10; /* SPI2 tx channel */
        SPIx = SPI2;
        break;
    default:
        return; /* unsupported SPI instance */
    }

    Chan_Cfg_TypeDef *pChan_Cfg_Align = duet_dma_ctrl_block_init();
    Chan_Ctl_Data_TypeDef ch_ctl_data;

    ch_ctl_data.cycle_ctl = DMA_OP_MODE_BASIC;
    ch_ctl_data.n_minus_1 = len - 1;              /* hardware counts len-1 */
    ch_ctl_data.R_pow = 1;
    ch_ctl_data.src_inc = DMA_SRC_ADDR_INC_BYTE;
    ch_ctl_data.dst_inc = DMA_DST_ADDR_INC_FIX;   /* fixed: SPI data register */
    ch_ctl_data.src_size = DMA_SRC_DATA_WIDTH_BYTE;
    ch_ctl_data.dst_size = DMA_DST_DATA_WIDTH_BYTE;

    /* Descriptors hold inclusive end addresses. */
    (pChan_Cfg_Align + dma_chan)->chan_ctr = ch_ctl_data;
    (pChan_Cfg_Align + dma_chan)->chan_src_end_ptr = (uint32_t)(data + len - 1);
    (pChan_Cfg_Align + dma_chan)->chan_dst_end_ptr = (uint32_t)&(SPIx->DR);

    NVIC_EnableIRQ(DMA_IRQn);
    DMA->CFG |= 0x1;                                /* global DMA enable */
    DMA_INT_MASK |= (1U << dma_chan);               /* unmask channel interrupt */
    DMA->CHAN_PRI_ALT_CLR |= (1U << dma_chan);      /* use primary descriptor */
    DMA->CTL_BASE_PTR = (uint32_t)pChan_Cfg_Align;  /* descriptor table base */
    /* Set useburst so single requests (sreq) cannot trigger this channel. */
    DMA->CHAN_USE_BURST_SET |= (1U << dma_chan);
    DMA->CHAN_EN_SET |= (1U << dma_chan);           /* enable channel */
}
263
/**
 * Start a DMA transfer from an SPI RX data register into memory.
 *
 * Channel mapping (fixed by the handshake wiring in this driver):
 * SPI0 -> channel 7, SPI1 -> channel 9, SPI2 -> channel 11.
 *
 * Fix: the original used the wrong macro families for the address
 * increment fields (DMA_DST_ADDR_INC_FIX for src_inc and
 * DMA_SRC_ADDR_INC_BYTE for dst_inc); corrected to the SRC_/DST_ macros
 * matching each field, mirroring duet_dma_uart_rx().
 *
 * @param ssp_idx SPI instance (0..2); other values are ignored
 * @param data    destination buffer (must stay valid until completion)
 * @param len     number of bytes to receive; 0 is rejected (it would
 *                underflow n_minus_1 and form a bogus end pointer)
 */
void duet_dma_spi_rx(uint8_t ssp_idx, uint8_t *data, uint16_t len)
{
    uint8_t dma_chan;
    SPI_TypeDef *SPIx;

    if ((len == 0) || (data == NULL)) {
        return; /* invalid arguments */
    }

    switch (ssp_idx) {
    case 0:
        dma_chan = 7;  /* SPI0 Rx channel */
        SPIx = SPI0;
        break;
    case 1:
        dma_chan = 9;  /* SPI1 Rx channel */
        SPIx = SPI1;
        break;
    case 2:
        dma_chan = 11; /* SPI2 Rx channel */
        SPIx = SPI2;
        break;
    default:
        return; /* unsupported SPI instance */
    }

    Chan_Cfg_TypeDef *pChan_Cfg_Align = duet_dma_ctrl_block_init();
    Chan_Ctl_Data_TypeDef ch_ctl_data;

    ch_ctl_data.cycle_ctl = DMA_OP_MODE_BASIC;
    ch_ctl_data.n_minus_1 = len - 1;              /* hardware counts len-1 */
    ch_ctl_data.R_pow = 1;
    ch_ctl_data.src_inc = DMA_SRC_ADDR_INC_FIX;   /* fixed: SPI data register */
    ch_ctl_data.dst_inc = DMA_DST_ADDR_INC_BYTE;  /* advance through buffer */
    ch_ctl_data.src_size = DMA_SRC_DATA_WIDTH_BYTE;
    ch_ctl_data.dst_size = DMA_DST_DATA_WIDTH_BYTE;

    /* Descriptors hold inclusive end addresses. */
    (pChan_Cfg_Align + dma_chan)->chan_ctr = ch_ctl_data;
    (pChan_Cfg_Align + dma_chan)->chan_src_end_ptr = (uint32_t)&(SPIx->DR);
    (pChan_Cfg_Align + dma_chan)->chan_dst_end_ptr = (uint32_t)(data + len - 1);

    NVIC_EnableIRQ(DMA_IRQn);
    DMA->CFG |= 0x1;                                /* global DMA enable */
    DMA_WAIT_ON_REQ |= (1U << dma_chan);            /* wait on peripheral request */
    DMA_INT_MASK |= (1U << dma_chan);               /* unmask channel interrupt */
    DMA->CHAN_PRI_ALT_CLR |= (1U << dma_chan);      /* use primary descriptor */
    DMA->CTL_BASE_PTR = (uint32_t)pChan_Cfg_Align;  /* descriptor table base */
    /* Clear useburst so single requests can also trigger this channel. */
    DMA->CHAN_USE_BURST_CLR |= (1U << dma_chan);
    DMA->CHAN_EN_SET |= (1U << dma_chan);           /* enable channel */
}
311
/**
 * Register a per-channel DMA completion callback, invoked from
 * DMA_IRQHandler when the channel's interrupt fires.
 *
 * @param chn_idx channel index; out-of-range values are ignored
 * @param func    callback to install (NULL to unregister)
 */
void duet_dma_callback_register(uint8_t chn_idx, duet_dma_callback_func func)
{
    if (chn_idx >= DMA_MAX_CHAN_NUM) {
        return; /* prevent out-of-bounds write into the handler table */
    }
    g_duet_dma_callback_handler[chn_idx] = func;
}
317
DMA_IRQHandler(void)318 void DMA_IRQHandler(void)
319 {
320 uint8_t i;
321 uint32_t chan_used = DMA_INT_STAT; // get all enabled channels
322 for (i = 0; i < DMA_MAX_CHAN_NUM; i++) {
323 if (chan_used & (1 << i)) {
324 duet_dma_interrupt_clear(i);
325 if (g_duet_dma_callback_handler[i] != NULL) {
326 g_duet_dma_callback_handler[i](i);
327 }
328 }
329 }
330 }
331
332