1 /*
2 * Copyright (c) 2021 HPMicro
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 *
6 */
7
8 #include "hpm_spi.h"
9
hpm_spi_tx_trigger_dma(DMA_Type * dma_ptr,uint8_t ch_num,SPI_Type * spi_ptr,uint32_t src,uint8_t data_width,uint32_t size)10 static hpm_stat_t hpm_spi_tx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t src, uint8_t data_width, uint32_t size)
11 {
12 dma_handshake_config_t config;
13
14 dma_default_handshake_config(dma_ptr, &config);
15 config.ch_index = ch_num;
16 config.dst = (uint32_t)&spi_ptr->DATA;
17 config.dst_fixed = true;
18 config.src = src;
19 config.src_fixed = false;
20 config.data_width = data_width;
21 config.size_in_byte = size;
22
23 return dma_setup_handshake(dma_ptr, &config, true);
24 }
25
hpm_spi_rx_trigger_dma(DMA_Type * dma_ptr,uint8_t ch_num,SPI_Type * spi_ptr,uint32_t dst,uint8_t data_width,uint32_t size)26 static hpm_stat_t hpm_spi_rx_trigger_dma(DMA_Type *dma_ptr, uint8_t ch_num, SPI_Type *spi_ptr, uint32_t dst, uint8_t data_width, uint32_t size)
27 {
28 dma_handshake_config_t config;
29
30 dma_default_handshake_config(dma_ptr, &config);
31 config.ch_index = ch_num;
32 config.dst = dst;
33 config.dst_fixed = false;
34 config.src = (uint32_t)&spi_ptr->DATA;
35 config.src_fixed = true;
36 config.data_width = data_width;
37 config.size_in_byte = size;
38
39 return dma_setup_handshake(dma_ptr, &config, true);
40 }
41
42
/*
 * Build the linked DMA descriptor chain for a multi-shot SPI transmit.
 *
 * Each of the trans_count transfers is described by three chained descriptors
 * (indices +0/+1/+2 below, i.e. SPI_DMA_DESC_COUNT_PER_TRANS per transfer):
 *   1. write the precomputed TRANSCTRL word into the SPI controller,
 *   2. write a dummy byte into the SPI CMD register to start the transfer,
 *   3. move the payload bytes from tx_buff into the SPI DATA FIFO.
 * The data descriptor of transfer i links to the CTRL descriptor of i+1;
 * the last data descriptor terminates the chain (linked_ptr = 0).
 *
 * @param context            SPI transfer context (buffers, counts, DMA channel)
 * @param config             SPI control config (trans mode, data phase format)
 * @param trans_count        number of chained transfers to generate
 * @param spi_transctrl      caller-provided array of trans_count TRANSCTRL words,
 *                           filled in here and used as DMA source (must stay
 *                           valid until the chain completes)
 * @param tx_dma_descriptors caller-provided descriptor array, at least
 *                           trans_count * SPI_DMA_DESC_COUNT_PER_TRANS entries
 */
void hpm_spi_prepare_dma_tx_descriptors(spi_context_t *context, spi_control_config_t *config, uint32_t trans_count,
                    uint32_t *spi_transctrl, dma_linked_descriptor_t *tx_dma_descriptors)
{
    SPI_Type *ptr = context->ptr;
    /* VLA sized by trans_count; caller keeps trans_count small (stack use) */
    uint32_t dma_transfer_size[trans_count];
    uint32_t tx_count = context->tx_count;
    uint32_t per_trans_size = context->per_trans_max;
    uint32_t dma_ch = context->dma_context.tx_dma_ch;
    uint8_t *tx_buff = context->tx_buff;
    dma_channel_config_t dma_ch_config;

    /* static: must outlive this call, it is the DMA source for the CMD write */
    static uint8_t dummy_cmd = 0xff;

    uint32_t temp32;
    uint32_t tx_buff_index = 0;

    dma_default_channel_config(context->dma_context.dma_ptr, &dma_ch_config);
    for (uint32_t i = 0; i < trans_count; i++) {
        /* split the total count into per_trans_max sized chunks; the last
         * chunk carries the remainder */
        if (tx_count > per_trans_size) {
            temp32 = per_trans_size;
            tx_count -= per_trans_size;
        } else {
            temp32 = tx_count;
        }

        *(spi_transctrl + i) = SPI_TRANSCTRL_TRANSMODE_SET(config->common_config.trans_mode == spi_trans_write_read_together ?
                                spi_trans_write_read_together : spi_trans_write_only)
                             | SPI_TRANSCTRL_DUALQUAD_SET(config->common_config.data_phase_fmt)
                             | SPI_TRANSCTRL_WRTRANCNT_SET(temp32 - 1)
                             | SPI_TRANSCTRL_RDTRANCNT_SET(temp32 - 1);

        if (i == 0) {
            /* Set the count of data transferred by dma to be one more than that of spi */
            /* when dma transfer finished, there are data in SPI fifo, dma should not execute the dma descriptor which changes SPI CTRL register */
            temp32 = temp32 + 1;
        }
        if (i == trans_count - 1) {
            /* the final chunk gives back the extra unit added for the first
             * chunk so the total DMA byte count matches the SPI count */
            temp32 = temp32 - 1;
        }
        dma_transfer_size[i] = temp32;

        /* SPI CTRL: word write of spi_transctrl[i] into TRANSCTRL */
        dma_ch_config.size_in_byte = 4;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(spi_transctrl + i));
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->TRANSCTRL);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS, dma_ch, &dma_ch_config);

        /* SPI CMD: byte write of a dummy command to trigger the transfer */
        dma_ch_config.size_in_byte = 1;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_cmd);
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->CMD);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1, dma_ch, &dma_ch_config);

        /* SPI DATA: payload moved to the DATA FIFO via peripheral handshake */
        dma_ch_config.size_in_byte = dma_transfer_size[i] << context->dma_context.data_width;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_buff + tx_buff_index));
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->DATA);
        dma_ch_config.src_width = context->dma_context.data_width;
        dma_ch_config.dst_width = context->dma_context.data_width;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_HANDSHAKE;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_INCREMENT;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        if (i == trans_count - 1) {
            /* terminate the chain after the last data descriptor */
            dma_ch_config.linked_ptr = 0;
        } else {
            dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(tx_dma_descriptors + (i + 1) * SPI_DMA_DESC_COUNT_PER_TRANS));
        }
        dma_config_linked_descriptor(context->dma_context.dma_ptr, tx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2, dma_ch, &dma_ch_config);

        /* advance by the chunk's SPI unit count (temp32, which includes the
         * +1/-1 adjustment above) scaled to bytes */
        tx_buff_index += temp32 * context->data_len_in_byte;
    }
}
133
/*
 * Build the linked DMA descriptor chain for a multi-shot SPI receive.
 *
 * Mirrors hpm_spi_prepare_dma_tx_descriptors(): three chained descriptors
 * per transfer (CTRL word, dummy CMD byte, DATA payload from the SPI FIFO
 * into rx_buff). Unlike the TX variant there is no +1/-1 adjustment of the
 * DMA count relative to the SPI count.
 *
 * NOTE(review): this function lacks the hpm_spi_ prefix used by its TX
 * counterpart — naming inconsistency only, callers in this file use it as-is.
 *
 * @param context            SPI transfer context (buffers, counts, DMA channel)
 * @param config             SPI control config (data phase format)
 * @param trans_count        number of chained transfers to generate
 * @param spi_transctrl      caller-provided array of trans_count TRANSCTRL words,
 *                           filled in here and used as DMA source (must stay
 *                           valid until the chain completes)
 * @param rx_dma_descriptors caller-provided descriptor array, at least
 *                           trans_count * SPI_DMA_DESC_COUNT_PER_TRANS entries
 */
void hpm_prepare_dma_rx_descriptors(spi_context_t *context, spi_control_config_t *config, uint32_t trans_count,
                    uint32_t *spi_transctrl, dma_linked_descriptor_t *rx_dma_descriptors)
{
    SPI_Type *ptr = context->ptr;
    /* VLA sized by trans_count; caller keeps trans_count small (stack use) */
    uint32_t dma_transfer_size[trans_count];
    uint32_t rx_count = context->rx_count;
    uint32_t per_trans_size = context->per_trans_max;
    uint32_t dma_ch = context->dma_context.rx_dma_ch;
    uint8_t *rx_buff = context->rx_buff;
    dma_channel_config_t dma_ch_config;

    /* static: must outlive this call, it is the DMA source for the CMD write */
    static uint8_t dummy_cmd = 0xff;

    uint32_t temp32;
    uint32_t rx_buff_index = 0;

    dma_default_channel_config(context->dma_context.dma_ptr, &dma_ch_config);
    for (uint32_t i = 0; i < trans_count; i++) {
        /* split the total count into per_trans_max sized chunks; the last
         * chunk carries the remainder */
        if (rx_count > per_trans_size) {
            temp32 = per_trans_size;
            rx_count -= per_trans_size;
        } else {
            temp32 = rx_count;
        }

        *(spi_transctrl + i) = SPI_TRANSCTRL_TRANSMODE_SET(spi_trans_read_only) |
                    SPI_TRANSCTRL_DUALQUAD_SET(config->common_config.data_phase_fmt) |
                    SPI_TRANSCTRL_WRTRANCNT_SET(temp32 - 1) |
                    SPI_TRANSCTRL_RDTRANCNT_SET(temp32 - 1);
        dma_transfer_size[i] = temp32;

        /* SPI CTRL: word write of spi_transctrl[i] into TRANSCTRL */
        dma_ch_config.size_in_byte = 4;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(spi_transctrl + i));
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->TRANSCTRL);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_WORD;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS, dma_ch, &dma_ch_config);

        /* SPI CMD: byte write of a dummy command to trigger the transfer */
        dma_ch_config.size_in_byte = 1;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_cmd);
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->CMD);
        dma_ch_config.src_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_BYTE;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2));
        dma_config_linked_descriptor(context->dma_context.dma_ptr, rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 1, dma_ch, &dma_ch_config);

        /* SPI DATA: payload drained from the DATA FIFO via peripheral handshake */
        dma_ch_config.size_in_byte = dma_transfer_size[i] << context->dma_context.data_width;
        dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&ptr->DATA);
        dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_buff + rx_buff_index));
        dma_ch_config.src_width = context->dma_context.data_width;
        dma_ch_config.dst_width = context->dma_context.data_width;
        dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
        dma_ch_config.src_mode = DMA_HANDSHAKE_MODE_HANDSHAKE;
        dma_ch_config.dst_mode = DMA_HANDSHAKE_MODE_NORMAL;
        dma_ch_config.src_addr_ctrl = DMA_ADDRESS_CONTROL_FIXED;
        dma_ch_config.dst_addr_ctrl = DMA_ADDRESS_CONTROL_INCREMENT;
        if (i == trans_count - 1) {
            /* terminate the chain after the last data descriptor */
            dma_ch_config.linked_ptr = 0;
        } else {
            dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(rx_dma_descriptors + (i + 1) * SPI_DMA_DESC_COUNT_PER_TRANS));
        }
        dma_config_linked_descriptor(context->dma_context.dma_ptr, rx_dma_descriptors + i * SPI_DMA_DESC_COUNT_PER_TRANS + 2, dma_ch, &dma_ch_config);

        /* advance the receive buffer by this chunk's byte count */
        rx_buff_index += temp32 * context->data_len_in_byte;
    }
}
214
/*
 * Compute how many per_trans_max-sized SPI transfers are needed to move the
 * whole payload (ceiling division of the relevant direction's count).
 *
 * @param context SPI transfer context (tx_count/rx_count, per_trans_max)
 * @param config  SPI control config (selects which count applies)
 * @return number of chained transfers required
 */
static uint32_t hpm_spi_get_trans_count(spi_context_t *context, spi_control_config_t *config)
{
    uint32_t total_count;

    switch (config->common_config.trans_mode) {
    case spi_trans_write_only:
    case spi_trans_dummy_write:
        total_count = context->tx_count;
        break;
    case spi_trans_read_only:
    case spi_trans_dummy_read:
        total_count = context->rx_count;
        break;
    default:
        /* write read together: both directions must move the same amount */
        assert(context->tx_count == context->rx_count);
        total_count = context->tx_count;
        break;
    }

    /* round up to whole transfers */
    return (total_count + context->per_trans_max - 1) / context->per_trans_max;
}
233
234 /**
235 * spi with dma chain workflow
236 *
237 * 1. call spi_setup_dma_transfer to config SPI for first transmission
238 * 2. execute data transmission phase in dma chain descriptor
239 * 3. execute setting SPI CTRL register phase in dma chain descriptor
240 * 4. execute writing SPI CMD register phase in dma chain descriptor
 * 5. Repeat steps 2-4 until the transmission is finished
242 */
/*
 * Set up a multi-shot SPI transfer driven entirely by a chained DMA
 * descriptor list (see the workflow comment above).
 *
 * The first SPI transfer is configured directly via spi_setup_dma_transfer();
 * every subsequent transfer is re-armed by the CTRL/CMD descriptors in the
 * chain. A dummy memory-to-memory DMA transfer is used purely to kick the
 * chain: its linked_ptr points at the DATA descriptor of the first transfer
 * (index SPI_DMA_DESC_COUNT_PER_TRANS - 1), skipping the CTRL/CMD
 * descriptors that the direct setup already performed.
 *
 * Caller contract (asserted by hpm_spi_setup_dma_transfer): context->write_cs
 * is non-NULL, since a GPIO must hold CS active across all chained transfers.
 *
 * @return status_success, a failing status from the SPI/DMA setup calls, or
 *         status_invalid_argument for an unsupported trans_mode
 */
static hpm_stat_t spi_setup_trans_with_dma_chain(spi_context_t *context, spi_control_config_t *config)
{
    hpm_stat_t stat = status_success;
    SPI_Type *spi_ptr = context->ptr;
    DMA_Type *dma_ptr = context->dma_context.dma_ptr;
    DMAMUX_Type *dmamux_ptr = context->dma_context.dmamux_ptr;
    dma_linked_descriptor_t *dma_linked_descriptor = context->dma_linked_descriptor;
    uint32_t *spi_transctrl = context->spi_transctrl;
    uint32_t dma_channel = 0;
    uint32_t trans_count;
    dma_channel_config_t dma_ch_config = {0};

    /* use a dummy dma transfer to start SPI trans dma chain */
    /* static: these are DMA source/destination and must outlive this call */
    static uint32_t dummy_data1 = 0xff, dummy_data2 = 0xff;

    trans_count = hpm_spi_get_trans_count(context, config);

    /* active spi cs pin */
    context->write_cs(context->cs_pin, SPI_CS_ACTIVE);

    /* config SPI for first dma transmission */
    stat = spi_setup_dma_transfer(spi_ptr,
                    config,
                    &context->cmd,
                    &context->addr,
                    MIN(context->tx_count, context->per_trans_max),
                    MIN(context->rx_count, context->per_trans_max));
    if (stat != status_success) {
        return stat;
    }

    if (config->common_config.trans_mode == spi_trans_write_only || config->common_config.trans_mode == spi_trans_dummy_write) {
        /* write only: TX channel runs the descriptor chain */
        hpm_spi_prepare_dma_tx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
        dma_channel = context->dma_context.tx_dma_ch;
        dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
    } else if (config->common_config.trans_mode == spi_trans_read_only || config->common_config.trans_mode == spi_trans_dummy_read) {
        /* read only: RX channel runs the descriptor chain */
        hpm_prepare_dma_rx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
        dma_channel = context->dma_context.rx_dma_ch;
        dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
    } else if (config->common_config.trans_mode == spi_trans_write_read_together) {
        /* write and read together */
        hpm_spi_prepare_dma_tx_descriptors(context, config, trans_count, spi_transctrl, dma_linked_descriptor);
        dma_channel = context->dma_context.tx_dma_ch;
        dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
        dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
        /* spi tx use chained dma descriptor, spi rx use unchained dma */
        stat = hpm_spi_rx_trigger_dma(dma_ptr,
                        context->dma_context.rx_dma_ch,
                        spi_ptr,
                        core_local_mem_to_sys_address(context->running_core, (uint32_t)context->rx_buff),
                        context->dma_context.data_width,
                        context->rx_size);
        if (stat != status_success) {
            return stat;
        }
    } else {
        return status_invalid_argument;
    }

    /* dummy word copy whose only purpose is to enter the descriptor chain */
    dma_default_channel_config(context->dma_context.dma_ptr, &dma_ch_config);
    dma_ch_config.src_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_data1);
    dma_ch_config.dst_addr = core_local_mem_to_sys_address(context->running_core, (uint32_t)&dummy_data2);
    dma_ch_config.src_burst_size = DMA_NUM_TRANSFER_PER_BURST_1T;
    dma_ch_config.src_width = DMA_TRANSFER_WIDTH_WORD;
    dma_ch_config.dst_width = DMA_TRANSFER_WIDTH_WORD;
    dma_ch_config.size_in_byte = 4;
    /* start data transmission phase in dma chain */
    dma_ch_config.linked_ptr = core_local_mem_to_sys_address(context->running_core, (uint32_t)(dma_linked_descriptor + SPI_DMA_DESC_COUNT_PER_TRANS - 1));

    stat = dma_setup_channel(dma_ptr, dma_channel, &dma_ch_config, true);
    if (stat != status_success) {
        return stat;
    }

    return stat;
}
321
spi_setup_trans_with_dma(spi_context_t * context,spi_control_config_t * config)322 static hpm_stat_t spi_setup_trans_with_dma(spi_context_t *context, spi_control_config_t *config)
323 {
324 hpm_stat_t stat = status_success;
325 SPI_Type *spi_ptr = context->ptr;
326 DMA_Type *dma_ptr = context->dma_context.dma_ptr;
327 DMAMUX_Type *dmamux_ptr = context->dma_context.dmamux_ptr;
328 uint32_t trans_mode = config->common_config.trans_mode;
329
330 if (context->write_cs != NULL) {
331 context->write_cs(context->cs_pin, SPI_CS_ACTIVE);
332 }
333 stat = spi_setup_dma_transfer(spi_ptr, config,
334 &context->cmd, &context->addr,
335 context->tx_count, context->rx_count);
336 if (stat != status_success) {
337 return stat;
338 }
339
340 if (trans_mode != spi_trans_write_only && trans_mode != spi_trans_dummy_write && trans_mode != spi_trans_no_data) {
341 dmamux_config(dmamux_ptr, context->dma_context.rx_dmamux_ch, context->dma_context.rx_req, true);
342 stat = hpm_spi_rx_trigger_dma(dma_ptr,
343 context->dma_context.rx_dma_ch,
344 spi_ptr,
345 core_local_mem_to_sys_address(context->running_core, (uint32_t)context->rx_buff),
346 context->dma_context.data_width,
347 context->rx_size);
348 if (stat != status_success) {
349 return stat;
350 }
351 }
352 if (trans_mode != spi_trans_read_only && trans_mode != spi_trans_dummy_read && trans_mode != spi_trans_no_data) {
353 dmamux_config(dmamux_ptr, context->dma_context.tx_dmamux_ch, context->dma_context.tx_req, true);
354 stat = hpm_spi_tx_trigger_dma(dma_ptr,
355 context->dma_context.tx_dma_ch,
356 spi_ptr,
357 core_local_mem_to_sys_address(context->running_core, (uint32_t)context->tx_buff),
358 context->dma_context.data_width,
359 context->tx_size);
360 if (stat != status_success) {
361 return stat;
362 }
363 }
364
365 return stat;
366 }
367
hpm_spi_setup_dma_transfer(spi_context_t * context,spi_control_config_t * config)368 hpm_stat_t hpm_spi_setup_dma_transfer(spi_context_t *context, spi_control_config_t *config)
369 {
370 assert(context != NULL || config != NULL);
371 /* use dma */
372 assert(&context->dma_context != NULL);
373 /* spi per trans data size not zero */
374 assert(context->per_trans_max);
375
376 hpm_stat_t stat = status_success;
377
378 if (l1c_dc_is_enabled()) {
379 /* cache writeback for tx buff */
380 if (context->tx_buff != NULL && context->tx_size != 0) {
381 uint32_t aligned_start = HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)context->tx_buff);
382 uint32_t aligned_end = HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)context->tx_buff + context->tx_size);
383 uint32_t aligned_size = aligned_end - aligned_start;
384 l1c_dc_writeback(aligned_start, aligned_size);
385 }
386 /* cache invalidate for receive buff */
387 if (context->rx_buff != NULL && context->rx_size != 0) {
388 uint32_t aligned_start = HPM_L1C_CACHELINE_ALIGN_DOWN((uint32_t)context->rx_buff);
389 uint32_t aligned_end = HPM_L1C_CACHELINE_ALIGN_UP((uint32_t)context->rx_buff + context->rx_size);
390 uint32_t aligned_size = aligned_end - aligned_start;
391 l1c_dc_invalidate(aligned_start, aligned_size);
392 }
393 }
394
395 if ((context->rx_count > context->per_trans_max) || (context->tx_count > context->per_trans_max)) {
396 /* multiple SPI transmissions with chained DMA */
397 assert(config->common_config.trans_mode == spi_trans_read_only || config->common_config.trans_mode == spi_trans_dummy_read
398 || config->common_config.trans_mode == spi_trans_write_only || config->common_config.trans_mode == spi_trans_dummy_write
399 || config->common_config.trans_mode == spi_trans_write_read_together);
400 /* master mode */
401 assert((context->ptr->TRANSFMT & SPI_TRANSFMT_SLVMODE_MASK) != SPI_TRANSFMT_SLVMODE_MASK);
402 /* GPIO should be used to replace SPI CS pin for SPI chained DMA transmissions */
403 assert(context->write_cs != NULL);
404
405 stat = spi_setup_trans_with_dma_chain(context, config);
406 } else {
407 /* one SPI transmissions with chained DMA */
408 stat = spi_setup_trans_with_dma(context, config);
409 }
410
411 return stat;
412 }
413
414 /* Using GPIO as SPI CS pin */
415 /* When SPI trans completed, GPIO cs pin should be released manually */
hpm_spi_release_gpio_cs(spi_context_t * context)416 hpm_stat_t hpm_spi_release_gpio_cs(spi_context_t *context)
417 {
418 hpm_stat_t stat;
419 SPI_Type *ptr = context->ptr;
420 assert(context->write_cs != NULL);
421
422 stat = spi_wait_for_idle_status(ptr);
423 if (stat != status_success) {
424 return stat;
425 }
426
427 context->write_cs(context->cs_pin, !SPI_CS_ACTIVE);
428 return status_success;
429 }
430
431