/**
 * Copyright (c) 2020 HiSilicon (Shanghai) Technologies CO., LIMITED.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Description: Provides spi driver source \n
 *
 * History: \n
 * 2022-09-16, Create file. \n
 */
#include "soc_osal.h"
#include "common_def.h"
#include "spi_porting.h"
#include "spi.h"
#if defined(CONFIG_SPI_SUPPORT_MASTER) || defined(CONFIG_SPI_SUPPORT_SLAVE)
#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
#include "dma_porting.h"
#include "dma.h"
#include "hal_dma.h"
#include "securec.h"

#define DMA_SPI_TRANSFER_WIDTH_WORD         4
#define DMA_SPI_TRANSFER_WIDTH_HALF_WORD    2

typedef struct spi_dma_trans_inf {
    bool trans_succ;
#if !defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH)
    bool is_enable;
#endif /* NOT CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
    uint8_t channel;
    uint8_t reserved;
    osal_semaphore dma_sem;
} spi_dma_trans_inf_t;

static spi_dma_trans_inf_t g_dma_trans_tx[SPI_BUS_MAX_NUM] = { 0 };
static spi_dma_trans_inf_t g_dma_trans_rx[SPI_BUS_MAX_NUM] = { 0 };
#if !defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH)
static spi_dma_config_t g_dma_cfg[SPI_BUS_MAX_NUM] = { 0 };
#endif /* NOT CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
#endif /* CONFIG_SPI_SUPPORT_DMA */

#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
#define SPI_MAX_NUMBER_OF_FRAGMENTS 4

/**
 * @brief A fragment of data that is to be transmitted.
 */
typedef struct spi_tx_fragment {
    uint8_t *data;
    void *params;
    uint32_t data_length;
} spi_tx_fragment_t;

/**
 * @brief The SPI transmission configuration parameters.
 */
typedef struct spi_tx_state {
    spi_tx_fragment_t *current_tx_fragment; /*!< Current TX fragment being transmitted. */
    spi_tx_fragment_t *free_tx_fragment;    /*!< Next unused TX fragment admin block available
                                                 for reuse. */
    uint16_t fragments_to_process;          /*!< Number of fragments to process, including the current one. */
    uint32_t current_tx_fragment_pos;       /*!< Index of the next byte to be transmitted in the
                                                 current TX fragment; current_tx_fragment_pos == 0 means
                                                 the first byte is yet to be sent for transmission. */
    spi_tx_fragment_t fragment_buffer[SPI_MAX_NUMBER_OF_FRAGMENTS]; /*!< Fragments buffer. */
    spi_tx_callback_t tx_callback;          /*!< The TX callback to make when a fragment completes. */
} spi_tx_state_t;

/**
 * @brief The SPI reception configuration parameters.
 */
typedef struct spi_rx_state {
    uint32_t rx_buffer_size;       /*!< The size of the receive buffer. */
    spi_rx_callback_t rx_callback; /*!< The RX callback to make when the condition is met. */
    uint32_t new_rx_pos;           /*!< Index of the position in the RX buffer where new data
                                        should be put; (new_rx_pos == 0) means the buffer is empty. */
    uint8_t *rx_buffer;            /*!< The RX data buffer. */
} spi_rx_state_t;

static bool g_spi_tx_int_flag[SPI_BUS_MAX_NUM];
static bool g_spi_rx_int_flag[SPI_BUS_MAX_NUM];
/**
 * @brief Internal SPI TX configuration.
 */
static spi_tx_state_t g_spi_tx_state[SPI_BUS_MAX_NUM];
/**
 * @brief Internal SPI RX configuration.
 */
static spi_rx_state_t g_spi_rx_state[SPI_BUS_MAX_NUM];

#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
#endif /* CONFIG_SPI_SUPPORT_MASTER || CONFIG_SPI_SUPPORT_SLAVE */

#define SPI_TX_FIFO_BUSY_TIMEOUT 3200000
#define hal_spi_frame_size_trans_to_frame_bytes(x) (((x) + 1) >> 0x03)
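/* The macro above assumes frame_size is encoded as (frame bits - 1), as is common
   for SPI data-frame-size register fields: 7 -> 1 byte, 15 -> 2 bytes, 31 -> 4 bytes. */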

static bool g_spi_is_initialised[SPI_BUS_MAX_NUM] = { false };
#if defined(CONFIG_SPI_SUPPORT_CONCURRENCY) && (CONFIG_SPI_SUPPORT_CONCURRENCY == 1)
static osal_mutex g_spi_mutex[SPI_BUS_MAX_NUM] = { NULL };
#endif /* CONFIG_SPI_SUPPORT_CONCURRENCY */

#if defined(CONFIG_SPI_SUPPORT_CONCURRENCY) && (CONFIG_SPI_SUPPORT_CONCURRENCY == 1)
static bool spi_mutex_lock(spi_bus_t bus)
{
    if (osal_mutex_lock_timeout(&g_spi_mutex[bus], OSAL_MUTEX_WAIT_FOREVER) != OSAL_SUCCESS) {
        return false;
    }
    return true;
}
#endif /* CONFIG_SPI_SUPPORT_CONCURRENCY */

static void spi_mutex_unlock(spi_bus_t bus)
{
#if defined(CONFIG_SPI_SUPPORT_CONCURRENCY) && (CONFIG_SPI_SUPPORT_CONCURRENCY == 1)
    osal_mutex_unlock(&g_spi_mutex[bus]);
#endif /* CONFIG_SPI_SUPPORT_CONCURRENCY */
    unused(bus);
}

#if defined(CONFIG_SPI_SUPPORT_MASTER) || defined(CONFIG_SPI_SUPPORT_SLAVE)
static uint32_t spi_get_attr_tmod(spi_bus_t bus)
{
    spi_attr_t attr; /* uapi_spi_get_attr will copy attr from hal, no need init to 0. */

    if (uapi_spi_get_attr(bus, &attr) != ERRCODE_SUCC) {
        return 0;
    }
    return attr.tmod;
}

static errcode_t spi_param_check(spi_bus_t bus, spi_mode_t spi_mode, hal_spi_trans_mode_t trans_mode)
{
    if (spi_porting_get_device_mode(bus) != spi_mode) {
        return ERRCODE_SPI_MODE_MISMATCH;
    }

    /* trans_mode is the transfer mode that conflicts with the requested operation,
       e.g. a write is invalid while the current tmod is RX-only. */
    if (spi_get_attr_tmod(bus) == trans_mode) {
        return ERRCODE_SPI_INVALID_TMODE;
    }

    return ERRCODE_SUCC;
}

static errcode_t spi_fifo_check(spi_bus_t bus)
{
#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    if (!g_spi_tx_int_flag[bus]) {
        if (hal_spi_ctrl(bus, SPI_CTRL_CHECK_FIFO_BUSY, (uintptr_t)SPI_TX_FIFO_BUSY_TIMEOUT) != ERRCODE_SUCC) {
            return ERRCODE_SPI_TIMEOUT;
        }
    }
#else
    if (hal_spi_ctrl(bus, SPI_CTRL_CHECK_FIFO_BUSY, (uintptr_t)SPI_TX_FIFO_BUSY_TIMEOUT) != ERRCODE_SUCC) {
        return ERRCODE_SPI_TIMEOUT;
    }
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
    return ERRCODE_SUCC;
}

static errcode_t spi_param_check_read(spi_bus_t bus, spi_mode_t spi_mode, hal_spi_trans_mode_t trans_mode,
                                      const spi_xfer_data_t *data, bool check_fifo)
{
    if (bus >= SPI_BUS_MAX_NUM) {
        return ERRCODE_INVALID_PARAM;
    }

    if (data == NULL || data->rx_buff == NULL || data->rx_bytes == 0) {
        return ERRCODE_INVALID_PARAM;
    }

    errcode_t ret = spi_param_check(bus, spi_mode, trans_mode);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }
    if (check_fifo) {
        if (spi_fifo_check(bus) != ERRCODE_SUCC) {
            return ERRCODE_SPI_TIMEOUT;
        }
    }
#if defined(CONFIG_SPI_SUPPORT_CONCURRENCY) && (CONFIG_SPI_SUPPORT_CONCURRENCY == 1)
    if (!spi_mutex_lock(bus)) {
        return ERRCODE_SPI_TIMEOUT;
    }
#endif /* CONFIG_SPI_SUPPORT_CONCURRENCY */

    return ERRCODE_SUCC;
}

static errcode_t spi_param_check_write(spi_bus_t bus, spi_mode_t spi_mode, hal_spi_trans_mode_t trans_mode,
                                       const spi_xfer_data_t *data, bool check_fifo)
{
    /* data->tx_buff may be NULL and data->tx_bytes may be 0, e.g. a flash device
       writes a command with no data in master mode. */
    if (bus >= SPI_BUS_MAX_NUM || data == NULL) {
        return ERRCODE_INVALID_PARAM;
    }

    errcode_t ret = spi_param_check(bus, spi_mode, trans_mode);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }

    if (check_fifo) {
        if (spi_fifo_check(bus) != ERRCODE_SUCC) {
            return ERRCODE_SPI_TIMEOUT;
        }
    }
#if defined(CONFIG_SPI_SUPPORT_CONCURRENCY) && (CONFIG_SPI_SUPPORT_CONCURRENCY == 1)
    if (!spi_mutex_lock(bus)) {
        return ERRCODE_SPI_TIMEOUT;
    }
#endif /* CONFIG_SPI_SUPPORT_CONCURRENCY */
    return ERRCODE_SUCC;
}

#endif /* CONFIG_SPI_SUPPORT_MASTER || CONFIG_SPI_SUPPORT_SLAVE */

#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
static bool spi_helper_add_fragment(spi_bus_t bus, const void *buffer, uint32_t length)
{
    spi_tx_fragment_t *fragment = NULL;

    if (g_spi_tx_state[bus].fragments_to_process >= SPI_MAX_NUMBER_OF_FRAGMENTS) {
        return false;
    } else {
        /* A free fragment slot is available:
           put it on the queue and populate the fragment. */
        fragment = g_spi_tx_state[bus].free_tx_fragment;
        fragment->data = (uint8_t *)buffer;
        fragment->data_length = length;

        /* Update the counters */
        g_spi_tx_state[bus].free_tx_fragment++;
        if (g_spi_tx_state[bus].free_tx_fragment >=
            g_spi_tx_state[bus].fragment_buffer + SPI_MAX_NUMBER_OF_FRAGMENTS) {
            g_spi_tx_state[bus].free_tx_fragment = g_spi_tx_state[bus].fragment_buffer; /* wrapping */
        }
        g_spi_tx_state[bus].fragments_to_process++;
    }
    return true;
}

static inline bool spi_helper_is_the_current_fragment_the_last_to_process(spi_bus_t bus)
{
    return (g_spi_tx_state[bus].fragments_to_process == 1);
}

static inline bool spi_helper_are_there_fragments_to_process(spi_bus_t bus)
{
    return (g_spi_tx_state[bus].fragments_to_process > 0);
}

static bool spi_helper_send_next_char(spi_bus_t bus, uint32_t frame_bytes)
{
    spi_tx_fragment_t *current_fragment;
    uint32_t current_fragment_pos;
    spi_xfer_data_t data;

    current_fragment = g_spi_tx_state[bus].current_tx_fragment;
    current_fragment_pos = g_spi_tx_state[bus].current_tx_fragment_pos;

    data.tx_buff = &current_fragment->data[current_fragment_pos];
    data.tx_bytes = frame_bytes;
    hal_spi_write(bus, &data, CONFIG_SPI_MAX_TIMEOUT);
    /* Update the counters */
    g_spi_tx_state[bus].current_tx_fragment_pos += frame_bytes;

    return (g_spi_tx_state[bus].current_tx_fragment_pos >= current_fragment->data_length);
}

static void spi_helper_invoke_current_fragment_callback(spi_bus_t bus)
{
    spi_tx_fragment_t *current_fragment = g_spi_tx_state[bus].current_tx_fragment;
    /* Call any TX data release call-back */
    if (g_spi_tx_state[bus].tx_callback != NULL) {
        g_spi_tx_state[bus].tx_callback(current_fragment->data, current_fragment->data_length);
    }
}

static void spi_helper_move_to_next_fragment(spi_bus_t bus)
{
    /* Move onto the next fragment and reset the position to zero */
    g_spi_tx_state[bus].current_tx_fragment++;
    if (g_spi_tx_state[bus].current_tx_fragment >=
        g_spi_tx_state[bus].fragment_buffer + SPI_MAX_NUMBER_OF_FRAGMENTS) {
        g_spi_tx_state[bus].current_tx_fragment = g_spi_tx_state[bus].fragment_buffer; /* wrapping */
    }
    g_spi_tx_state[bus].current_tx_fragment_pos = 0; /* reset the current fragment */
    g_spi_tx_state[bus].fragments_to_process--;      /* one fragment less to process */
}

static void spi_tx_isr(spi_bus_t bus)
{
    bool tx_fifo_full = false;
    hal_spi_attr_t attr; /* will copy attr from hal, no need init to 0. */

    hal_spi_ctrl(bus, SPI_CTRL_GET_ATTR, (uintptr_t)&attr);
    uint32_t frame_bytes = hal_spi_frame_size_trans_to_frame_bytes(attr.frame_size);

    /* If there are fragments to process, do it */
    if (!spi_helper_are_there_fragments_to_process(bus)) {
        /* No data to transmit so disable the TX interrupt */
        hal_spi_ctrl(bus, SPI_CTRL_EN_TXEI_INT, 0);
        return;
    }

    if (g_spi_tx_state[bus].current_tx_fragment_pos == 0) {
        if (hal_spi_ctrl(bus, SPI_CTRL_CHECK_FIFO_BUSY, (uintptr_t)SPI_TX_FIFO_BUSY_TIMEOUT) != ERRCODE_SUCC) {
            return;
        }
    }

    hal_spi_ctrl(bus, SPI_CTRL_CHECK_TX_FIFO_FULL, (uintptr_t)&tx_fifo_full);
    /* Populate the SPI TX FIFO while there is data to send */
    while (!tx_fifo_full) {
        /* There is some data to transmit so provide another frame to the SPI */
        bool end_of_fragment = spi_helper_send_next_char(bus, frame_bytes);
        if (end_of_fragment) {
            /* If it is the end of the fragment, invoke the callback and move to the next one */
            spi_helper_invoke_current_fragment_callback(bus);
            spi_helper_move_to_next_fragment(bus);
            /* If it was the last fragment, disable the TX interrupts and leave */
            if (spi_helper_are_there_fragments_to_process(bus) == false) {
                /* No data to transmit so disable the TX interrupt */
                hal_spi_ctrl(bus, SPI_CTRL_EN_TXEI_INT, 0);
                break;
            }
        }

        hal_spi_ctrl(bus, SPI_CTRL_CHECK_TX_FIFO_FULL, (uintptr_t)&tx_fifo_full);
    }
}

static inline void spi_rx_buffer_release(spi_bus_t bus)
{
    g_spi_rx_state[bus].new_rx_pos = 0;
}

static inline bool spi_rx_buffer_has_free_space(spi_bus_t bus)
{
    return (g_spi_rx_state[bus].new_rx_pos < g_spi_rx_state[bus].rx_buffer_size);
}

static void spi_rx_finish_clear_ndf(spi_bus_t bus)
{
    spi_attr_t attr; /* uapi_spi_get_attr will copy attr from hal, no need init to 0. */
    if (uapi_spi_get_attr(bus, &attr) != ERRCODE_SUCC) {
        return;
    }
    attr.ndf = 1;
    if (uapi_spi_set_attr(bus, &attr) != ERRCODE_SUCC) {
        return;
    }
}

static void spi_rx_isr(spi_bus_t bus)
{
    if (g_spi_rx_state[bus].rx_callback == NULL) {
        return;
    }
    spi_xfer_data_t data;

    data.rx_bytes = g_spi_rx_state[bus].rx_buffer_size - g_spi_rx_state[bus].new_rx_pos;
    data.rx_buff = g_spi_rx_state[bus].rx_buffer + g_spi_rx_state[bus].new_rx_pos;

    if (hal_spi_read(bus, &data, 0) != ERRCODE_SUCC) {
        return;
    }
    g_spi_rx_state[bus].new_rx_pos = g_spi_rx_state[bus].rx_buffer_size - data.rx_bytes;

    if (!spi_rx_buffer_has_free_space(bus)) {
        hal_spi_ctrl(bus, SPI_CTRL_EN_RXFI_INT, 0);
        g_spi_rx_state[bus].rx_callback(g_spi_rx_state[bus].rx_buffer, g_spi_rx_state[bus].new_rx_pos, false);
        spi_rx_buffer_release(bus);
        spi_rx_finish_clear_ndf(bus);
    }
}

static errcode_t spi_evt_callback(spi_bus_t bus, hal_spi_evt_id_t evt, uintptr_t param)
{
    unused(param);

    switch (evt) {
        case SPI_EVT_TX_EMPTY_ISR:
            spi_tx_isr(bus);
            break;
        case SPI_EVT_TX_OVERFLOW_ISR:
            break;
        case SPI_EVT_RX_FULL_ISR:
            spi_rx_isr(bus);
            break;
        case SPI_EVT_RX_OVERFLOW_ISR:
            break;
        case SPI_EVT_RX_UNDERFLOW_ISR:
            break;
        case SPI_EVT_MULTI_MASTER_ISR:
            break;
    }
    return ERRCODE_SUCC;
}

static void spi_config_tx_state(spi_bus_t bus)
{
    /* Configure TX state structure */
    g_spi_tx_state[bus].current_tx_fragment = g_spi_tx_state[bus].fragment_buffer; /* the queue is empty */
    g_spi_tx_state[bus].free_tx_fragment = g_spi_tx_state[bus].fragment_buffer;    /* the queue is empty */
}
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */

static void spi_mutex_init(spi_bus_t bus)
{
#if defined(CONFIG_SPI_SUPPORT_CONCURRENCY) && (CONFIG_SPI_SUPPORT_CONCURRENCY == 1)
    osal_mutex_init(&g_spi_mutex[bus]);
#endif /* CONFIG_SPI_SUPPORT_CONCURRENCY */
    unused(bus);
}

static void spi_mutex_deinit(spi_bus_t bus)
{
#if defined(CONFIG_SPI_SUPPORT_CONCURRENCY) && (CONFIG_SPI_SUPPORT_CONCURRENCY == 1)
    osal_mutex_destroy(&g_spi_mutex[bus]);
#endif /* CONFIG_SPI_SUPPORT_CONCURRENCY */
    unused(bus);
}

static void spi_int_mode_init(spi_bus_t bus)
{
#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    spi_port_register_irq(bus);
    spi_config_tx_state(bus);
#else
    unused(bus);
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
}

errcode_t uapi_spi_init(spi_bus_t bus, spi_attr_t *attr, spi_extra_attr_t *extra_attr)
{
    errcode_t ret = ERRCODE_SUCC;

    if (bus >= SPI_BUS_MAX_NUM || attr == NULL) {
        return ERRCODE_INVALID_PARAM;
    }

    if (g_spi_is_initialised[bus]) {
        return ret;
    }

#if defined(CONFIG_SPI_SUPPORT_LPC)
    spi_port_clock_enable(bus, true);
#endif

#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    (void)osal_sem_init(&(g_dma_trans_tx[bus].dma_sem), 0);
    (void)osal_sem_init(&(g_dma_trans_rx[bus].dma_sem), 0);
#endif /* CONFIG_SPI_SUPPORT_DMA */

    spi_mutex_init(bus);

    spi_int_mode_init(bus);

#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    ret = hal_spi_init(bus, (hal_spi_attr_t *)attr, (hal_spi_extra_attr_t *)extra_attr, spi_evt_callback);
#else /* CONFIG_SPI_SUPPORT_INTERRUPT */
    ret = hal_spi_init(bus, (hal_spi_attr_t *)attr, (hal_spi_extra_attr_t *)extra_attr, NULL);
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
    if (ret == ERRCODE_SUCC) {
        g_spi_is_initialised[bus] = true;
    }

    return ret;
}
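
/*
 * A minimal master-mode bring-up sketch (illustrative, not part of the driver).
 * The frame-size and transfer-mode enum values below are assumptions based on
 * the HAL_SPI_* naming used in this file; the remaining spi_attr_t fields
 * (clock, polarity, phase) must be filled in per the target's spi.h.
 * @code
 *     spi_attr_t attr = { 0 };
 *     spi_extra_attr_t extra_attr = { 0 };
 *     attr.frame_size = HAL_SPI_FRAME_SIZE_8;  // assumed enum name: 8-bit frames
 *     attr.tmod = HAL_SPI_TRANS_MODE_TXRX;     // assumed enum name: full duplex
 *     attr.ndf = 1;                            // data frames for RX-only transfers
 *     if (uapi_spi_init(SPI_BUS_0, &attr, &extra_attr) == ERRCODE_SUCC) {
 *         // bus is ready; in master mode, pick a device via uapi_spi_select_slave()
 *     }
 * @endcode
 */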

errcode_t uapi_spi_deinit(spi_bus_t bus)
{
    if (bus >= SPI_BUS_MAX_NUM) {
        return ERRCODE_INVALID_PARAM;
    }

    if (!g_spi_is_initialised[bus]) {
        return ERRCODE_SUCC;
    }

    errcode_t ret = hal_spi_deinit(bus);

#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    osal_sem_destroy(&(g_dma_trans_tx[bus].dma_sem));
    osal_sem_destroy(&(g_dma_trans_rx[bus].dma_sem));
#endif /* CONFIG_SPI_SUPPORT_DMA */

#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    spi_port_unregister_irq(bus);
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */

#if defined(CONFIG_SPI_SUPPORT_LPC)
    spi_port_clock_enable(bus, false);
#endif

    spi_mutex_deinit(bus);

    g_spi_is_initialised[bus] = false;

    return ret;
}

#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
errcode_t uapi_spi_set_dma_mode(spi_bus_t bus, bool en, const spi_dma_config_t *dma_cfg)
{
    if (bus >= SPI_BUS_MAX_NUM) {
        return ERRCODE_INVALID_PARAM;
    }
    hal_spi_dma_cfg_param_t data;
#if !defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH)
#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    if (g_spi_rx_int_flag[bus] || g_spi_tx_int_flag[bus]) {
        return ERRCODE_SPI_DMA_IRQ_MODE_MUTEX;
    }
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
#endif /* NOT CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    uint32_t irq_sts = spi_porting_lock(bus);
    if (en) {
        data.dma_tx_level = spi_port_tx_data_level_get(bus);
        data.dma_rx_level = spi_port_rx_data_level_get(bus);
#if !defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH)
        g_dma_cfg[bus].src_width = dma_cfg->src_width;
        g_dma_cfg[bus].dest_width = dma_cfg->dest_width;
        g_dma_cfg[bus].burst_length = dma_cfg->burst_length;
        g_dma_cfg[bus].priority = dma_cfg->priority;
#else
        unused(dma_cfg);
#endif /* NOT CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
    } else {
        data.dma_tx_level = 0;
        data.dma_rx_level = 0;
    }

    data.is_enable = en;
    hal_spi_ctrl(bus, SPI_CTRL_SET_DMA_CFG, (uintptr_t)&data);
#if !defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH)
    g_dma_trans_tx[bus].is_enable = en;
    g_dma_trans_rx[bus].is_enable = en;
#endif /* NOT CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
    spi_porting_unlock(bus, irq_sts);

    return ERRCODE_SUCC;
}
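
/*
 * Sketch of enabling DMA mode explicitly (only needed when
 * CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH is not selecting DMA for you).
 * The width/burst/priority choices are illustrative; they must match the
 * configured frame size and the buffer alignment of later transfers.
 * @code
 *     spi_dma_config_t dma_cfg = { 0 };
 *     dma_cfg.src_width = HAL_DMA_TRANSFER_WIDTH_8;               // one byte per beat
 *     dma_cfg.dest_width = HAL_DMA_TRANSFER_WIDTH_8;
 *     dma_cfg.burst_length = HAL_DMA_BURST_TRANSACTION_LENGTH_1;
 *     dma_cfg.priority = HAL_DMA_CH_PRIORITY_0;
 *     if (uapi_spi_set_dma_mode(SPI_BUS_0, true, &dma_cfg) != ERRCODE_SUCC) {
 *         // DMA and IRQ modes are mutually exclusive; disable IRQ mode first
 *     }
 * @endcode
 */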

static void spi_dma_isr(uint8_t int_type, uint8_t ch, uintptr_t arg)
{
    uint8_t bus = SPI_BUS_MAX_NUM;
    spi_dma_trans_inf_t *dma_trans = NULL;

    if ((uint32_t)arg == 0) {
        dma_trans = g_dma_trans_tx;
    } else {
        dma_trans = g_dma_trans_rx;
    }

    for (uint8_t i = SPI_BUS_0; i < SPI_BUS_MAX_NUM; i++) {
        /* The channel field defaults to 0, meaning unused; channel > 0 means in use.
           Storing ch + 1 therefore cannot be confused with the unused value 0. */
        if (dma_trans[i].channel == ch + 1) {
            bus = i;
            break;
        }
    }

    if (bus != SPI_BUS_MAX_NUM) {
        if (int_type == 0) {
            dma_trans[bus].trans_succ = true;
        }
        osal_sem_up(&(dma_trans[bus].dma_sem));
    }
}

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
static inline bool spi_dma_align_check_word(uint32_t data_buf, uint32_t data_len, uint32_t align_data)
{
    return ((data_buf % align_data) == 0) && ((data_len % align_data) == 0);
}

static hal_dma_data_width_t spi_dma_get_mem_width(uint32_t buff, uint32_t bytes)
{
    if (spi_dma_align_check_word(buff, bytes, (uint32_t)DMA_SPI_TRANSFER_WIDTH_WORD)) {
        return HAL_DMA_TRANSFER_WIDTH_32;
    }
    if (spi_dma_align_check_word(buff, bytes, (uint32_t)DMA_SPI_TRANSFER_WIDTH_HALF_WORD)) {
        return HAL_DMA_TRANSFER_WIDTH_16;
    }
    return HAL_DMA_TRANSFER_WIDTH_8;
}

static hal_dma_data_width_t spi_dma_get_spi_width(spi_bus_t bus)
{
    hal_spi_attr_t attr; /* will copy attr from hal, no need init to 0. */

    hal_spi_ctrl(bus, SPI_CTRL_GET_ATTR, (uintptr_t)&attr);
    uint32_t frame_bytes = hal_spi_frame_size_trans_to_frame_bytes(attr.frame_size);

    if (frame_bytes == (uint32_t)DMA_SPI_TRANSFER_WIDTH_WORD) {
        return HAL_DMA_TRANSFER_WIDTH_32;
    } else if (frame_bytes == (uint32_t)DMA_SPI_TRANSFER_WIDTH_HALF_WORD) {
        return HAL_DMA_TRANSFER_WIDTH_16;
    }
    return HAL_DMA_TRANSFER_WIDTH_8;
}
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

#if !defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH)
static void spi_dma_common_config(dma_ch_user_peripheral_config_t *transfer_config, const spi_dma_config_t *dma_cfg)
{
    transfer_config->src_width = dma_cfg->src_width;
    transfer_config->dest_width = dma_cfg->dest_width;
    transfer_config->burst_length = dma_cfg->burst_length;
    transfer_config->priority = dma_cfg->priority;
}
#endif /* NOT CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

static void spi_dma_tx_config(dma_ch_user_peripheral_config_t *transfer_config, spi_bus_t bus)
{
    uint32_t data_addr = 0;
    hal_spi_ctrl(bus, SPI_CTRL_GET_DMA_DATA_ADDR, (uintptr_t)(&data_addr));

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    transfer_config->dest_width = spi_dma_get_spi_width(bus);
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
    transfer_config->trans_type = HAL_DMA_TRANS_MEMORY_TO_PERIPHERAL_DMA;
    transfer_config->trans_dir = HAL_DMA_TRANSFER_DIR_MEM_TO_PERIPHERAL;
    transfer_config->src_increment = HAL_DMA_ADDRESS_INC_INCREMENT;
    transfer_config->dest_increment = HAL_DMA_ADDRESS_INC_NO_CHANGE;
    transfer_config->protection = HAL_DMA_PROTECTION_CONTROL_BUFFERABLE;
    transfer_config->src_handshaking = 0;
    transfer_config->dest = data_addr;
}

static errcode_t spi_write_dma(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    dma_ch_user_peripheral_config_t transfer_config = { 0 };
    uint8_t channel = DMA_CHANNEL_NONE;
    if (data->tx_buff == NULL || data->tx_bytes == 0) {
        return ERRCODE_INVALID_PARAM;
    }
    transfer_config.src = (uint32_t)(uintptr_t)data->tx_buff;
#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    transfer_config.src_width = spi_dma_get_mem_width(transfer_config.src, data->tx_bytes);
    transfer_config.transfer_num = (uint16_t)data->tx_bytes >> transfer_config.src_width;
    transfer_config.burst_length = HAL_DMA_BURST_TRANSACTION_LENGTH_1;
    transfer_config.priority = HAL_DMA_CH_PRIORITY_0;
#else
    if (data->tx_bytes % bit(g_dma_cfg[bus].src_width) == 0) {
        transfer_config.transfer_num = (uint16_t)data->tx_bytes >> g_dma_cfg[bus].src_width;
    } else {
        return ERRCODE_INVALID_PARAM;
    }

    spi_dma_common_config(&transfer_config, &(g_dma_cfg[bus]));
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
    spi_dma_tx_config(&transfer_config, bus);

    transfer_config.dest_handshaking = spi_port_get_dma_trans_dest_handshaking(bus);
    if (transfer_config.dest_handshaking == HAL_DMA_HANDSHAKING_MAX_NUM) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (uapi_dma_configure_peripheral_transfer_single(&transfer_config, &channel, spi_dma_isr, 0) != ERRCODE_SUCC) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    g_dma_trans_tx[bus].channel = channel + 1;
    g_dma_trans_tx[bus].trans_succ = false;

    if (uapi_dma_start_transfer(channel) != ERRCODE_SUCC) {
        g_dma_trans_tx[bus].channel = 0;
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (osal_sem_down_timeout(&(g_dma_trans_tx[bus].dma_sem), timeout) != OSAL_SUCCESS) {
        uapi_dma_end_transfer(channel);
        g_dma_trans_tx[bus].channel = 0;
        return ERRCODE_SPI_DMA_TRANSFER_ERROR;
    }

    uapi_dma_end_transfer(channel);
    g_dma_trans_tx[bus].channel = 0;

    if (!g_dma_trans_tx[bus].trans_succ) {
        return ERRCODE_SPI_DMA_TRANSFER_ERROR;
    }

    return ERRCODE_SUCC;
}

static void spi_dma_rx_config(dma_ch_user_peripheral_config_t *transfer_config, spi_bus_t bus)
{
    uint32_t data_addr = 0;
    hal_spi_ctrl(bus, SPI_CTRL_GET_DMA_DATA_ADDR, (uintptr_t)(&data_addr));

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    transfer_config->src_width = spi_dma_get_spi_width(bus);
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
    transfer_config->trans_type = HAL_DMA_TRANS_PERIPHERAL_TO_MEMORY_DMA;
    transfer_config->trans_dir = HAL_DMA_TRANSFER_DIR_PERIPHERAL_TO_MEM;
    transfer_config->src_increment = HAL_DMA_ADDRESS_INC_NO_CHANGE;
    transfer_config->dest_increment = HAL_DMA_ADDRESS_INC_INCREMENT;
    transfer_config->protection = HAL_DMA_PROTECTION_CONTROL_BUFFERABLE;
    transfer_config->dest_handshaking = 0;
    transfer_config->src = data_addr;
}

static errcode_t spi_read_dma(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    dma_ch_user_peripheral_config_t transfer_config = { 0 };
    uint8_t channel = DMA_CHANNEL_NONE;

    spi_dma_rx_config(&transfer_config, bus);
    transfer_config.dest = (uint32_t)(uintptr_t)data->rx_buff;
#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    transfer_config.dest_width = spi_dma_get_mem_width(transfer_config.dest, data->rx_bytes);
    transfer_config.burst_length = HAL_DMA_BURST_TRANSACTION_LENGTH_1;
    transfer_config.priority = HAL_DMA_CH_PRIORITY_0;
    transfer_config.transfer_num = (uint16_t)data->rx_bytes >> transfer_config.src_width;
#else
    if (data->rx_bytes % bit(g_dma_cfg[bus].src_width) == 0) {
        transfer_config.transfer_num = (uint16_t)data->rx_bytes >> g_dma_cfg[bus].src_width;
    } else {
        return ERRCODE_INVALID_PARAM;
    }

    spi_dma_common_config(&transfer_config, &(g_dma_cfg[bus]));
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    transfer_config.src_handshaking = spi_port_get_dma_trans_src_handshaking(bus);
    if (transfer_config.src_handshaking == HAL_DMA_HANDSHAKING_MAX_NUM) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (uapi_dma_configure_peripheral_transfer_single(&transfer_config, &channel, spi_dma_isr, 1) != ERRCODE_SUCC) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    g_dma_trans_rx[bus].channel = channel + 1;
    g_dma_trans_rx[bus].trans_succ = false;

    if (uapi_dma_start_transfer(channel) != ERRCODE_SUCC) {
        g_dma_trans_rx[bus].channel = 0;
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (osal_sem_down_timeout(&(g_dma_trans_rx[bus].dma_sem), timeout) != OSAL_SUCCESS) {
        uapi_dma_end_transfer(channel);
        g_dma_trans_rx[bus].channel = 0;
        return ERRCODE_SPI_DMA_TRANSFER_ERROR;
    }

    uapi_dma_end_transfer(channel);
    g_dma_trans_rx[bus].channel = 0;

    if (!g_dma_trans_rx[bus].trans_succ) {
        return ERRCODE_SPI_DMA_TRANSFER_ERROR;
    }

    return ERRCODE_SUCC;
}

static errcode_t spi_read_dma_config(spi_bus_t bus, uint8_t *ch, const spi_xfer_data_t *data)
{
    dma_ch_user_peripheral_config_t transfer_config = { 0 };
    uint8_t channel = DMA_CHANNEL_NONE;

    spi_dma_rx_config(&transfer_config, bus);
    transfer_config.dest = (uint32_t)(uintptr_t)data->rx_buff;
#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    transfer_config.dest_width = spi_dma_get_mem_width(transfer_config.dest, data->rx_bytes);
    transfer_config.burst_length = HAL_DMA_BURST_TRANSACTION_LENGTH_1;
    transfer_config.priority = HAL_DMA_CH_PRIORITY_0;
    transfer_config.transfer_num = (uint16_t)data->rx_bytes >> transfer_config.src_width;
#else
    if (data->rx_bytes % bit(g_dma_cfg[bus].src_width) == 0) {
        transfer_config.transfer_num = (uint16_t)data->rx_bytes >> g_dma_cfg[bus].src_width;
    } else {
        return ERRCODE_INVALID_PARAM;
    }

    spi_dma_common_config(&transfer_config, &(g_dma_cfg[bus]));
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    transfer_config.src_handshaking = spi_port_get_dma_trans_src_handshaking(bus);
    if (transfer_config.src_handshaking == HAL_DMA_HANDSHAKING_MAX_NUM) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (uapi_dma_configure_peripheral_transfer_single(&transfer_config, &channel, spi_dma_isr, 1) != ERRCODE_SUCC) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    g_dma_trans_rx[bus].channel = channel + 1;
    g_dma_trans_rx[bus].trans_succ = false;
    *ch = channel;

    return ERRCODE_SUCC;
}

static errcode_t spi_write_dma_config(spi_bus_t bus, uint8_t *ch, const spi_xfer_data_t *data)
{
    dma_ch_user_peripheral_config_t transfer_config = { 0 };
    uint8_t channel = DMA_CHANNEL_NONE;

    transfer_config.src = (uint32_t)(uintptr_t)data->tx_buff;
#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    transfer_config.src_width = spi_dma_get_mem_width(transfer_config.src, data->tx_bytes);
    transfer_config.transfer_num = (uint16_t)data->tx_bytes >> transfer_config.src_width;
    transfer_config.burst_length = HAL_DMA_BURST_TRANSACTION_LENGTH_1;
    transfer_config.priority = HAL_DMA_CH_PRIORITY_0;
#else
    if (data->tx_bytes % bit(g_dma_cfg[bus].src_width) == 0) {
        transfer_config.transfer_num = (uint16_t)data->tx_bytes >> g_dma_cfg[bus].src_width;
    } else {
        return ERRCODE_INVALID_PARAM;
    }

    spi_dma_common_config(&transfer_config, &(g_dma_cfg[bus]));
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
    spi_dma_tx_config(&transfer_config, bus);

    transfer_config.dest_handshaking = spi_port_get_dma_trans_dest_handshaking(bus);
    if (transfer_config.dest_handshaking == HAL_DMA_HANDSHAKING_MAX_NUM) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (uapi_dma_configure_peripheral_transfer_single(&transfer_config, &channel, spi_dma_isr, 0) != ERRCODE_SUCC) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    g_dma_trans_tx[bus].channel = channel + 1;
    g_dma_trans_tx[bus].trans_succ = false;
    *ch = channel;

    return ERRCODE_SUCC;
}

static void spi_writeread_dma_clear_trans(spi_bus_t bus, uint8_t channel_tx, uint8_t channel_rx)
{
    g_dma_trans_tx[bus].channel = 0;
    g_dma_trans_rx[bus].channel = 0;
    uapi_dma_end_transfer(channel_rx);
    uapi_dma_end_transfer(channel_tx);
}

static errcode_t spi_writeread_dma(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    uint8_t channel_rx;
    uint8_t channel_tx;

    if (data->tx_buff == NULL || data->tx_bytes == 0) {
        return ERRCODE_INVALID_PARAM;
    }

    if (spi_read_dma_config(bus, &channel_rx, data) != ERRCODE_SUCC) {
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (uapi_dma_start_transfer(channel_rx) != ERRCODE_SUCC) {
        g_dma_trans_rx[bus].channel = 0;
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (spi_write_dma_config(bus, &channel_tx, data) != ERRCODE_SUCC) {
        uapi_dma_end_transfer(channel_rx);
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (uapi_dma_start_transfer(channel_tx) != ERRCODE_SUCC) {
        uapi_dma_end_transfer(channel_rx);
        g_dma_trans_tx[bus].channel = 0;
        return ERRCODE_SPI_DMA_CONFIG_ERROR;
    }

    if (osal_sem_down_timeout(&(g_dma_trans_tx[bus].dma_sem), timeout) != OSAL_SUCCESS) {
        spi_writeread_dma_clear_trans(bus, channel_tx, channel_rx);
        return ERRCODE_SPI_DMA_TRANSFER_ERROR;
    }

    if (osal_sem_down_timeout(&(g_dma_trans_rx[bus].dma_sem), timeout) != OSAL_SUCCESS) {
        spi_writeread_dma_clear_trans(bus, channel_tx, channel_rx);
        return ERRCODE_SPI_DMA_TRANSFER_ERROR;
    }

    spi_writeread_dma_clear_trans(bus, channel_tx, channel_rx);

    if ((!g_dma_trans_tx[bus].trans_succ) || (!g_dma_trans_rx[bus].trans_succ)) {
        return ERRCODE_SPI_DMA_TRANSFER_ERROR;
    }

    return ERRCODE_SUCC;
}
#endif /* CONFIG_SPI_SUPPORT_DMA */

errcode_t uapi_spi_set_tmod(spi_bus_t bus, hal_spi_trans_mode_t tmod, uint8_t data_frame_num)
{
    if (bus >= SPI_BUS_MAX_NUM || tmod >= HAL_SPI_TRANS_MODE_MAX) {
        return ERRCODE_INVALID_PARAM;
    }
    spi_attr_t attr;
    attr.ndf = data_frame_num;
    attr.tmod = tmod;
    return hal_spi_ctrl(bus, SPI_CTRL_SET_TMOD, (uintptr_t)&attr);
}

errcode_t uapi_spi_set_attr(spi_bus_t bus, spi_attr_t *attr)
{
    if (bus >= SPI_BUS_MAX_NUM || attr == NULL) {
        return ERRCODE_INVALID_PARAM;
    }

    return hal_spi_ctrl(bus, SPI_CTRL_SET_ATTR, (uintptr_t)attr);
}

errcode_t uapi_spi_get_attr(spi_bus_t bus, spi_attr_t *attr)
{
    if (bus >= SPI_BUS_MAX_NUM || attr == NULL) {
        return ERRCODE_INVALID_PARAM;
    }

    return hal_spi_ctrl(bus, SPI_CTRL_GET_ATTR, (uintptr_t)attr);
}

errcode_t uapi_spi_set_extra_attr(spi_bus_t bus, spi_extra_attr_t *extra_attr)
{
    if (bus >= SPI_BUS_MAX_NUM || extra_attr == NULL) {
        return ERRCODE_INVALID_PARAM;
    }

    return hal_spi_ctrl(bus, SPI_CTRL_SET_EXTRA_ATTR, (uintptr_t)extra_attr);
}

errcode_t uapi_spi_get_extra_attr(spi_bus_t bus, spi_extra_attr_t *extra_attr)
{
    if (bus >= SPI_BUS_MAX_NUM || extra_attr == NULL) {
        return ERRCODE_INVALID_PARAM;
    }

    return hal_spi_ctrl(bus, SPI_CTRL_GET_EXTRA_ATTR, (uintptr_t)extra_attr);
}

#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
errcode_t uapi_spi_set_irq_mode(spi_bus_t bus, bool irq_en, spi_rx_callback_t rx_callback,
                                spi_tx_callback_t tx_callback)
{
    if (bus >= SPI_BUS_MAX_NUM) {
        return ERRCODE_INVALID_PARAM;
    }

#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    if (g_dma_trans_tx[bus].is_enable || g_dma_trans_rx[bus].is_enable) {
        return ERRCODE_SPI_DMA_IRQ_MODE_MUTEX;
    }
#endif /* CONFIG_SPI_SUPPORT_DMA */
    uint32_t irq_sts = spi_porting_lock(bus);
    if (irq_en) {
        g_spi_rx_state[bus].rx_callback = rx_callback;
        g_spi_tx_state[bus].tx_callback = tx_callback;
    } else {
        g_spi_rx_state[bus].rx_callback = NULL;
        g_spi_tx_state[bus].tx_callback = NULL;
    }

    g_spi_rx_int_flag[bus] = irq_en;
    g_spi_tx_int_flag[bus] = irq_en;
    spi_porting_unlock(bus, irq_sts);

    return ERRCODE_SUCC;
}
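
/*
 * Sketch of interrupt-driven operation (illustrative, not part of the driver).
 * The callback signatures are inferred from how rx_callback/tx_callback are
 * invoked in spi_rx_isr() and spi_helper_invoke_current_fragment_callback();
 * check spi.h for the authoritative spi_rx_callback_t/spi_tx_callback_t types.
 * @code
 *     static void app_rx_done(const void *buffer, uint32_t length, bool error)
 *     {
 *         // buffer holds `length` received bytes; `error` flags an RX fault
 *     }
 *
 *     static void app_tx_done(const void *buffer, uint32_t length)
 *     {
 *         // the TX fragment `buffer` of `length` bytes may now be reused
 *     }
 *
 *     (void)uapi_spi_set_irq_mode(SPI_BUS_0, true, app_rx_done, app_tx_done);
 * @endcode
 */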

static void spi_data_send(spi_bus_t bus)
{
    bool tx_fifo_full = true;
    hal_spi_attr_t attr; /* will copy attr from hal, no need init to 0. */

    hal_spi_ctrl(bus, SPI_CTRL_GET_ATTR, (uintptr_t)&attr);
    uint32_t frame_bytes = hal_spi_frame_size_trans_to_frame_bytes(attr.frame_size);

    if (g_spi_tx_state[bus].current_tx_fragment_pos == 0) {
        if (hal_spi_ctrl(bus, SPI_CTRL_CHECK_FIFO_BUSY, (uintptr_t)SPI_TX_FIFO_BUSY_TIMEOUT) != ERRCODE_SUCC) {
            return;
        }
    }

    hal_spi_ctrl(bus, SPI_CTRL_CHECK_TX_FIFO_FULL, (uintptr_t)&tx_fifo_full);
    /* Populate the SPI TX FIFO while there is data to send */
    while (!tx_fifo_full) {
        /* There is some data to transmit so provide another frame to the SPI */
        bool end_of_fragment = spi_helper_send_next_char(bus, frame_bytes);
        if (end_of_fragment) {
            /* If it is the end of the fragment, invoke the callback and move to the next one */
            spi_helper_invoke_current_fragment_callback(bus);
            spi_helper_move_to_next_fragment(bus);
            /* As it was the only fragment, leave */
            break;
        }

        hal_spi_ctrl(bus, SPI_CTRL_CHECK_TX_FIFO_FULL, (uintptr_t)&tx_fifo_full);
    }
}

static errcode_t spi_write_int(spi_bus_t bus, const spi_xfer_data_t *data)
{
    if (data->tx_buff == NULL || data->tx_bytes == 0) {
        return ERRCODE_INVALID_PARAM;
    }
    uint32_t irq_sts = spi_porting_lock(bus);

    if (!spi_helper_add_fragment(bus, data->tx_buff, data->tx_bytes)) {
        spi_porting_unlock(bus, irq_sts);
        return ERRCODE_SPI_ADD_QUEUE_FAIL;
    }
    /* If it is the first fragment on the list, no other fragments require
       transmission, so start the transmission now */
    if (spi_helper_is_the_current_fragment_the_last_to_process(bus)) {
        spi_data_send(bus);
        /* If it has not finished transmitting, enable the interrupts */
        if (spi_helper_are_there_fragments_to_process(bus)) {
            hal_spi_ctrl(bus, SPI_CTRL_EN_TXEI_INT, 1);
        }
    }
    spi_porting_unlock(bus, irq_sts);
    return ERRCODE_SUCC;
}

static errcode_t spi_register_rx_callback(spi_bus_t bus, const spi_xfer_data_t *data)
{
    spi_attr_t attr; /* uapi_spi_get_attr will copy attr from hal, no need init to 0. */
    errcode_t ret = ERRCODE_SUCC;
    if (uapi_spi_get_attr(bus, &attr) != ERRCODE_SUCC) {
        return ERRCODE_SPI_CONFIG_FAIL;
    }
    uint32_t irq_sts = spi_porting_lock(bus);
    g_spi_rx_state[bus].rx_buffer = data->rx_buff;
    g_spi_rx_state[bus].rx_buffer_size = data->rx_bytes;
    hal_spi_ctrl(bus, SPI_CTRL_EN_RXFI_INT, 1);

    if (spi_porting_get_device_mode(bus) == SPI_MODE_MASTER && attr.tmod == HAL_SPI_TRANS_MODE_RX) {
        uint32_t frame_bytes = hal_spi_frame_size_trans_to_frame_bytes(attr.frame_size);
        if (frame_bytes == 0) {
            spi_porting_unlock(bus, irq_sts); /* do not leak the lock on the error paths */
            return ERRCODE_SPI_CONFIG_FAIL;
        }
        attr.ndf = g_spi_rx_state[bus].rx_buffer_size / frame_bytes;
        if (uapi_spi_set_attr(bus, &attr) != ERRCODE_SUCC) {
            spi_porting_unlock(bus, irq_sts);
            return ERRCODE_SPI_CONFIG_FAIL;
        }
        uint32_t data_tx = 0;
        spi_xfer_data_t data_write = { 0 };
        data_write.tx_buff = (uint8_t *)(uintptr_t)&data_tx;
        data_write.tx_bytes = frame_bytes;
        ret = hal_spi_write(bus, (hal_spi_xfer_data_t *)&data_write, CONFIG_SPI_MAX_TIMEOUT);
    }
    spi_porting_unlock(bus, irq_sts);

    return ret;
}
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */

#if defined(CONFIG_SPI_SUPPORT_MASTER) && (CONFIG_SPI_SUPPORT_MASTER == 1)
errcode_t uapi_spi_select_slave(spi_bus_t bus, spi_slave_t cs)
{
    errcode_t ret;

    if (bus >= SPI_BUS_MAX_NUM || cs >= SPI_SLAVE_MAX_NUM) {
        return ERRCODE_INVALID_PARAM;
    }
    if (spi_porting_get_device_mode(bus) != SPI_MODE_MASTER) {
        return ERRCODE_SPI_MODE_MISMATCH;
    }

    uint32_t irq_sts = spi_porting_lock(bus);

    ret = hal_spi_ctrl(bus, SPI_CTRL_SELECT_SLAVE, (uintptr_t)cs);
    spi_porting_unlock(bus, irq_sts);

    return ret;
}

errcode_t uapi_spi_master_write(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    errcode_t ret;
    uint32_t timeout_tmp = timeout == 0 ? CONFIG_SPI_MAX_TIMEOUT : timeout;

    ret = spi_param_check_write(bus, SPI_MODE_MASTER, HAL_SPI_TRANS_MODE_RX, data, true);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    if (data->tx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD) {
        uapi_spi_set_dma_mode(bus, true, NULL);
        ret = spi_write_dma(bus, data, timeout_tmp);
        if (hal_spi_ctrl(bus, SPI_CTRL_CHECK_FIFO_BUSY, (uintptr_t)SPI_TX_FIFO_BUSY_TIMEOUT) != ERRCODE_SUCC) {
            spi_mutex_unlock(bus);
            return ERRCODE_SPI_TIMEOUT;
        }
        spi_mutex_unlock(bus);
        return ret;
    }
    uapi_spi_set_dma_mode(bus, false, NULL);
#else
#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    if (g_dma_trans_tx[bus].is_enable) {
        ret = spi_write_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_DMA */
#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    if (g_spi_tx_int_flag[bus]) {
        ret = spi_write_int(bus, data);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    uint32_t irq_sts = spi_porting_lock(bus);
    ret = hal_spi_write(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    spi_porting_unlock(bus, irq_sts);
    spi_mutex_unlock(bus);

    return ret;
}

#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
static errcode_t spi_read_by_writeread(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    spi_attr_t attr; /* uapi_spi_get_attr will copy attr from hal, no need init to 0. */
    uint32_t data_tx = 0;
    spi_xfer_data_t data_writeread = { 0 };

    uapi_spi_get_attr(bus, &attr);
    uint32_t ndf_before = attr.ndf;
    uint32_t frame_bytes = hal_spi_frame_size_trans_to_frame_bytes(attr.frame_size);
    if (frame_bytes == 0) {
        return ERRCODE_SPI_CONFIG_FAIL;
    }
    attr.ndf = data->rx_bytes / frame_bytes;
    if (uapi_spi_set_attr(bus, &attr) != ERRCODE_SUCC) {
        return ERRCODE_SPI_CONFIG_FAIL;
    }
    data_writeread.rx_buff = data->rx_buff;
    data_writeread.rx_bytes = data->rx_bytes;
    data_writeread.tx_buff = (uint8_t *)(uintptr_t)&data_tx;
    data_writeread.tx_bytes = frame_bytes;

    errcode_t ret = uapi_spi_master_writeread(bus, &data_writeread, timeout);

    attr.ndf = ndf_before;
    if (uapi_spi_set_attr(bus, &attr) != ERRCODE_SUCC) {
        return ERRCODE_SPI_CONFIG_FAIL;
    }

    return ret;
}

#if !defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH)
static errcode_t spi_master_read_dma(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    if (spi_get_attr_tmod(bus) == HAL_SPI_TRANS_MODE_RX) {
        return spi_read_by_writeread(bus, data, timeout);
    }
    return spi_read_dma(bus, data, timeout);
}
#endif /* NOT CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */
#endif /* CONFIG_SPI_SUPPORT_DMA */

errcode_t uapi_spi_master_read(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    errcode_t ret;
    uint32_t timeout_tmp = timeout == 0 ? CONFIG_SPI_MAX_TIMEOUT : timeout;

    ret = spi_param_check_read(bus, SPI_MODE_MASTER, HAL_SPI_TRANS_MODE_TX, data, true);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    if (data->rx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD) {
        /* When the threshold is exceeded, the writeread interface should be used to write data and then read data. */
        ret = spi_read_by_writeread(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
    uapi_spi_set_dma_mode(bus, false, NULL);
#else
#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    if (g_dma_trans_rx[bus].is_enable) {
        ret = spi_master_read_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_DMA */

#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    if (g_spi_rx_int_flag[bus]) {
        ret = spi_register_rx_callback(bus, data);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    uint32_t irq_sts = spi_porting_lock(bus);
    ret = hal_spi_read(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    spi_porting_unlock(bus, irq_sts);
    spi_mutex_unlock(bus);

    return ret;
}

errcode_t uapi_spi_master_writeread(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    errcode_t ret;
    uint32_t timeout_tmp = timeout == 0 ? CONFIG_SPI_MAX_TIMEOUT : timeout;

    ret = spi_param_check_read(bus, SPI_MODE_MASTER, HAL_SPI_TRANS_MODE_TX, data, true);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    if (data->rx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD ||
        data->tx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD) {
        uapi_spi_set_dma_mode(bus, true, NULL);
        ret = spi_writeread_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
    uapi_spi_set_dma_mode(bus, false, NULL);
#else
#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    if (g_dma_trans_tx[bus].is_enable && g_dma_trans_rx[bus].is_enable) {
        ret = spi_writeread_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_DMA */
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    uint32_t irq_sts = spi_porting_lock(bus);
    ret = hal_spi_write(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    if (ret == ERRCODE_SUCC) { /* do not let the read result mask a write failure */
        ret = hal_spi_read(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    }
    spi_porting_unlock(bus, irq_sts);
    spi_mutex_unlock(bus);
    return ret;
}
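
/*
 * Sketch of a blocking full-duplex master transfer (illustrative only); the
 * command byte is a made-up example. A timeout of 0 falls back to
 * CONFIG_SPI_MAX_TIMEOUT, as handled above.
 * @code
 *     uint8_t tx[4] = { 0x9F, 0x00, 0x00, 0x00 };  // hypothetical read-ID command
 *     uint8_t rx[4] = { 0 };
 *     spi_xfer_data_t xfer = { 0 };
 *     xfer.tx_buff = tx;
 *     xfer.tx_bytes = sizeof(tx);
 *     xfer.rx_buff = rx;
 *     xfer.rx_bytes = sizeof(rx);
 *     if (uapi_spi_master_writeread(SPI_BUS_0, &xfer, 0) != ERRCODE_SUCC) {
 *         // handle the transfer error
 *     }
 * @endcode
 */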
#endif /* CONFIG_SPI_SUPPORT_MASTER */

#if defined(CONFIG_SPI_SUPPORT_SLAVE) && (CONFIG_SPI_SUPPORT_SLAVE == 1)
errcode_t uapi_spi_slave_write(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    errcode_t ret;
    uint32_t timeout_tmp = timeout == 0 ? CONFIG_SPI_MAX_TIMEOUT : timeout;

    if (data == NULL || data->tx_buff == NULL || data->tx_bytes == 0) {
        return ERRCODE_INVALID_PARAM;
    }

    ret = spi_param_check_write(bus, SPI_MODE_SLAVE, HAL_SPI_TRANS_MODE_RX, data, true);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    if (data->tx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD) {
        uapi_spi_set_dma_mode(bus, true, NULL);
        ret = spi_write_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
    uapi_spi_set_dma_mode(bus, false, NULL);
#else
#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    if (g_dma_trans_tx[bus].is_enable) {
        ret = spi_write_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_DMA */
#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    if (g_spi_tx_int_flag[bus]) {
        ret = spi_write_int(bus, data);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    uint32_t irq_sts = spi_porting_lock(bus);
    ret = hal_spi_write(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    spi_porting_unlock(bus, irq_sts);
    spi_mutex_unlock(bus);

    return ret;
}

errcode_t uapi_spi_slave_read(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    errcode_t ret;
    uint32_t timeout_tmp = timeout == 0 ? CONFIG_SPI_MAX_TIMEOUT : timeout;

    ret = spi_param_check_read(bus, SPI_MODE_SLAVE, HAL_SPI_TRANS_MODE_TX, data, false);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    if (data->rx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD) {
        uapi_spi_set_dma_mode(bus, true, NULL);
        ret = spi_read_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
    uapi_spi_set_dma_mode(bus, false, NULL);
#else
#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    if (g_dma_trans_rx[bus].is_enable) {
        ret = spi_read_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_DMA */

#if defined(CONFIG_SPI_SUPPORT_INTERRUPT) && (CONFIG_SPI_SUPPORT_INTERRUPT == 1)
    if (g_spi_rx_int_flag[bus]) {
        ret = spi_register_rx_callback(bus, data);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_INTERRUPT */
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    uint32_t irq_sts = spi_porting_lock(bus);
    ret = hal_spi_read(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    spi_porting_unlock(bus, irq_sts);
    spi_mutex_unlock(bus);

    return ret;
}

errcode_t uapi_spi_slave_writeread(spi_bus_t bus, const spi_xfer_data_t *data, uint32_t timeout)
{
    errcode_t ret;
    uint32_t timeout_tmp = timeout == 0 ? CONFIG_SPI_MAX_TIMEOUT : timeout;

    ret = spi_param_check_read(bus, SPI_MODE_SLAVE, HAL_SPI_TRANS_MODE_TX, data, false);
    if (ret != ERRCODE_SUCC) {
        return ret;
    }

#if defined(CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH) && (CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH == 1)
    if (data->rx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD ||
        data->tx_bytes > CONFIG_SPI_AUTO_SWITCH_DMA_THRESHOLD) {
        uapi_spi_set_dma_mode(bus, true, NULL);
        ret = spi_writeread_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
    uapi_spi_set_dma_mode(bus, false, NULL);
#else
#if defined(CONFIG_SPI_SUPPORT_DMA) && (CONFIG_SPI_SUPPORT_DMA == 1)
    if (g_dma_trans_tx[bus].is_enable && g_dma_trans_rx[bus].is_enable) {
        ret = spi_writeread_dma(bus, data, timeout_tmp);
        spi_mutex_unlock(bus);
        return ret;
    }
#endif /* CONFIG_SPI_SUPPORT_DMA */
#endif /* CONFIG_SPI_SUPPORT_POLL_AND_DMA_AUTO_SWITCH */

    uint32_t irq_sts = spi_porting_lock(bus);
    ret = hal_spi_write(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    if (ret == ERRCODE_SUCC) { /* do not let the read result mask a write failure */
        ret = hal_spi_read(bus, (hal_spi_xfer_data_t *)data, timeout_tmp);
    }
    hal_spi_ctrl(bus, SPI_CTRL_CHECK_FIFO_BUSY, timeout_tmp);
    spi_porting_unlock(bus, irq_sts);
    spi_mutex_unlock(bus);
    return ret;
}
#endif /* CONFIG_SPI_SUPPORT_SLAVE */

#if defined(CONFIG_SPI_SUPPORT_LOOPBACK)
errcode_t uapi_spi_set_loop_back_mode(spi_bus_t bus, bool loopback_en)
{
    unused(bus);
    unused(loopback_en);
    return ERRCODE_SUCC;
}
#endif /* CONFIG_SPI_SUPPORT_LOOPBACK */

#if defined(CONFIG_SPI_SUPPORT_CRC)
errcode_t uapi_spi_set_crc_mode(spi_bus_t bus, const spi_crc_config_t *crc_config, spi_crc_err_callback_t cb)
{
    unused(bus);
    unused(crc_config);
    unused(cb);
    return ERRCODE_SUCC;
}
#endif /* CONFIG_SPI_SUPPORT_CRC */

#if defined(CONFIG_SPI_SUPPORT_LPM)
errcode_t uapi_spi_suspend(uintptr_t arg)
{
    if (!g_spi_is_initialised[arg]) {
        return ERRCODE_SUCC;
    }
    if (hal_spi_ctrl(arg, SPI_CTRL_SUSPEND, 0) != ERRCODE_SUCC) {
        return ERRCODE_FAIL;
    }
#if defined(CONFIG_SPI_SUPPORT_LPC)
    spi_port_clock_enable(arg, false);
#endif
#if defined(CONFIG_SPI_SUPPORT_DMA)
    uapi_dma_suspend(arg);
#endif

    return ERRCODE_SUCC;
}

errcode_t uapi_spi_resume(uintptr_t arg)
{
    if (!g_spi_is_initialised[arg]) {
        return ERRCODE_SUCC;
    }
#if defined(CONFIG_SPI_SUPPORT_LPC)
    spi_port_clock_enable(arg, true);
#endif

    if (hal_spi_ctrl(arg, SPI_CTRL_RESUME, 0) != ERRCODE_SUCC) {
        return ERRCODE_FAIL;
    }

#if defined(CONFIG_SPI_SUPPORT_DMA)
    uapi_dma_resume(arg);
#endif
    return ERRCODE_SUCC;
}
#endif /* CONFIG_SPI_SUPPORT_LPM */
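
/*
 * Sketch of a low-power cycle (illustrative only). The uintptr_t argument
 * carries the bus id, as implied by the g_spi_is_initialised[arg] indexing
 * above; the power-state hook is hypothetical.
 * @code
 *     if (uapi_spi_suspend((uintptr_t)SPI_BUS_0) == ERRCODE_SUCC) {
 *         enter_low_power_state();  // hypothetical platform hook
 *         (void)uapi_spi_resume((uintptr_t)SPI_BUS_0);
 *     }
 * @endcode
 */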