1 /*
2 * Copyright (c) 2021-2023 HPMicro
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 *
6 */
7 #include "hpm_sdxc_drv.h"
8
9
10 #define SDXC_DMA_MAX_XFER_LEN_26BIT ((1UL << 26) - 4U)
11 #define SDXC_DMA_MAX_XFER_LEN_16BIT ((1UL << 16) - 4U)
12
13 #define SDXC_SYS_DMA_ALIGN_LEN (4U)
14
/* Internal transfer option flags, combined into the xfer_flags argument of
 * sdxc_set_transfer_config() by sdxc_transfer_nonblocking(). */
enum {
    sdxc_cmd_only = (1UL << 0),             /* command without a data phase */
    sdxc_cmd_and_tx_data = (1UL << 1),      /* command followed by write data */
    sdxc_cmd_and_rx_data = (1UL << 2),      /* command followed by read data */
    sdxc_data_with_auto_cmd12 = (1UL << 3), /* enable auto CMD12 for the data transfer */
    sdxc_data_with_auto_cmd23 = (1UL << 4), /* enable auto CMD23 for the data transfer */
    sdxc_boot_data = (1UL << 5),            /* eMMC boot data transfer */
    sdxc_boot_data_continuous = (1UL << 6), /* continue an ongoing boot data transfer */
};
24
25 static const uint32_t s_sdxc_boot_dummy = 0;
26
27
28 static hpm_stat_t sdxc_set_transfer_config(SDXC_Type *base,
29 uint32_t xfer_flags,
30 uint32_t block_size,
31 uint32_t block_cnt,
32 uint32_t *new_flags);
33
34 static void sdxc_read_data_buf(SDXC_Type *base, uint32_t *data, uint32_t num_of_words);
35
36 static hpm_stat_t sdxc_read_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data);
37
38 static void sdxc_write_data_buf(SDXC_Type *base, const uint32_t *data, uint32_t num_of_words);
39
40 static hpm_stat_t sdxc_write_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data);
41
42 static hpm_stat_t sdxc_transfer_data_blocking(SDXC_Type *base, sdxc_data_t *data, bool enable_dma);
43
44 static hpm_stat_t sdxc_tuning_error_recovery(SDXC_Type *base);
45
46 static bool sdxc_is_bus_idle(SDXC_Type *base);
47
/**
 * @brief Compose the CMD_XFER flag word for an upcoming transfer.
 *
 * Validates the bus state and block count, then builds the transfer-related
 * bit fields (direction, multi-block, auto CMD12/CMD23) on top of the current
 * CMD_XFER register content and returns them through @p new_flags.
 *
 * @param base SDXC base address
 * @param xfer_flags Combination of the sdxc_cmd_*/sdxc_data_*/sdxc_boot_* flags
 * @param block_size Transfer block size in bytes
 * @param block_cnt Number of blocks to transfer
 * @param new_flags [out] Composed CMD_XFER flag value. NOTE: not written on the
 *        sdxc_boot_data_continuous early-return path — the caller pre-initializes it.
 * @retval status_success, status_sdxc_busy or status_invalid_argument
 */
static hpm_stat_t sdxc_set_transfer_config(SDXC_Type *base,
                                           uint32_t xfer_flags,
                                           uint32_t block_size,
                                           uint32_t block_cnt,
                                           uint32_t *new_flags)
{
    /* Start from the current register value with every transfer-related field cleared */
    uint32_t flags = base->CMD_XFER & ~(SDXC_CMD_XFER_MULTI_BLK_SEL_MASK | SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK |
                                        SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DATA_XFER_DIR_MASK |
                                        SDXC_CMD_XFER_AUTO_CMD_ENABLE_MASK | SDXC_CMD_XFER_RESP_TYPE_SELECT_MASK);
    if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_CMD_INHIBIT_MASK)) {
        /* Command line is still busy with a previous command */
        return status_sdxc_busy;
    } else {
        if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_boot_data_continuous)) {
            /* Resume a stopped boot transfer at the block gap; no flag rebuild needed */
            base->PROT_CTRL &= ~SDXC_PROT_CTRL_STOP_BG_REQ_MASK;
            base->PROT_CTRL |= SDXC_PROT_CTRL_CONTINUE_REQ_MASK;
            return status_success;
        }

        if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_DAT_INHIBIT_MASK)) {
            return status_sdxc_busy;
        }

        /* Block count must fit in the BLOCK_CNT field */
        if (block_cnt > SDXC_BLK_ATTR_BLOCK_CNT_GET(SDXC_BLK_ATTR_BLOCK_CNT_MASK)) {
            return status_invalid_argument;
        }

        /* DATA_XFER_DIR set means card-to-host (read) */
        if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_cmd_and_rx_data)) {
            flags |= SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
        }
        if (block_cnt > 1U) {
            flags |= SDXC_CMD_XFER_MULTI_BLK_SEL_MASK | SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK;
            /* Auto CMD12 only makes sense for multi-block transfers */
            if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_data_with_auto_cmd12)) {
                flags |= SDXC_CMD_XFER_AUTO_CMD_ENABLE_SET(sdxc_auto_cmd12_enabled);
            }
        }

        if (IS_HPM_BITMASK_SET(xfer_flags, sdxc_data_with_auto_cmd23)) {
            flags |= SDXC_CMD_XFER_AUTO_CMD_ENABLE_SET(sdxc_auto_cmd23_enabled);
        }

        if (IS_HPM_BITMASK_CLR(xfer_flags, sdxc_boot_data)) {
            /* Normal transfer: BLK_ATTR holds the block size, SDMASA the block count */
            base->BLK_ATTR = block_size;
            base->SDMASA = block_cnt;
        } else {
            /* Boot transfer: force multi-block with block counting */
            flags |= SDXC_CMD_XFER_MULTI_BLK_SEL_MASK | SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK;
        }
    }

    *new_flags = flags;

    return status_success;
}
101
102
/**
 * @brief Read back the response of the last completed command.
 *
 * For long (R2) responses the four RESP registers are re-assembled into the
 * SD-spec layout: each word is shifted left by 8 and topped up with the high
 * byte of the previous word, because the controller strips CRC7 and end bit.
 *
 * @param base SDXC base address
 * @param cmd Command descriptor; response[] and auto_cmd_resp are filled in
 * @retval status_success on success
 * @retval status_sdxc_send_cmd_failed when the response contains any of the
 *         error bits the caller listed in resp_error_flags
 */
hpm_stat_t sdxc_receive_cmd_response(SDXC_Type *base, sdxc_command_t *cmd)
{
    hpm_stat_t status = status_success;

    if (cmd->resp_type != sdxc_dev_resp_none) {

        cmd->response[0] = base->RESP[0];
        if (cmd->resp_type == sdxc_dev_resp_r2) {

            /* R3-R2-R1-R0 (lowest 8 bits are invalid bits) has the same format as R2 format in SD spec
             * after removing internal CRC7 and end bit
             */
            cmd->response[0] = (base->RESP[0] << 8);
            cmd->response[1] = (base->RESP[1] << 8) | (base->RESP[0] >> 24);
            cmd->response[2] = (base->RESP[2] << 8) | (base->RESP[1] >> 24);
            cmd->response[3] = (base->RESP[3] << 8) | (base->RESP[2] >> 24);
        }

        /* When auto CMD12 is enabled, its response is read from RESP[3] */
        if (SDXC_CMD_XFER_AUTO_CMD_ENABLE_GET(base->CMD_XFER) == sdxc_auto_cmd12_enabled) {
            cmd->auto_cmd_resp = base->RESP[3];
        }
    }

    /* check response flag — only response types that carry card status are screened */
    if ((cmd->resp_error_flags != 0U) &&
        ((cmd->resp_type == sdxc_dev_resp_r1) || (cmd->resp_type == sdxc_dev_resp_r1b) ||
         (cmd->resp_type == sdxc_dev_resp_r6) || (cmd->resp_type == sdxc_dev_resp_r5))) {
        if ((cmd->resp_error_flags & cmd->response[0]) != 0U) {
            status = status_sdxc_send_cmd_failed;
        }
    }

    return status;
}
137
/**
 * @brief Drain words from the controller's internal data buffer.
 *
 * Does nothing when the buffer is not readable.
 *
 * @param base SDXC base address
 * @param data Destination buffer
 * @param num_of_words Number of 32-bit words to read
 */
static void sdxc_read_data_buf(SDXC_Type *base, uint32_t *data, uint32_t num_of_words)
{
    if (!sdxc_is_data_buf_readable(base)) {
        return;
    }
    uint32_t *dest = data;
    for (uint32_t remaining = num_of_words; remaining > 0U; remaining--) {
        *dest++ = sdxc_read_data(base);
    }
}
146
/**
 * @brief Read transfer data through the CPU data buffer (PIO mode), blocking.
 *
 * Waits for each "buffer read ready" event and drains one block at a time.
 *
 * @note data->block_size may be modified (rounded up to a word multiple).
 * @note Busy-waits with no software timeout; relies on the controller's data
 *       timeout interrupt to break out of a stuck transfer.
 *
 * @retval status_success, status_invalid_argument,
 *         status_sdxc_data_crc_error or status_sdxc_data_timeout_error
 */
static hpm_stat_t sdxc_read_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data)
{
    uint32_t interrupt_status = 0;
    hpm_stat_t status = status_success;
    do {
        /* For multi-block transfer, the block size must be 4-byte aligned */
        if ((data->block_cnt > 1) && (data->block_size % sizeof(uint32_t) != 0)) {
            status = status_invalid_argument;
            break;
        }
        /* Single block: round the size up to a whole number of 32-bit words */
        if (data->block_size % sizeof(uint32_t) != 0U) {
            data->block_size += sizeof(uint32_t) - (data->block_size % sizeof(uint32_t));
        }

        uint32_t words_per_block = data->block_size / sizeof(uint32_t);
        uint32_t remaining_blocks = data->block_cnt;
        uint32_t *read_buf = data->rx_data;
        while (remaining_blocks > 0) {
            uint32_t status_flags = SDXC_INT_STAT_BUF_RD_READY_MASK | SDXC_STS_DATA_ERR;
            /* Wait until data is ready or timeout event occurs */
            do {
                interrupt_status = sdxc_get_interrupt_status(base);
            } while (!IS_HPM_BITMASK_SET(interrupt_status, status_flags));

            if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_CRC_ERR_MASK)) {
                /* Data CRC error: fatal unless the caller opted to ignore errors */
                if (!data->enable_ignore_error) {
                    status = status_sdxc_data_crc_error;
                    break;
                }
                sdxc_clear_interrupt_status(base, SDXC_STS_DATA_ERR);
            } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
                /* Data timeout is always fatal, even in ignore-error mode */
                status = status_sdxc_data_timeout_error;
                break;
            } else {
                /* Receive data block by block */
                sdxc_clear_interrupt_status(base, SDXC_INT_STAT_BUF_RD_READY_MASK);
                sdxc_read_data_buf(base, read_buf, words_per_block);
                read_buf += words_per_block;
                remaining_blocks--;
            }
        }

        sdxc_clear_interrupt_status(base, SDXC_INT_STAT_XFER_COMPLETE_MASK);

    } while (false);

    return status;
}
197
/**
 * @brief Push words into the controller's internal data buffer.
 *
 * Does nothing when the buffer is not writable.
 *
 * @param base SDXC base address
 * @param data Source buffer
 * @param num_of_words Number of 32-bit words to write
 */
static void sdxc_write_data_buf(SDXC_Type *base, const uint32_t *data, uint32_t num_of_words)
{
    if (!sdxc_is_data_buf_writable(base)) {
        return;
    }
    const uint32_t *src = data;
    for (uint32_t remaining = num_of_words; remaining > 0U; remaining--) {
        sdxc_write_data(base, *src++);
    }
}
206
/**
 * @brief Write transfer data through the CPU data buffer (PIO mode), blocking.
 *
 * Waits for each "buffer write ready" event and pushes one block at a time.
 *
 * @note data->block_size may be modified (rounded up to a word multiple).
 * @note Busy-waits with no software timeout; relies on the controller's data
 *       timeout interrupt to break out of a stuck transfer.
 *
 * @retval status_success, status_invalid_argument,
 *         status_sdxc_data_crc_error or status_sdxc_data_timeout_error
 */
static hpm_stat_t sdxc_write_via_data_buf_blocking(SDXC_Type *base, sdxc_data_t *data)
{
    uint32_t interrupt_status = 0;
    hpm_stat_t status = status_success;
    do {
        /* For multi-block transfer, the block size must be 4-byte aligned */
        if ((data->block_cnt > 1) && (data->block_size % sizeof(uint32_t) != 0)) {
            status = status_invalid_argument;
            break;
        }
        /* Single block: round the size up to a whole number of 32-bit words */
        if (data->block_size % sizeof(uint32_t) != 0U) {
            data->block_size += sizeof(uint32_t) - (data->block_size % sizeof(uint32_t));
        }

        uint32_t words_per_block = data->block_size / sizeof(uint32_t);
        uint32_t remaining_blocks = data->block_cnt;
        const uint32_t *write_buf = data->tx_data;
        while (remaining_blocks > 0) {
            uint32_t status_flags = SDXC_INT_STAT_BUF_WR_READY_MASK | SDXC_STS_DATA_ERR;
            /* Wait until write data is allowed or timeout event occurs */
            do {
                interrupt_status = sdxc_get_interrupt_status(base);
            } while (!IS_HPM_BITMASK_SET(interrupt_status, status_flags));

            if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_CRC_ERR_MASK)) {
                /* Data CRC error: fatal unless the caller opted to ignore errors */
                if (!data->enable_ignore_error) {
                    status = status_sdxc_data_crc_error;
                    break;
                }
                sdxc_clear_interrupt_status(base, SDXC_STS_DATA_ERR);
            } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
                /* Data timeout is always fatal, even in ignore-error mode */
                status = status_sdxc_data_timeout_error;
                break;
            } else {
                /* Send data block by block */
                sdxc_clear_interrupt_status(base, SDXC_INT_STAT_BUF_WR_READY_MASK);
                sdxc_write_data_buf(base, write_buf, words_per_block);
                write_buf += words_per_block;
                remaining_blocks--;
            }
        }

        sdxc_clear_interrupt_status(base, SDXC_INT_STAT_XFER_COMPLETE_MASK);

    } while (false);

    return status;
}
257
/**
 * @brief Check whether both the command and data lines are idle.
 *
 * @param base SDXC base address
 * @return true when no command is inhibited and the data lines are inactive
 */
static bool sdxc_is_bus_idle(SDXC_Type *base)
{
    const uint32_t busy_bits = SDXC_PSTATE_CMD_INHIBIT_MASK
                             | SDXC_PSTATE_DAT_LINE_ACTIVE_MASK
                             | SDXC_PSTATE_DAT_INHIBIT_MASK;

    return IS_HPM_BITMASK_CLR(base->PSTATE, busy_bits);
}
264
sdxc_get_capabilities(SDXC_Type * base,sdxc_capabilities_t * capabilities)265 hpm_stat_t sdxc_get_capabilities(SDXC_Type *base, sdxc_capabilities_t *capabilities)
266 {
267 hpm_stat_t status = status_invalid_argument;
268 do {
269 HPM_BREAK_IF((base == NULL) || (capabilities == NULL));
270
271 capabilities->capabilities1.U = base->CAPABILITIES1;
272 capabilities->capabilities2.U = base->CAPABILITIES2;
273 capabilities->curr_capabilities1.U = base->CURR_CAPABILITIES1;
274 capabilities->curr_capabilities2.U = base->CURR_CAPABILITIES2;
275
276 status = status_success;
277 } while (false);
278
279 return status;
280 }
281
/**
 * @brief Issue a command on the bus (non-blocking).
 *
 * Clears stale interrupt status, spin-waits (bounded) for the bus to go idle,
 * maps the response type onto the RESP_TYPE/CRC-check/index-check bits, applies
 * the command type, then writes CMD_ARG and CMD_XFER to start the command.
 *
 * @param base SDXC base address
 * @param cmd Command descriptor (index, argument, type, response type, flags)
 * @retval status_success command issued
 * @retval status_timeout bus stayed busy for the whole spin window
 */
hpm_stat_t sdxc_send_command(SDXC_Type *base, sdxc_command_t *cmd)
{
    /* Drop any stale status bits left over from a previous command */
    sdxc_clear_interrupt_status(base, ~0U);

    uint32_t cmd_xfer = base->CMD_XFER;
    uint32_t flags = cmd->cmd_flags;

    /* Bounded busy-wait for bus idle (iteration count, not a wall-clock time) */
    uint32_t wait_cnt = 1000000L;

    while (!sdxc_is_bus_idle(base) && (wait_cnt > 0U)) {
        wait_cnt--;
    }

    if (wait_cnt == 0) {
        return status_timeout;
    }

    /* Translate the response type into response-length and check-enable bits */
    if (IS_HPM_BITMASK_CLR(base->PSTATE, SDXC_PSTATE_CMD_INHIBIT_MASK) && (cmd->cmd_type != sdxc_cmd_type_empty)) {
        if ((cmd->resp_type == sdxc_dev_resp_r1) || (cmd->resp_type == sdxc_dev_resp_r5) ||
            (cmd->resp_type == sdxc_dev_resp_r6) || (cmd->resp_type == sdxc_dev_resp_r7)) {
            /* 48-bit response with CRC and command-index check */
            flags |= SDXC_CMD_RESP_LEN_48 | SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK |
                     SDXC_CMD_XFER_CMD_IDX_CHK_ENABLE_MASK;
        } else if ((cmd->resp_type == sdxc_dev_resp_r1b) || (cmd->resp_type == sdxc_dev_resp_r5b)) {
            /* 48-bit response with busy signaling */
            flags |= SDXC_CMD_RESP_LEN_48B | SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK |
                     SDXC_CMD_XFER_CMD_IDX_CHK_ENABLE_MASK;
        } else if (cmd->resp_type == sdxc_dev_resp_r2) {
            /* 136-bit response (CID/CSD), CRC check only */
            flags |= SDXC_CMD_RESP_LEN_136 | SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK;
        } else if ((cmd->resp_type == sdxc_dev_resp_r3) || (cmd->resp_type == sdxc_dev_resp_r4)) {
            /* 48-bit response without CRC/index check */
            flags |= SDXC_CMD_RESP_LEN_48;
        } else {
            /* do nothing */
        }
        /* With hardware response checking, suppress the response interrupt */
        if (IS_HPM_BITMASK_SET(flags, SDXC_CMD_XFER_RESP_ERR_CHK_ENABLE_MASK)) {
            flags |= SDXC_CMD_XFER_RESP_INT_DISABLE_MASK;
        }
    }

    switch (cmd->cmd_type) {
    case sdxc_cmd_type_abort_cmd:
        flags |= SDXC_CMD_TYPE_ABORT;
        break;
    case sdxc_cmd_type_suspend_cmd:
        flags |= SDXC_CMD_TYPE_SUSPEND;
        break;
    case sdxc_cmd_tye_resume_cmd:
        flags |= SDXC_CMD_TYPE_RESUME;
        break;
    default:
        flags |= SDXC_CMD_TYPE_NORMAL;
        break;
    }

    /* Clear every field that will be rebuilt from flags */
    cmd_xfer &= ~(SDXC_CMD_XFER_CMD_INDEX_MASK | SDXC_CMD_XFER_CMD_TYPE_MASK | SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK |
                  SDXC_CMD_XFER_CMD_IDX_CHK_ENABLE_MASK | SDXC_CMD_XFER_RESP_TYPE_SELECT_MASK |
                  SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_AUTO_CMD_ENABLE_MASK |
                  SDXC_CMD_XFER_DATA_XFER_DIR_MASK | SDXC_CMD_XFER_DMA_ENABLE_MASK | SDXC_CMD_XFER_MULTI_BLK_SEL_MASK |
                  SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK);

    /* Merge command index and the whitelisted flag bits */
    cmd_xfer |= SDXC_CMD_XFER_CMD_INDEX_SET(cmd->cmd_index) | ((flags & (SDXC_CMD_XFER_CMD_TYPE_MASK |
                                                                         SDXC_CMD_XFER_CMD_CRC_CHK_ENABLE_MASK |
                                                                         SDXC_CMD_XFER_CMD_IDX_CHK_ENABLE_MASK |
                                                                         SDXC_CMD_XFER_RESP_TYPE_SELECT_MASK |
                                                                         SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK |
                                                                         SDXC_CMD_XFER_DMA_ENABLE_MASK |
                                                                         SDXC_CMD_XFER_DATA_XFER_DIR_MASK |
                                                                         SDXC_CMD_XFER_AUTO_CMD_ENABLE_MASK |
                                                                         SDXC_CMD_XFER_MULTI_BLK_SEL_MASK |
                                                                         SDXC_CMD_XFER_BLOCK_COUNT_ENABLE_MASK |
                                                                         SDXC_CMD_XFER_RESP_TYPE_MASK |
                                                                         SDXC_CMD_XFER_RESP_ERR_CHK_ENABLE_MASK)));

    /* Writing CMD_XFER starts the command */
    base->CMD_ARG = cmd->cmd_argument;
    base->CMD_XFER = cmd_xfer;

    return status_success;
}
358
359
/**
 * @brief Translate the current interrupt status into a driver status code.
 *
 * Error bits are decoded in priority order (first match wins); auto-command
 * errors are further refined from the AC_HOST_CTRL error bits. The interrupt
 * status register is only read, never cleared, by this function.
 *
 * @param base SDXC base address
 * @return status_success when no error bit is set, otherwise the specific
 *         status_sdxc_* error code
 */
hpm_stat_t sdxc_parse_interrupt_status(SDXC_Type *base)
{
    uint32_t interrupt_status = sdxc_get_interrupt_status(base);
    hpm_stat_t status = status_success;
    if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_STS_ERROR)) {
        /* Command-line errors first, then data-line errors */
        if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_TOUT_ERR_MASK)) {
            status = status_sdxc_cmd_timeout_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_CRC_ERR_MASK)) {
            status = status_sdxc_cmd_crc_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_END_BIT_ERR_MASK)) {
            status = status_sdxc_cmd_end_bit_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_IDX_ERR_MASK)) {
            status = status_sdxc_cmd_index_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
            status = status_sdxc_data_timeout_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_CRC_ERR_MASK)) {
            status = status_sdxc_data_crc_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_END_BIT_ERR_MASK)) {
            status = status_sdxc_data_end_bit_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_AUTO_CMD_ERR_MASK)) {
            /* Refine using the low half of AC_HOST_CTRL (auto-command error status) */
            uint32_t auto_cmd_err_mask = base->AC_HOST_CTRL & 0xFFFFUL;
            if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD12_NOT_EXEC_MASK)) {
                status = status_sdxc_autocmd_cmd12_not_exec;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_TOUT_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_timeout_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_CRC_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_crc_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_IDX_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_index_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_EBIT_ERR_MASK)) {
                status = status_sdxc_autocmd_end_bit_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_AUTO_CMD_RESP_ERR_MASK)) {
                status = status_sdxc_autocmd_cmd_response_error;
            } else if (IS_HPM_BITMASK_SET(auto_cmd_err_mask, SDXC_AC_HOST_CTRL_CMD_NOT_ISSUED_AUTO_CMD12_MASK)) {
                status = status_sdxc_autocmd_cmd_not_issued_auto_cmd12;
            } else {
                status = status_sdxc_auto_cmd_error;
            }
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_ADMA_ERR_MASK)) {
            status = status_sdxc_adma_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_TUNING_ERR_MASK)) {
            status = status_sdxc_tuning_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_RESP_ERR_MASK)) {
            status = status_sdxc_response_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_BOOT_ACK_ERR_MASK)) {
            status = status_sdxc_boot_ack_error;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CARD_REMOVAL_MASK)) {
            status = status_sdxc_card_removed;
        } else {
            /* An error bit is set but none of the known causes matched */
            status = status_sdxc_error;
        }
    } else {
        status = status_success;
    }
    return status;
}
416
sdxc_wait_cmd_done(SDXC_Type * base,sdxc_command_t * cmd,bool polling_cmd_done)417 hpm_stat_t sdxc_wait_cmd_done(SDXC_Type *base, sdxc_command_t *cmd, bool polling_cmd_done)
418 {
419 hpm_stat_t status = status_success;
420 uint32_t interrupt_status = 0U;
421
422 if (polling_cmd_done) {
423 while (!IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_CMD_COMPLETE_MASK)) {
424 interrupt_status = sdxc_get_interrupt_status(base);
425 status = sdxc_parse_interrupt_status(base);
426 HPM_BREAK_IF(status != status_success);
427 }
428 sdxc_clear_interrupt_status(base, SDXC_INT_STAT_CMD_COMPLETE_MASK);
429
430 if (status == status_success) {
431 status = sdxc_receive_cmd_response(base, cmd);
432 }
433 }
434
435 return status;
436 }
437
438
/**
 * @brief Wait for a data transfer to finish, in DMA or PIO mode.
 *
 * In DMA mode, spins until transfer-complete or an error is flagged. On each
 * DMA interrupt in SDMA mode the next buffer address is written back to
 * ADMA_SYS_ADDR to let the transfer continue past an SDMA buffer boundary.
 * In PIO mode, delegates to the blocking data-buffer read/write helpers.
 *
 * @param base SDXC base address
 * @param data Data descriptor (direction chosen by rx_data != NULL in PIO mode)
 * @param enable_dma Whether the transfer was started with DMA
 * @retval status_success or status_sdxc_transfer_data_failed / PIO error codes
 */
static hpm_stat_t sdxc_transfer_data_blocking(SDXC_Type *base, sdxc_data_t *data, bool enable_dma)
{
    hpm_stat_t status = status_success;

    uint32_t interrupt_status = 0;

    if (enable_dma) {
        uint32_t status_flags = SDXC_INT_STAT_XFER_COMPLETE_MASK | SDXC_STS_ERROR;
        while (!IS_HPM_BITMASK_SET(interrupt_status, status_flags)) {
            interrupt_status = sdxc_get_interrupt_status(base);
            if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DMA_INTERRUPT_MASK)) {
                sdxc_clear_interrupt_status(base, SDXC_INT_STAT_DMA_INTERRUPT_MASK);
                /* SDMA pauses at each buffer boundary; advance the address to resume */
                if (SDXC_PROT_CTRL_DMA_SEL_GET(base->PROT_CTRL) == (uint32_t) sdxc_dmasel_sdma) {
                    base->ADMA_SYS_ADDR += data->block_size;
                }
            }
        }

        if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_TUNING_ERR_MASK)) {
            status = status_sdxc_transfer_data_failed;
        } else if (IS_HPM_BITMASK_SET(interrupt_status, SDXC_STS_DATA_ERR | SDXC_INT_STAT_ADMA_ERR_MASK)) {
            /* Data timeout is fatal even when the caller ignores other errors */
            if ((!data->enable_ignore_error) ||
                IS_HPM_BITMASK_SET(interrupt_status, SDXC_INT_STAT_DATA_TOUT_ERR_MASK)) {
                status = status_sdxc_transfer_data_failed;
            }
        } else {
            /* Do nothing */
        }

        /* Continuous boot transfers deliver a dummy word into the first slot */
        if ((data->data_type == (uint8_t) sdxc_xfer_data_boot_continuous) && (status == status_success)) {
            *(data->rx_data) = s_sdxc_boot_dummy;
        }
        sdxc_clear_interrupt_status(base, SDXC_INT_STAT_DMA_INTERRUPT_MASK | SDXC_INT_STAT_TUNING_ERR_MASK);

    } else {
        /* PIO mode: direction is inferred from which buffer pointer is set */
        if (data->rx_data != NULL) {
            status = sdxc_read_via_data_buf_blocking(base, data);
            if (status != status_success) {
                return status;
            }
        } else {
            status = sdxc_write_via_data_buf_blocking(base, data);
            if (status != status_success) {
                return status;
            }
        }
    }

    return status;
}
489
490
/**
 * @brief Initialize the SDXC controller.
 *
 * Resets the whole controller, powers the bus (VDD1), programs the data
 * timeout, brings up the internal clock / PLL / SD output clock in sequence,
 * enables all interrupt status bits (signals stay masked), and switches the
 * host into version-4 mode with 26-bit ADMA2 length.
 *
 * @param base SDXC base address
 * @param config Driver configuration (only data_timeout is consumed here)
 */
void sdxc_init(SDXC_Type *base, const sdxc_config_t *config)
{
    sdxc_reset(base, sdxc_reset_all, 0x10000U);

    uint32_t prot_ctrl = base->PROT_CTRL;

    prot_ctrl &= ~(SDXC_PROT_CTRL_DMA_SEL_MASK | SDXC_PROT_CTRL_SD_BUS_VOL_VDD1_MASK);

    /* Turn on VDD1 bus power */
    prot_ctrl |= SDXC_PROT_CTRL_SD_BUS_PWR_VDD1_MASK;

    sdxc_set_data_timeout(base, config->data_timeout, NULL);

    base->PROT_CTRL = prot_ctrl;

    /* Enable SD internal clock and the output clock */
    base->SYS_CTRL |= SDXC_SYS_CTRL_INTERNAL_CLK_EN_MASK;
    while (!IS_HPM_BITMASK_SET(base->SYS_CTRL, SDXC_SYS_CTRL_INTERNAL_CLK_STABLE_MASK)) {
        /* wait for the internal clock to stabilize */
    }
    base->SYS_CTRL |= SDXC_SYS_CTRL_PLL_ENABLE_MASK;
    while (!IS_HPM_BITMASK_SET(base->SYS_CTRL, SDXC_SYS_CTRL_INTERNAL_CLK_STABLE_MASK)) {
        /* wait for the clock to stabilize again after PLL enable */
    }

    base->SYS_CTRL |= SDXC_SYS_CTRL_SD_CLK_EN_MASK;

    /* Latch all status bits, but do not route any to the interrupt line yet */
    base->INT_STAT_EN = SDXC_STS_ALL_FLAGS;
    base->INT_SIGNAL_EN = 0UL;
    base->INT_STAT = SDXC_STS_ALL_FLAGS;

    /* Set Host to version 4, enable 26-bit ADMA2 length mode */
    base->AC_HOST_CTRL &= ~(SDXC_AC_HOST_CTRL_UHS_MODE_SEL_MASK | SDXC_AC_HOST_CTRL_SAMPLE_CLK_SEL_MASK);
    base->AC_HOST_CTRL |= SDXC_AC_HOST_CTRL_HOST_VER4_ENABLE_MASK | SDXC_AC_HOST_CTRL_ADMA2_LEN_MODE_MASK;
}
525
/**
 * @brief Program the data-line timeout counter.
 *
 * Picks the smallest TOUT_CNT setting whose pre-calculated timeout exceeds the
 * requested value, saturating at the largest setting (131072 ms). The data
 * timeout status bit is temporarily disabled while TOUT_CNT changes.
 *
 * @param base SDXC base address
 * @param timeout_in_ms Requested timeout in milliseconds
 * @param actual_timeout_ms Optional [out]: timeout actually programmed, in ms
 */
void sdxc_set_data_timeout(SDXC_Type *base, uint32_t timeout_in_ms, uint32_t *actual_timeout_ms)
{
    /* Achievable timeouts for TOUT_CNT = 0..14 */
    static const uint32_t pre_calc_timeout_list[15] = {8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384,
                                                       32768, 65536, 131072};

    uint32_t field_value = 0;
    for (uint32_t i = 0; i < ARRAY_SIZE(pre_calc_timeout_list); i++) {
        if (timeout_in_ms < pre_calc_timeout_list[i]) {
            field_value = i;
            break;
        }
    }
    uint32_t max_index = ARRAY_SIZE(pre_calc_timeout_list) - 1U;
    /* Saturate: requests at or above the largest achievable timeout take the
     * maximum setting. (Fix: the previous '>' comparison left field_value at 0
     * — an 8 ms timeout — when timeout_in_ms was exactly 131072, because the
     * search loop above also finds no strictly-greater entry in that case.) */
    if (timeout_in_ms >= pre_calc_timeout_list[max_index]) {
        field_value = max_index;
    }

    /* Mask the data-timeout status while the counter field is being changed */
    sdxc_enable_interrupt_status(base, SDXC_INT_STAT_EN_DATA_TOUT_ERR_STAT_EN_MASK, false);
    base->SYS_CTRL = (base->SYS_CTRL & ~SDXC_SYS_CTRL_TOUT_CNT_MASK) | SDXC_SYS_CTRL_TOUT_CNT_SET(field_value);
    sdxc_enable_interrupt_status(base, SDXC_INT_STAT_EN_DATA_TOUT_ERR_STAT_EN_MASK, true);

    if (actual_timeout_ms != NULL) {
        *actual_timeout_ms = pre_calc_timeout_list[field_value];
    }
}
551
/**
 * @brief Configure eMMC boot mode parameters.
 *
 * Programs boot acknowledge and the boot block size/count; manual boot enable
 * and the boot timeout counter fields are cleared.
 *
 * @param base SDXC base address
 * @param config Boot configuration (ack enable, block size, block count)
 */
void sdxc_set_mmc_boot_config(SDXC_Type *base, const sdxc_boot_config_t *config)
{
    uint32_t boot_ctrl = base->EMMC_BOOT_CTRL
                       & ~(SDXC_EMMC_BOOT_CTRL_BOOT_TOUT_CNT_MASK | SDXC_EMMC_BOOT_CTRL_MAN_BOOT_EN_MASK);

    if (config->enable_boot_ack) {
        boot_ctrl |= SDXC_EMMC_BOOT_CTRL_BOOT_ACK_ENABLE_MASK;
    }

    /* TODO: Auto stop at block gap, how to handle this? */

    uint32_t blk_attr = base->BLK_ATTR;
    blk_attr &= ~(SDXC_BLK_ATTR_XFER_BLOCK_SIZE_MASK | SDXC_BLK_ATTR_BLOCK_CNT_MASK);
    blk_attr |= SDXC_BLK_ATTR_XFER_BLOCK_SIZE_SET(config->block_size);
    blk_attr |= SDXC_BLK_ATTR_BLOCK_CNT_SET(config->block_cnt);

    base->BLK_ATTR = blk_attr;
    base->EMMC_BOOT_CTRL = boot_ctrl;
}
572
/**
 * @brief Program block geometry and transfer direction.
 *
 * @param base SDXC base address
 * @param data_dir sdxc_xfer_dir_write clears DATA_XFER_DIR; anything else sets it (read)
 * @param block_cnt Number of blocks
 * @param block_size Block size in bytes
 */
void sdxc_set_data_config(SDXC_Type *base, sdxc_xfer_direction_t data_dir, uint32_t block_cnt, uint32_t block_size)
{
    uint32_t blk_attr = base->BLK_ATTR;
    blk_attr &= ~(SDXC_BLK_ATTR_XFER_BLOCK_SIZE_MASK | SDXC_BLK_ATTR_BLOCK_CNT_MASK);
    blk_attr |= SDXC_BLK_ATTR_BLOCK_CNT_SET(block_cnt);
    blk_attr |= SDXC_BLK_ATTR_XFER_BLOCK_SIZE_SET(block_size);
    base->BLK_ATTR = blk_attr;

    if (data_dir == sdxc_xfer_dir_write) {
        base->CMD_XFER &= ~SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
    } else {
        base->CMD_XFER |= SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
    }
}
587
/**
 * @brief Point the DMA engine at its data/descriptor address and select DMA mode.
 *
 * @param base SDXC base address
 * @param dma_cfg DMA configuration (type + ADMA descriptor table)
 * @param data_addr Data buffer address (used in SDMA mode only; must be 4-byte aligned)
 * @param enable_auto_cmd23 Unused in this implementation
 * @retval status_success or status_sdxc_dma_addr_unaligned
 */
hpm_stat_t sdxc_set_dma_config(SDXC_Type *base,
                               sdxc_adma_config_t *dma_cfg,
                               const uint32_t *data_addr,
                               bool enable_auto_cmd23)
{
    (void) enable_auto_cmd23;
    if (dma_cfg->dma_type == sdxc_dmasel_sdma) {
        /* SDMA transfers straight from the data buffer; it must be word-aligned */
        if (((uint32_t) data_addr % SDXC_SYS_DMA_ALIGN_LEN) != 0U) {
            return status_sdxc_dma_addr_unaligned;
        }

        base->ADMA_SYS_ADDR = (uint32_t) data_addr;
    } else {
        /* ADMA modes read from the descriptor table instead */
        base->ADMA_SYS_ADDR = (uint32_t) dma_cfg->adma_table;
    }

    /* Set DMA mode */
    /* NOTE(review): SYS_CTRL is read and written back unchanged around the
     * PROT_CTRL update — purpose unclear from this file; confirm whether the
     * controller requires it before simplifying. */
    uint32_t sys_ctl = base->SYS_CTRL;
    base->PROT_CTRL = (base->PROT_CTRL & ~SDXC_PROT_CTRL_DMA_SEL_MASK) | SDXC_PROT_CTRL_DMA_SEL_SET(dma_cfg->dma_type);
    base->SYS_CTRL = sys_ctl;
    return status_success;
}
611
612
sdxc_set_adma2_desc(uint32_t * adma_tbl,uint32_t adma_table_words,const uint32_t * data_buf,uint32_t data_bytes,uint32_t flags)613 hpm_stat_t sdxc_set_adma2_desc(uint32_t *adma_tbl,
614 uint32_t adma_table_words,
615 const uint32_t *data_buf,
616 uint32_t data_bytes,
617 uint32_t flags)
618 {
619 hpm_stat_t status = status_invalid_argument;
620 do {
621 if ((adma_tbl == NULL) || (data_buf == NULL)) {
622 break;
623 }
624 if ((uint32_t) data_buf % 4U != 0U) {
625 status = status_sdxc_dma_addr_unaligned;
626 break;
627 }
628
629 uint32_t start_idx = 0;
630 uint32_t min_entries;
631 uint32_t max_entries = adma_table_words * sizeof(uint32_t) / sizeof(sdxc_adma2_descriptor_t);
632 sdxc_adma2_descriptor_t *adma2_desc = (sdxc_adma2_descriptor_t *) adma_tbl;
633
634 /* Ensure that the data_bytes is 4-byte aligned. */
635 data_bytes += (data_bytes % sizeof(uint32_t));
636
637 min_entries = data_bytes / SDXC_DMA_MAX_XFER_LEN_26BIT;
638 if (data_bytes % SDXC_DMA_MAX_XFER_LEN_26BIT != 0U) {
639 min_entries += 1U;
640 }
641
642 uint32_t i;
643 if (flags == (uint32_t) sdxc_adma_desc_multi_flag) {
644
645 for (i = 0; i < max_entries; i++) {
646 if (adma2_desc[i].valid == 0U) {
647 break;
648 }
649 }
650
651 start_idx = i;
652
653 /* add one entry as dummy entry */
654 min_entries += 1U;
655 }
656
657 if ((min_entries + start_idx) > max_entries) {
658 return status_invalid_argument;
659 }
660
661 uint32_t dma_buf_len = 0U;
662 const uint32_t *data = data_buf;
663 for (i = start_idx; i < (min_entries + start_idx); i++) {
664 if (data_bytes > SDXC_DMA_MAX_XFER_LEN_26BIT) {
665 dma_buf_len = SDXC_DMA_MAX_XFER_LEN_26BIT;
666 } else {
667 dma_buf_len = (data_bytes == 0U) ? sizeof(uint32_t) : data_bytes;
668 }
669
670 /* Format each adma2 descriptor entry */
671 adma2_desc[i].addr = (data_bytes == 0U) ? &s_sdxc_boot_dummy : data;
672 adma2_desc[i].len_attr = 0U;
673 adma2_desc[i].len_lower = dma_buf_len & 0xFFFFU;
674 adma2_desc[i].len_upper = dma_buf_len >> 16;
675 adma2_desc[i].len_attr |= SDXC_ADMA2_DESC_VALID_FLAG;
676 if (data_bytes != 0U) {
677 adma2_desc[i].act = SDXC_ADMA2_DESC_TYPE_TRANS;
678 }
679
680 data = (uint32_t *) ((uint32_t) data + dma_buf_len);
681 if (data_bytes != 0U) {
682 data_bytes -= dma_buf_len;
683 }
684 }
685
686 if (flags == (uint32_t) sdxc_adma_desc_multi_flag) {
687 adma2_desc[i + 1U].len_attr |= SDXC_ADMA2_DESC_TYPE_TRANS;
688 } else {
689 adma2_desc[i - 1U].len_attr |= SDXC_ADMA2_DESC_END_FLAG;
690 }
691
692 status = status_success;
693 } while (false);
694
695 return status;
696 }
697
sdxc_set_adma_table_config(SDXC_Type * base,sdxc_adma_config_t * dma_cfg,sdxc_data_t * data_cfg,uint32_t flags)698 hpm_stat_t sdxc_set_adma_table_config(SDXC_Type *base,
699 sdxc_adma_config_t *dma_cfg,
700 sdxc_data_t *data_cfg,
701 uint32_t flags)
702 {
703 hpm_stat_t status = status_fail;
704
705 uint32_t boot_dummy_offset = (data_cfg->data_type == (uint8_t) sdxc_xfer_data_boot_continuous) ? sizeof(uint32_t)
706 : 0;
707 const uint32_t *data;
708
709 if (data_cfg->rx_data == NULL) {
710 data = (const uint32_t *) data_cfg->tx_data;
711 } else {
712 data = (const uint32_t *) data_cfg->rx_data;
713 }
714 if (boot_dummy_offset > 0) {
715 data++;
716 }
717
718 uint32_t block_size = data_cfg->block_size * data_cfg->block_cnt - boot_dummy_offset;
719
720 if (dma_cfg->dma_type == sdxc_dmasel_sdma) {
721 status = status_success;
722 } else if (dma_cfg->dma_type == sdxc_dmasel_adma2) {
723 status = sdxc_set_adma2_desc(dma_cfg->adma_table, dma_cfg->adma_table_words, data, block_size, flags);
724
725 } else if (dma_cfg->dma_type == sdxc_dmasel_adma2_or_3) {
726 /* TODO: To be implemented */
727
728 } else {
729 status = status_invalid_argument;
730 }
731
732 if ((status == status_success) && (data_cfg->data_type != (uint8_t) sdxc_xfer_data_boot_continuous)) {
733 status = sdxc_set_dma_config(base, dma_cfg, data, data_cfg->enable_auto_cmd23);
734 }
735
736 return status;
737 }
738
/**
 * @brief Issue a software reset and wait for it to complete.
 *
 * @param base SDXC base address
 * @param reset_type Which domain to reset (all / command line / data line)
 * @param timeout Maximum poll iterations to wait for the self-clearing bit
 * @return true on completion, false if the reset bit never cleared in time
 */
bool sdxc_reset(SDXC_Type *base, sdxc_sw_reset_type_t reset_type, uint32_t timeout)
{
    uint32_t reset_mask;

    switch (reset_type) {
    case sdxc_reset_all:
        reset_mask = SDXC_SYS_CTRL_SW_RST_ALL_MASK;
        break;
    case sdxc_reset_cmd_line:
        reset_mask = SDXC_SYS_CTRL_SW_RST_CMD_MASK;
        break;
    case sdxc_reset_data_line:
        reset_mask = SDXC_SYS_CTRL_SW_RST_DAT_MASK;
        break;
    default:
        reset_mask = 0U; /* unknown type: nothing to reset, loop exits immediately */
        break;
    }

    /* The reset bit self-clears once the reset has finished */
    base->SYS_CTRL |= reset_mask;
    while (IS_HPM_BITMASK_SET(base->SYS_CTRL, reset_mask)) {
        if (timeout == 0U) {
            return false;
        }
        --timeout;
    }

    return true;
}
767
/**
 * @brief Select the SD bus voltage and matching signaling level.
 *
 * @param base SDXC base address
 * @param option Bus voltage option; 1.8 V options also enable 1.8 V signaling
 */
void sdxc_select_voltage(SDXC_Type *base, sdxc_bus_voltage_option_t option)
{
    uint32_t vol = (uint32_t) option;
    uint32_t prot_ctrl = base->PROT_CTRL & ~SDXC_PROT_CTRL_SD_BUS_VOL_VDD1_MASK;
    base->PROT_CTRL = prot_ctrl | SDXC_PROT_CTRL_SD_BUS_VOL_VDD1_SET(vol);

    bool use_1v8_signaling = (option == sdxc_bus_voltage_sd_1v8) || (option == sdxc_bus_voltage_emmc_1v8);
    if (use_1v8_signaling) {
        base->AC_HOST_CTRL |= SDXC_AC_HOST_CTRL_SIGNALING_EN_MASK;
    } else {
        base->AC_HOST_CTRL &= ~SDXC_AC_HOST_CTRL_SIGNALING_EN_MASK;
    }
}
781
/**
 * @brief Enable or disable a wakeup event source.
 *
 * @param base SDXC base address
 * @param evt Wakeup event bit(s) in PROT_CTRL
 * @param enable true to set the bit(s), false to clear them
 */
void sdxc_enable_wakeup_event(SDXC_Type *base, sdxc_wakeup_event_t evt, bool enable)
{
    uint32_t evt_mask = (uint32_t) evt;

    if (enable) {
        base->PROT_CTRL |= evt_mask;
    } else {
        base->PROT_CTRL &= ~evt_mask;
    }
}
792
/**
 * @brief Select the data bus width (1/4/8-bit) and re-enable the SD clock.
 *
 * @param base SDXC base address
 * @param width Desired bus width; unknown values leave both width fields cleared
 */
void sdxc_set_data_bus_width(SDXC_Type *base, sdxc_bus_width_t width)
{
    uint32_t prot_ctrl = base->PROT_CTRL;
    prot_ctrl &= ~(SDXC_PROT_CTRL_EXT_DAT_XFER_MASK | SDXC_PROT_CTRL_DAT_XFER_WIDTH_MASK);

    switch (width) {
    case sdxc_bus_width_1bit:
        prot_ctrl |= SDXC_PROT_CTRL_DAT_XFER_WIDTH_SET(0U);
        break;
    case sdxc_bus_width_4bit:
        prot_ctrl |= SDXC_PROT_CTRL_DAT_XFER_WIDTH_SET(1U);
        break;
    case sdxc_bus_width_8bit:
        /* 8-bit mode uses the extended-data-transfer bit instead */
        prot_ctrl |= SDXC_PROT_CTRL_EXT_DAT_XFER_SET(1U);
        break;
    default:
        break;
    }
    base->PROT_CTRL = prot_ctrl;

    sdxc_enable_sd_clock(base, true);
}
810
/**
 * @brief Report the currently configured data bus width.
 *
 * @param base SDXC base address
 * @return 8, 4 or 1 (bits), derived from the PROT_CTRL width fields
 */
uint32_t sdxc_get_data_bus_width(SDXC_Type *base)
{
    uint32_t prot_ctrl = base->PROT_CTRL;

    if (IS_HPM_BITMASK_SET(prot_ctrl, SDXC_PROT_CTRL_EXT_DAT_XFER_MASK)) {
        return 8;
    }
    if (IS_HPM_BITMASK_SET(prot_ctrl, SDXC_PROT_CTRL_DAT_XFER_WIDTH_MASK)) {
        return 4;
    }
    return 1;
}
825
/**
 * @brief Select the UHS speed mode and keep the high-speed enable consistent.
 *
 * @param base SDXC base address
 * @param mode Speed mode written to the UHS_MODE_SEL field
 */
void sdxc_set_speed_mode(SDXC_Type *base, sdxc_speed_mode_t mode)
{
    uint32_t uhs_mode = (uint32_t) mode;

    uint32_t ac_host = base->AC_HOST_CTRL & ~SDXC_AC_HOST_CTRL_UHS_MODE_SEL_MASK;
    base->AC_HOST_CTRL = ac_host | SDXC_AC_HOST_CTRL_UHS_MODE_SEL_SET(uhs_mode);

    /* Modes above SDR12 require the high-speed enable bit in PROT_CTRL */
    if ((uhs_mode & 0xFU) > sdxc_sd_speed_sdr12) {
        base->PROT_CTRL |= SDXC_PROT_CTRL_HIGH_SPEED_EN_MASK;
    } else {
        base->PROT_CTRL &= ~SDXC_PROT_CTRL_HIGH_SPEED_EN_MASK;
    }
}
838
sdxc_transfer_nonblocking(SDXC_Type * base,sdxc_adma_config_t * dma_config,sdxc_xfer_t * xfer)839 hpm_stat_t sdxc_transfer_nonblocking(SDXC_Type *base, sdxc_adma_config_t *dma_config, sdxc_xfer_t *xfer)
840 {
841 hpm_stat_t status = status_invalid_argument;
842 sdxc_command_t *cmd = xfer->command;
843 sdxc_data_t *data = xfer->data;
844 uint32_t xfer_flags = (uint32_t) sdxc_cmd_only;
845
846 uint32_t block_size = 0U;
847 uint32_t block_cnt = 0U;
848 do {
849 if (IS_HPM_BITMASK_SET(base->INT_STAT, SDXC_INT_STAT_RE_TUNE_EVENT_MASK)) {
850 base->INT_STAT = SDXC_INT_STAT_RE_TUNE_EVENT_MASK;
851 status = status_sdxc_retuning_request;
852 break;
853 }
854
855 uint32_t new_xfer_flags = 0;
856 if (data != NULL) {
857
858 if (dma_config != NULL) {
859 uint32_t flags = IS_HPM_BITMASK_SET(data->data_type, sdxc_xfer_data_boot) ? sdxc_adma_desc_multi_flag
860 : sdxc_adma_desc_single_flag;
861 status = sdxc_set_adma_table_config(base, dma_config, data, flags);
862 if (status != status_success) {
863 break;
864 }
865 }
866 block_size = data->block_size;
867 block_cnt = data->block_cnt;
868 xfer_flags = data->enable_auto_cmd12 ? (uint32_t) sdxc_data_with_auto_cmd12 : 0;
869 xfer_flags |= (data->enable_auto_cmd23) ? (uint32_t) sdxc_data_with_auto_cmd23 : 0;
870 xfer_flags |= (data->tx_data != NULL) ? (uint32_t) sdxc_cmd_and_tx_data : 0;
871 xfer_flags |= (data->rx_data != NULL) ? (uint32_t) sdxc_cmd_and_rx_data : 0;
872 xfer_flags |= (data->data_type == sdxc_xfer_data_boot) ? (uint32_t) sdxc_boot_data : 0;
873 xfer_flags |= (data->data_type == sdxc_xfer_data_boot_continuous) ? (uint32_t) sdxc_boot_data_continuous
874 : 0;
875 cmd->cmd_flags |= SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK;
876
877 if (dma_config->dma_type != sdxc_dmasel_nodma) {
878 cmd->cmd_flags |= SDXC_CMD_XFER_DMA_ENABLE_MASK;
879 }
880
881 status = sdxc_set_transfer_config(base, xfer_flags, block_size, block_cnt, &new_xfer_flags);
882 if (status != status_success) {
883 break;
884 }
885 }
886
887 cmd->cmd_flags |= new_xfer_flags;
888 status = sdxc_send_command(base, cmd);
889 } while (false);
890
891 return status;
892 }
893
sdxc_transfer_blocking(SDXC_Type * base,sdxc_adma_config_t * dma_config,sdxc_xfer_t * xfer)894 hpm_stat_t sdxc_transfer_blocking(SDXC_Type *base, sdxc_adma_config_t *dma_config, sdxc_xfer_t *xfer)
895 {
896 hpm_stat_t status = status_invalid_argument;
897 sdxc_command_t *cmd = xfer->command;
898 sdxc_data_t *data = xfer->data;
899 bool enable_dma = true;
900 do {
901 status = sdxc_transfer_nonblocking(base, dma_config, xfer);
902 HPM_BREAK_IF(status != status_success);
903
904 bool polling_cmd_done = (data == NULL) || (data->data_type == sdxc_xfer_data_normal);
905 status = sdxc_wait_cmd_done(base, cmd, polling_cmd_done);
906 if (status != status_success) {
907 status = status_sdxc_send_cmd_failed;
908 break;
909 }
910 if (data != NULL) {
911 status = sdxc_transfer_data_blocking(base, data, enable_dma);
912 }
913 } while (false);
914
915 return status;
916 }
917
sdxc_error_recovery(SDXC_Type * base)918 hpm_stat_t sdxc_error_recovery(SDXC_Type *base)
919 {
920 hpm_stat_t status = status_success;
921 /* D8, D3-D0 of ERROR_INT_STAT */
922 uint32_t cmdline_err_mask =
923 SDXC_INT_STAT_AUTO_CMD_ERR_MASK | SDXC_INT_STAT_CMD_TOUT_ERR_MASK | SDXC_INT_STAT_CMD_CRC_ERR_MASK |
924 SDXC_INT_STAT_CMD_END_BIT_ERR_MASK | SDXC_INT_STAT_CMD_IDX_ERR_MASK;
925
926 if ((base->INT_STAT & cmdline_err_mask) != 0U) {
927 status = status_sdxc_send_cmd_failed;
928 /* Only Auto Command error */
929 if ((base->INT_STAT & cmdline_err_mask) == SDXC_INT_STAT_AUTO_CMD_ERR_MASK) {
930 /* Will reset command line */
931 } else {
932 if (((base->CMD_XFER & SDXC_CMD_XFER_RESP_INT_DISABLE_MASK) != 0U) ||
933 ((base->AC_HOST_CTRL & SDXC_AC_HOST_CTRL_EXEC_TUNING_MASK))) {
934 /* Will reset command line */
935 } else {
936 /* Wait until command completes */
937 uint32_t timeout = 1000000UL;
938 while (!IS_HPM_BITMASK_SET(base->CMD_XFER, SDXC_INT_STAT_CMD_COMPLETE_MASK)) {
939 --timeout;
940 if (timeout < 1U) {
941 status = status_timeout;
942 break;
943 }
944 }
945 }
946 }
947
948 sdxc_reset(base, sdxc_reset_cmd_line, 0xFFFFFFU);
949 }
950
951 /* Check D9, D6-D4 in ERR_INT_STAT */
952 uint32_t dataline_err_mask =
953 SDXC_INT_STAT_ADMA_ERR_MASK | SDXC_INT_STAT_DATA_TOUT_ERR_MASK | SDXC_INT_STAT_DATA_CRC_ERR_MASK |
954 SDXC_INT_STAT_DATA_END_BIT_ERR_MASK;
955
956 if ((base->INT_STAT & dataline_err_mask) != 0U) {
957 status = status_sdxc_transfer_data_failed;
958 sdxc_reset(base, sdxc_reset_data_line, 0xFFFFFFU);
959 }
960 sdxc_clear_interrupt_status(base, ~0UL);
961
962 if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_CMD_INHIBIT_MASK)) {
963 sdxc_reset(base, sdxc_reset_cmd_line, 10000U);
964 }
965 if (IS_HPM_BITMASK_SET(base->PSTATE, SDXC_PSTATE_DAT_INHIBIT_MASK)) {
966 sdxc_reset(base, sdxc_reset_data_line, 10000U);
967 }
968
969 return status;
970 }
971
sdxc_tuning_error_recovery(SDXC_Type * base)972 hpm_stat_t sdxc_tuning_error_recovery(SDXC_Type *base)
973 {
974 sdxc_reset_tuning_engine(base);
975 sdxc_reset(base, sdxc_reset_data_line, 0xFFFFFFUL);
976 sdxc_reset(base, sdxc_reset_cmd_line, 0xFFFFFFUL);
977
978 return status_success;
979 }
980
sdxc_perform_tuning_flow_sequence(SDXC_Type * base,uint8_t tuning_cmd)981 hpm_stat_t sdxc_perform_tuning_flow_sequence(SDXC_Type *base, uint8_t tuning_cmd)
982 {
983 hpm_stat_t status = status_success;
984
985 /* Turn off Sampling clock */
986 sdxc_enable_sd_clock(base, false);
987 sdxc_execute_tuning(base);
988 uint32_t block_size = SDXC_PROT_CTRL_EXT_DAT_XFER_GET(base->PROT_CTRL) ? 128U : 64U;
989 sdxc_command_t cmd;
990 (void) memset(&cmd, 0, sizeof(cmd));
991 cmd.cmd_index = tuning_cmd;
992 cmd.cmd_argument = 0;
993 cmd.cmd_flags = SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
994 cmd.resp_type = sdxc_dev_resp_r1;
995 sdxc_enable_sd_clock(base, true);
996 do {
997 base->BLK_ATTR = block_size;
998 base->SDMASA = 1;
999 status = sdxc_send_command(base, &cmd);
1000 while (!IS_HPM_BITMASK_SET(base->INT_STAT, SDXC_INT_STAT_BUF_RD_READY_MASK)) {
1001 }
1002 sdxc_clear_interrupt_status(base, SDXC_INT_STAT_BUF_RD_READY_MASK);
1003 } while (IS_HPM_BITMASK_SET(base->AC_HOST_CTRL, SDXC_AC_HOST_CTRL_EXEC_TUNING_MASK));
1004
1005 if (!IS_HPM_BITMASK_SET(base->AC_HOST_CTRL, SDXC_AC_HOST_CTRL_SAMPLE_CLK_SEL_MASK)) {
1006 sdxc_tuning_error_recovery(base);
1007 status = status_sdxc_tuning_failed;
1008 }
1009
1010 return status;
1011 }
1012
sdxc_perform_software_tuning(SDXC_Type * base,uint8_t tuning_cmd)1013 hpm_stat_t sdxc_perform_software_tuning(SDXC_Type *base, uint8_t tuning_cmd)
1014 {
1015 hpm_stat_t status;
1016
1017 sdxc_tuning_error_recovery(base);
1018
1019 /* Turn off Sampling clock */
1020 sdxc_enable_sd_clock(base, false);
1021 sdxc_reset_tuning_engine(base);
1022 uint32_t block_size = SDXC_PROT_CTRL_EXT_DAT_XFER_GET(base->PROT_CTRL) ? 128U : 64U;
1023 sdxc_command_t cmd;
1024 (void) memset(&cmd, 0, sizeof(cmd));
1025 cmd.cmd_index = tuning_cmd;
1026 cmd.cmd_argument = 0;
1027 cmd.cmd_flags = SDXC_CMD_XFER_DATA_PRESENT_SEL_MASK | SDXC_CMD_XFER_DATA_XFER_DIR_MASK;
1028 cmd.resp_type = sdxc_dev_resp_r1;
1029 base->BLK_ATTR = block_size;
1030 base->SDMASA = 0;
1031 uint32_t tuning_cclk_sel = 0;
1032 sdxc_enable_software_tuning(base, true);
1033 sdxc_set_center_phase_code(base, 0);
1034 sdxc_enable_sd_clock(base, true);
1035
1036 bool center_phase_codes_valid[255];
1037 (void) memset(¢er_phase_codes_valid, 0, sizeof(center_phase_codes_valid));
1038
1039 do {
1040
1041 sdxc_send_command(base, &cmd);
1042
1043 uint32_t timeout_cnt = 0xFFFFFUL;
1044 while (!IS_HPM_BITMASK_SET(base->INT_STAT,
1045 SDXC_INT_STAT_BUF_RD_READY_MASK | SDXC_INT_STAT_ERR_INTERRUPT_MASK)) {
1046 timeout_cnt--;
1047 if (timeout_cnt < 1U) {
1048 break;
1049 }
1050 }
1051
1052 if (IS_HPM_BITMASK_SET(base->INT_STAT, SDXC_INT_STAT_BUF_RD_READY_MASK)) {
1053 center_phase_codes_valid[tuning_cclk_sel] = true;
1054 }
1055 sdxc_clear_interrupt_status(base, ~0UL);
1056
1057 sdxc_reset(base, sdxc_reset_cmd_line, 0xFFFFFFU);
1058 sdxc_reset(base, sdxc_reset_data_line, 0xFFFFFFU);
1059
1060 tuning_cclk_sel++;
1061
1062 sdxc_enable_sd_clock(base, false);
1063 sdxc_set_center_phase_code(base, tuning_cclk_sel);
1064 sdxc_enable_sd_clock(base, true);
1065
1066 } while (tuning_cclk_sel < SDXC_AUTO_TUNING_STAT_CENTER_PH_CODE_GET(SDXC_AUTO_TUNING_STAT_CENTER_PH_CODE_MASK));
1067
1068 int32_t first_window_idx = -1;
1069 int32_t last_window_idx = -1;
1070 for (int32_t i = 0; i < (int32_t) ARRAY_SIZE(center_phase_codes_valid); i++) {
1071 if (center_phase_codes_valid[i]) {
1072 first_window_idx = i;
1073 break;
1074 }
1075 }
1076
1077 for (int32_t i = ARRAY_SIZE(center_phase_codes_valid) - 1; i >= 0; i--) {
1078 if (center_phase_codes_valid[i]) {
1079 last_window_idx = i;
1080 break;
1081 }
1082 }
1083
1084 if ((first_window_idx >= 0) && (last_window_idx >= 0)) {
1085
1086 uint32_t center_window = (first_window_idx + last_window_idx) / 2;
1087 sdxc_set_center_phase_code(base, center_window);
1088
1089 status = status_success;
1090 } else {
1091 status = status_sdxc_tuning_failed;
1092 }
1093
1094 return status;
1095 }
1096
sdxc_perform_auto_tuning(SDXC_Type * base,uint8_t tuning_cmd)1097 hpm_stat_t sdxc_perform_auto_tuning(SDXC_Type *base, uint8_t tuning_cmd)
1098 {
1099 bool need_inverse = sdxc_is_inverse_clock_enabled(base);
1100 sdxc_enable_inverse_clock(base, false);
1101 sdxc_enable_sd_clock(base, false);
1102 sdxc_enable_auto_tuning(base, true);
1103 sdxc_enable_inverse_clock(base, need_inverse);
1104 sdxc_enable_sd_clock(base, true);
1105
1106 return sdxc_perform_tuning_flow_sequence(base, tuning_cmd);
1107 }
1108