// SPDX-License-Identifier: BSD-3-Clause-Clear
/*
 * Copyright (c) 2018-2021 The Linux Foundation. All rights reserved.
 * Copyright (c) 2021-2025 Qualcomm Innovation Center, Inc. All rights reserved.
 */
#include <linux/dma-mapping.h>
#include "hal_tx.h"
#include "hal_rx.h"
#include "debug.h"
#include "hal_desc.h"
#include "hif.h"

static const struct hal_srng_config hw_srng_config_template[] = {
	/* TODO: max_rings can be populated by querying HW capabilities */
	[HAL_REO_DST] = {
		.start_ring_id = HAL_SRNG_RING_ID_REO2SW1,
		.max_rings = 8,
		.entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_REO_REO2SW1_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_REO_EXCEPTION] = {
		/* Designating REO2SW0 ring as exception ring.
		 * Any of the REO2SW rings can be used as exception ring.
		 */
		.start_ring_id = HAL_SRNG_RING_ID_REO2SW0,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_reo_dest_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_REO_REO2SW0_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_REO_REINJECT] = {
		.start_ring_id = HAL_SRNG_RING_ID_SW2REO,
		.max_rings = 4,
		.entry_size = sizeof(struct hal_reo_entrance_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_REO_SW2REO_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_REO_CMD] = {
		.start_ring_id = HAL_SRNG_RING_ID_REO_CMD,
		.max_rings = 1,
		.entry_size = (sizeof(struct hal_tlv_64_hdr) +
			       sizeof(struct hal_reo_get_queue_stats)) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_REO_CMD_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_REO_STATUS] = {
		.start_ring_id = HAL_SRNG_RING_ID_REO_STATUS,
		.max_rings = 1,
		.entry_size = (sizeof(struct hal_tlv_64_hdr) +
			       sizeof(struct hal_reo_get_queue_stats_status)) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_REO_STATUS_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_TCL_DATA] = {
		.start_ring_id = HAL_SRNG_RING_ID_SW2TCL1,
		.max_rings = 6,
		.entry_size = sizeof(struct hal_tcl_data_cmd) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_TCL_CMD] = {
		.start_ring_id = HAL_SRNG_RING_ID_SW2TCL_CMD,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_tcl_gse_cmd) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_SW2TCL1_CMD_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_TCL_STATUS] = {
		.start_ring_id = HAL_SRNG_RING_ID_TCL_STATUS,
		.max_rings = 1,
		.entry_size = (sizeof(struct hal_tlv_hdr) +
			       sizeof(struct hal_tcl_status_ring)) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_TCL_STATUS_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_CE_SRC] = {
		.start_ring_id = HAL_SRNG_RING_ID_CE0_SRC,
		.max_rings = 16,
		.entry_size = sizeof(struct hal_ce_srng_src_desc) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_CE_SRC_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_CE_DST] = {
		.start_ring_id = HAL_SRNG_RING_ID_CE0_DST,
		.max_rings = 16,
		.entry_size = sizeof(struct hal_ce_srng_dest_desc) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_CE_DST_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_CE_DST_STATUS] = {
		.start_ring_id = HAL_SRNG_RING_ID_CE0_DST_STATUS,
		.max_rings = 16,
		.entry_size = sizeof(struct hal_ce_srng_dst_status_desc) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_CE_DST_STATUS_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_WBM_IDLE_LINK] = {
		.start_ring_id = HAL_SRNG_RING_ID_WBM_IDLE_LINK,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_wbm_link_desc) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_WBM_IDLE_LINK_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_SW2WBM_RELEASE] = {
		.start_ring_id = HAL_SRNG_RING_ID_WBM_SW0_RELEASE,
		.max_rings = 2,
		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_SW2WBM_RELEASE_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_WBM2SW_RELEASE] = {
		.start_ring_id = HAL_SRNG_RING_ID_WBM2SW0_RELEASE,
		.max_rings = 8,
		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_UMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_WBM2SW_RELEASE_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_RXDMA_BUF] = {
		.start_ring_id = HAL_SRNG_SW2RXDMA_BUF0,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_wbm_buffer_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_DMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
	},
	[HAL_RXDMA_DST] = {
		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_RXDMA2SW0,
		.max_rings = 0,
		.entry_size = 0,
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
	},
	[HAL_RXDMA_MONITOR_BUF] = {
		.start_ring_id = HAL_SRNG_SW2RXMON_BUF0,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
	},
	[HAL_RXDMA_MONITOR_STATUS] = { 0, },
	[HAL_RXDMA_MONITOR_DESC] = { 0, },
	[HAL_RXDMA_DIR_BUF] = {
		.start_ring_id = HAL_SRNG_RING_ID_RXDMA_DIR_BUF,
		.max_rings = 2,
		.entry_size = 8 >> 2, /* TODO: Define the struct */
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
	},
	[HAL_PPE2TCL] = {
		.start_ring_id = HAL_SRNG_RING_ID_PPE2TCL1,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_tcl_entrance_from_ppe_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_SW2TCL1_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_PPE_RELEASE] = {
		.start_ring_id = HAL_SRNG_RING_ID_WBM_PPE_RELEASE,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_wbm_release_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_WBM2PPE_RELEASE_RING_BASE_MSB_RING_SIZE,
	},
	[HAL_TX_MONITOR_BUF] = {
		.start_ring_id = HAL_SRNG_SW2TXMON_BUF0,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_mon_buf_ring) >> 2,
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_SRC,
		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
	},
	[HAL_RXDMA_MONITOR_DST] = {
		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_SW2RXMON_BUF0,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
	},
	[HAL_TX_MONITOR_DST] = {
		.start_ring_id = HAL_SRNG_RING_ID_WMAC1_TXMON2SW0_BUF0,
		.max_rings = 1,
		.entry_size = sizeof(struct hal_mon_dest_desc) >> 2,
		.mac_type = ATH12K_HAL_SRNG_PMAC,
		.ring_dir = HAL_SRNG_DIR_DST,
		.max_size = HAL_RXDMA_RING_MAX_SIZE_BE,
	}
};

static const struct ath12k_hal_tcl_to_wbm_rbm_map
ath12k_hal_qcn9274_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
	{
		.wbm_ring_num = 0,
		.rbm_id = HAL_RX_BUF_RBM_SW0_BM,
	},
	{
		.wbm_ring_num = 1,
		.rbm_id = HAL_RX_BUF_RBM_SW1_BM,
	},
	{
		.wbm_ring_num = 2,
		.rbm_id = HAL_RX_BUF_RBM_SW2_BM,
	},
	{
		.wbm_ring_num = 4,
		.rbm_id = HAL_RX_BUF_RBM_SW4_BM,
	}
};

static const struct ath12k_hal_tcl_to_wbm_rbm_map
ath12k_hal_wcn7850_tcl_to_wbm_rbm_map[DP_TCL_NUM_RING_MAX] = {
	{
		.wbm_ring_num = 0,
		.rbm_id = HAL_RX_BUF_RBM_SW0_BM,
	},
	{
		.wbm_ring_num = 2,
		.rbm_id = HAL_RX_BUF_RBM_SW2_BM,
	},
	{
		.wbm_ring_num = 4,
		.rbm_id = HAL_RX_BUF_RBM_SW4_BM,
	},
};

static unsigned int ath12k_hal_reo1_ring_id_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_ID(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_msi1_base_lsb_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_MSI1_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_msi1_base_msb_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_MSI1_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_msi1_data_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_MSI1_DATA(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_base_msb_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_BASE_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_producer_int_setup_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_PRODUCER_INT_SETUP(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_hp_addr_lsb_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_HP_ADDR_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_hp_addr_msb_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_HP_ADDR_MSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static unsigned int ath12k_hal_reo1_ring_misc_offset(struct ath12k_base *ab)
{
	return HAL_REO1_RING_MISC(ab) - HAL_REO1_RING_BASE_LSB(ab);
}

static bool ath12k_hw_qcn9274_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
{
	return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
			       RX_MSDU_END_INFO5_FIRST_MSDU);
}

static bool ath12k_hw_qcn9274_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
{
	return !!le16_get_bits(desc->u.qcn9274.msdu_end.info5,
			       RX_MSDU_END_INFO5_LAST_MSDU);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
{
	return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
			     RX_MSDU_END_INFO5_L3_HDR_PADDING);
}

static bool ath12k_hw_qcn9274_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
			       RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
}

static u32 ath12k_hw_qcn9274_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.mpdu_start.info2,
			     RX_MPDU_START_INFO2_ENC_TYPE);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_decap_type(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
			     RX_MSDU_END_INFO11_DECAP_FORMAT);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
			     RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
}

static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
			       RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
}

static bool ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
			       RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
}

static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.mpdu_start.info4,
			     RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
}

static u16 ath12k_hw_qcn9274_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info10,
			     RX_MSDU_END_INFO10_MSDU_LENGTH);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
			     RX_MSDU_END_INFO12_SGI);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
			     RX_MSDU_END_INFO12_RATE_MCS);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
			     RX_MSDU_END_INFO12_RECV_BW);
}

static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
{
	return __le32_to_cpu(desc->u.qcn9274.msdu_end.phy_meta_data);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
			     RX_MSDU_END_INFO12_PKT_TYPE);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info12,
			     RX_MSDU_END_QCN9274_INFO12_MIMO_SS_BITMAP);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
{
	return le16_get_bits(desc->u.qcn9274.msdu_end.info5,
			     RX_MSDU_END_QCN9274_INFO5_TID);
}

static u16 ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
{
	return __le16_to_cpu(desc->u.qcn9274.mpdu_start.sw_peer_id);
}

static void ath12k_hw_qcn9274_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
						   struct hal_rx_desc *ldesc)
{
	memcpy(&fdesc->u.qcn9274.msdu_end, &ldesc->u.qcn9274.msdu_end,
	       sizeof(struct rx_msdu_end_qcn9274));
}

static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
{
	return __le16_to_cpu(desc->u.qcn9274.mpdu_start.phy_ppdu_id);
}

static void ath12k_hw_qcn9274_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
{
	u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info10);

	info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
	info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);

	desc->u.qcn9274.msdu_end.info10 = __cpu_to_le32(info);
}

static u8 *ath12k_hw_qcn9274_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
{
	return &desc->u.qcn9274.msdu_payload[0];
}

static u32 ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset(void)
{
	return offsetof(struct hal_rx_desc_qcn9274, mpdu_start);
}

static u32 ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset(void)
{
	return offsetof(struct hal_rx_desc_qcn9274, msdu_end);
}

static bool ath12k_hw_qcn9274_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
{
	return __le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
	       RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
}

static u8 *ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
{
	return desc->u.qcn9274.mpdu_start.addr2;
}

static bool ath12k_hw_qcn9274_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
{
	return __le16_to_cpu(desc->u.qcn9274.msdu_end.info5) &
	       RX_MSDU_END_INFO5_DA_IS_MCBC;
}

static void ath12k_hw_qcn9274_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
						    struct ieee80211_hdr *hdr)
{
	hdr->frame_control = desc->u.qcn9274.mpdu_start.frame_ctrl;
	hdr->duration_id = desc->u.qcn9274.mpdu_start.duration;
	ether_addr_copy(hdr->addr1, desc->u.qcn9274.mpdu_start.addr1);
	ether_addr_copy(hdr->addr2, desc->u.qcn9274.mpdu_start.addr2);
	ether_addr_copy(hdr->addr3, desc->u.qcn9274.mpdu_start.addr3);
	if (__le32_to_cpu(desc->u.qcn9274.mpdu_start.info4) &
	    RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
		ether_addr_copy(hdr->addr4, desc->u.qcn9274.mpdu_start.addr4);
	}
	hdr->seq_ctrl = desc->u.qcn9274.mpdu_start.seq_ctrl;
}

static void ath12k_hw_qcn9274_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
						     u8 *crypto_hdr,
						     enum hal_encrypt_type enctype)
{
	unsigned int key_id;

	switch (enctype) {
	case HAL_ENCRYPT_TYPE_OPEN:
		return;
	case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
	case HAL_ENCRYPT_TYPE_TKIP_MIC:
		crypto_hdr[0] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
		crypto_hdr[1] = 0;
		crypto_hdr[2] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
		break;
	case HAL_ENCRYPT_TYPE_CCMP_128:
	case HAL_ENCRYPT_TYPE_CCMP_256:
	case HAL_ENCRYPT_TYPE_GCMP_128:
	case HAL_ENCRYPT_TYPE_AES_GCMP_256:
		crypto_hdr[0] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[0]);
		crypto_hdr[1] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[0]);
		crypto_hdr[2] = 0;
		break;
	case HAL_ENCRYPT_TYPE_WEP_40:
	case HAL_ENCRYPT_TYPE_WEP_104:
	case HAL_ENCRYPT_TYPE_WEP_128:
	case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
	case HAL_ENCRYPT_TYPE_WAPI:
		return;
	}
	key_id = le32_get_bits(desc->u.qcn9274.mpdu_start.info5,
			       RX_MPDU_START_INFO5_KEY_ID);
	crypto_hdr[3] = 0x20 | (key_id << 6);
	crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274.mpdu_start.pn[0]);
	crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274.mpdu_start.pn[0]);
	crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274.mpdu_start.pn[1]);
	crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274.mpdu_start.pn[1]);
}

static int ath12k_hal_srng_create_config_qcn9274(struct ath12k_base *ab)
{
	struct ath12k_hal *hal = &ab->hal;
	struct hal_srng_config *s;

	hal->srng_config = kmemdup(hw_srng_config_template,
				   sizeof(hw_srng_config_template),
				   GFP_KERNEL);
	if (!hal->srng_config)
		return -ENOMEM;

	s = &hal->srng_config[HAL_REO_DST];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
	s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
	s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;

	s = &hal->srng_config[HAL_REO_EXCEPTION];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;

	s = &hal->srng_config[HAL_REO_REINJECT];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
	s->reg_size[0] = HAL_SW2REO1_RING_BASE_LSB(ab) - HAL_SW2REO_RING_BASE_LSB(ab);
	s->reg_size[1] = HAL_SW2REO1_RING_HP - HAL_SW2REO_RING_HP;

	s = &hal->srng_config[HAL_REO_CMD];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;

	s = &hal->srng_config[HAL_REO_STATUS];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;

	s = &hal->srng_config[HAL_TCL_DATA];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
	s->reg_size[0] = HAL_TCL2_RING_BASE_LSB(ab) - HAL_TCL1_RING_BASE_LSB(ab);
	s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;

	s = &hal->srng_config[HAL_TCL_CMD];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;

	s = &hal->srng_config[HAL_TCL_STATUS];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;

	s = &hal->srng_config[HAL_CE_SRC];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_HP;
	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);
	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
		HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);

	s = &hal->srng_config[HAL_CE_DST];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_HP;
	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);

	s = &hal->srng_config[HAL_CE_DST_STATUS];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) +
		HAL_CE_DST_STATUS_RING_BASE_LSB;
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_STATUS_RING_HP;
	s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
	s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
		HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);

	s = &hal->srng_config[HAL_WBM_IDLE_LINK];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;

	s = &hal->srng_config[HAL_SW2WBM_RELEASE];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
		HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
	s->reg_size[0] = HAL_WBM_SW1_RELEASE_RING_BASE_LSB(ab) -
		HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
	s->reg_size[1] = HAL_WBM_SW1_RELEASE_RING_HP - HAL_WBM_SW_RELEASE_RING_HP;

	s = &hal->srng_config[HAL_WBM2SW_RELEASE];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
	s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
		HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
	s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;

	/* Some LMAC rings are not accessed from the host:
	 * RXDMA_BUF, RXDMA_DST, RXDMA_MONITOR_BUF, RXDMA_MONITOR_STATUS,
	 * RXDMA_MONITOR_DST, RXDMA_MONITOR_DESC, RXDMA_DIR_BUF_SRC,
	 * RXDMA_RX_MONITOR_BUF, TX_MONITOR_BUF, TX_MONITOR_DST, SW2RXDMA
	 */
	s = &hal->srng_config[HAL_PPE2TCL];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_BASE_LSB;
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_PPE2TCL1_RING_HP;

	s = &hal->srng_config[HAL_PPE_RELEASE];
	s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
		HAL_WBM_PPE_RELEASE_RING_BASE_LSB(ab);
	s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_PPE_RELEASE_RING_HP;

	return 0;
}

static u16 ath12k_hal_qcn9274_rx_mpdu_start_wmask_get(void)
{
	return QCN9274_MPDU_START_WMASK;
}

static u32 ath12k_hal_qcn9274_rx_msdu_end_wmask_get(void)
{
	return QCN9274_MSDU_END_WMASK;
}

static const struct hal_rx_ops *ath12k_hal_qcn9274_get_hal_rx_compact_ops(void)
{
	return &hal_rx_qcn9274_compact_ops;
}

static bool ath12k_hw_qcn9274_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info14,
			       RX_MSDU_END_INFO14_MSDU_DONE);
}

static bool ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
			       RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
}

static bool ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274.msdu_end.info13,
			       RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
}

static bool ath12k_hw_qcn9274_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
{
	return (le32_get_bits(desc->u.qcn9274.msdu_end.info14,
			      RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
		RX_DESC_DECRYPT_STATUS_CODE_OK);
}

static u32 ath12k_hw_qcn9274_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
{
	u32 info = __le32_to_cpu(desc->u.qcn9274.msdu_end.info13);
	u32 errmap = 0;

	if (info & RX_MSDU_END_INFO13_FCS_ERR)
		errmap |= HAL_RX_MPDU_ERR_FCS;

	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
		errmap |= HAL_RX_MPDU_ERR_DECRYPT;

	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
		errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;

	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
		errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;

	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
		errmap |= HAL_RX_MPDU_ERR_OVERFLOW;

	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
		errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;

	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
		errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;

	return errmap;
}

static u32 ath12k_hw_qcn9274_get_rx_desc_size(void)
{
	return sizeof(struct hal_rx_desc_qcn9274);
}

static u8 ath12k_hw_qcn9274_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
{
	return 0;
}

const struct hal_rx_ops hal_rx_qcn9274_ops = {
	.rx_desc_get_first_msdu = ath12k_hw_qcn9274_rx_desc_get_first_msdu,
	.rx_desc_get_last_msdu = ath12k_hw_qcn9274_rx_desc_get_last_msdu,
	.rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_rx_desc_get_l3_pad_bytes,
	.rx_desc_encrypt_valid = ath12k_hw_qcn9274_rx_desc_encrypt_valid,
	.rx_desc_get_encrypt_type = ath12k_hw_qcn9274_rx_desc_get_encrypt_type,
	.rx_desc_get_decap_type = ath12k_hw_qcn9274_rx_desc_get_decap_type,
	.rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_rx_desc_get_mesh_ctl,
	.rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_qcn9274_rx_desc_get_mpdu_seq_ctl_vld,
	.rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_rx_desc_get_mpdu_fc_valid,
	.rx_desc_get_mpdu_start_seq_no = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_seq_no,
	.rx_desc_get_msdu_len = ath12k_hw_qcn9274_rx_desc_get_msdu_len,
	.rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_rx_desc_get_msdu_sgi,
	.rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_rx_desc_get_msdu_rate_mcs,
	.rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_rx_desc_get_msdu_rx_bw,
	.rx_desc_get_msdu_freq = ath12k_hw_qcn9274_rx_desc_get_msdu_freq,
	.rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_rx_desc_get_msdu_pkt_type,
	.rx_desc_get_msdu_nss = ath12k_hw_qcn9274_rx_desc_get_msdu_nss,
	.rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_rx_desc_get_mpdu_tid,
	.rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_peer_id,
	.rx_desc_copy_end_tlv = ath12k_hw_qcn9274_rx_desc_copy_end_tlv,
	.rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_rx_desc_get_mpdu_ppdu_id,
	.rx_desc_set_msdu_len = ath12k_hw_qcn9274_rx_desc_set_msdu_len,
	.rx_desc_get_msdu_payload = ath12k_hw_qcn9274_rx_desc_get_msdu_payload,
	.rx_desc_get_mpdu_start_offset = ath12k_hw_qcn9274_rx_desc_get_mpdu_start_offset,
	.rx_desc_get_msdu_end_offset = ath12k_hw_qcn9274_rx_desc_get_msdu_end_offset,
	.rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_rx_desc_mac_addr2_valid,
	.rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_rx_desc_mpdu_start_addr2,
	.rx_desc_is_da_mcbc = ath12k_hw_qcn9274_rx_desc_is_da_mcbc,
	.rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_rx_desc_get_dot11_hdr,
	.rx_desc_get_crypto_header = ath12k_hw_qcn9274_rx_desc_get_crypto_hdr,
	.dp_rx_h_msdu_done = ath12k_hw_qcn9274_dp_rx_h_msdu_done,
	.dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_l4_cksum_fail,
	.dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_dp_rx_h_ip_cksum_fail,
	.dp_rx_h_is_decrypted = ath12k_hw_qcn9274_dp_rx_h_is_decrypted,
	.dp_rx_h_mpdu_err = ath12k_hw_qcn9274_dp_rx_h_mpdu_err,
	.rx_desc_get_desc_size = ath12k_hw_qcn9274_get_rx_desc_size,
	.rx_desc_get_msdu_src_link_id = ath12k_hw_qcn9274_rx_desc_get_msdu_src_link,
};

static bool ath12k_hw_qcn9274_compact_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
{
	return !!le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
			       RX_MSDU_END_INFO5_FIRST_MSDU);
}

static bool ath12k_hw_qcn9274_compact_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
{
	return !!le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
			       RX_MSDU_END_INFO5_LAST_MSDU);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
{
	return le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
			     RX_MSDU_END_INFO5_L3_HDR_PADDING);
}

static bool ath12k_hw_qcn9274_compact_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
			       RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
}

static u32 ath12k_hw_qcn9274_compact_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info2,
			     RX_MPDU_START_INFO2_ENC_TYPE);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_decap_type(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info11,
			     RX_MSDU_END_INFO11_DECAP_FORMAT);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274.msdu_end.info11,
			     RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
}

static bool
ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
			       RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
}

static bool ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
			       RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
}

static u16
ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info4,
			     RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
}

static u16 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info10,
			     RX_MSDU_END_INFO10_MSDU_LENGTH);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
			     RX_MSDU_END_INFO12_SGI);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
			     RX_MSDU_END_INFO12_RATE_MCS);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
			     RX_MSDU_END_INFO12_RECV_BW);
}

static u32 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
{
	return __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.phy_meta_data);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
			     RX_MSDU_END_INFO12_PKT_TYPE);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
{
	return le32_get_bits(desc->u.qcn9274_compact.msdu_end.info12,
			     RX_MSDU_END_QCN9274_INFO12_MIMO_SS_BITMAP);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
{
	return le16_get_bits(desc->u.qcn9274_compact.msdu_end.info5,
			     RX_MSDU_END_QCN9274_INFO5_TID);
}

static u16 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
{
	return __le16_to_cpu(desc->u.qcn9274_compact.mpdu_start.sw_peer_id);
}

static void ath12k_hw_qcn9274_compact_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
							   struct hal_rx_desc *ldesc)
{
	fdesc->u.qcn9274_compact.msdu_end = ldesc->u.qcn9274_compact.msdu_end;
}

static u32 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
{
	return __le16_to_cpu(desc->u.qcn9274_compact.mpdu_start.phy_ppdu_id);
}

static void
ath12k_hw_qcn9274_compact_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
{
	u32 info = __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.info10);

	info = u32_replace_bits(info, len, RX_MSDU_END_INFO10_MSDU_LENGTH);
	desc->u.qcn9274_compact.msdu_end.info10 = __cpu_to_le32(info);
}

static u8 *ath12k_hw_qcn9274_compact_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
{
	return &desc->u.qcn9274_compact.msdu_payload[0];
}

static u32 ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_offset(void)
{
	return offsetof(struct hal_rx_desc_qcn9274_compact, mpdu_start);
}

static u32 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_end_offset(void)
{
	return offsetof(struct hal_rx_desc_qcn9274_compact, msdu_end);
}

static bool ath12k_hw_qcn9274_compact_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
{
	return __le32_to_cpu(desc->u.qcn9274_compact.mpdu_start.info4) &
	       RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
}

static u8 *ath12k_hw_qcn9274_compact_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
{
	return desc->u.qcn9274_compact.mpdu_start.addr2;
}

static bool ath12k_hw_qcn9274_compact_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
{
	return __le16_to_cpu(desc->u.qcn9274_compact.msdu_end.info5) &
	       RX_MSDU_END_INFO5_DA_IS_MCBC;
}

static void ath12k_hw_qcn9274_compact_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
							    struct ieee80211_hdr *hdr)
{
	hdr->frame_control = desc->u.qcn9274_compact.mpdu_start.frame_ctrl;
	hdr->duration_id = desc->u.qcn9274_compact.mpdu_start.duration;
	ether_addr_copy(hdr->addr1, desc->u.qcn9274_compact.mpdu_start.addr1);
	ether_addr_copy(hdr->addr2, desc->u.qcn9274_compact.mpdu_start.addr2);
	ether_addr_copy(hdr->addr3, desc->u.qcn9274_compact.mpdu_start.addr3);
	if (__le32_to_cpu(desc->u.qcn9274_compact.mpdu_start.info4) &
	    RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
		ether_addr_copy(hdr->addr4, desc->u.qcn9274_compact.mpdu_start.addr4);
	}
	hdr->seq_ctrl = desc->u.qcn9274_compact.mpdu_start.seq_ctrl;
}

static void
ath12k_hw_qcn9274_compact_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
						 u8 *crypto_hdr,
						 enum hal_encrypt_type enctype)
{
	unsigned int key_id;

	switch (enctype) {
	case HAL_ENCRYPT_TYPE_OPEN:
		return;
	case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
	case HAL_ENCRYPT_TYPE_TKIP_MIC:
		crypto_hdr[0] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[0]);
		crypto_hdr[1] = 0;
		crypto_hdr[2] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[0]);
		break;
	case HAL_ENCRYPT_TYPE_CCMP_128:
	case HAL_ENCRYPT_TYPE_CCMP_256:
	case HAL_ENCRYPT_TYPE_GCMP_128:
	case HAL_ENCRYPT_TYPE_AES_GCMP_256:
		crypto_hdr[0] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[0]);
		crypto_hdr[1] =
			HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[0]);
		crypto_hdr[2] = 0;
		break;
	case HAL_ENCRYPT_TYPE_WEP_40:
	case HAL_ENCRYPT_TYPE_WEP_104:
	case HAL_ENCRYPT_TYPE_WEP_128:
	case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
	case HAL_ENCRYPT_TYPE_WAPI:
		return;
	}
	key_id = le32_get_bits(desc->u.qcn9274_compact.mpdu_start.info5,
			       RX_MPDU_START_INFO5_KEY_ID);
	crypto_hdr[3] = 0x20 | (key_id << 6);
	crypto_hdr[4] =
		HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.qcn9274_compact.mpdu_start.pn[0]);
	crypto_hdr[5] =
		HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.qcn9274_compact.mpdu_start.pn[0]);
	crypto_hdr[6] =
		HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.qcn9274_compact.mpdu_start.pn[1]);
	crypto_hdr[7] =
		HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.qcn9274_compact.mpdu_start.pn[1]);
}

static bool ath12k_hw_qcn9274_compact_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info14,
			       RX_MSDU_END_INFO14_MSDU_DONE);
}

static bool ath12k_hw_qcn9274_compact_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info13,
			       RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
}

static bool ath12k_hw_qcn9274_compact_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
{
	return !!le32_get_bits(desc->u.qcn9274_compact.msdu_end.info13,
			       RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
}

static bool ath12k_hw_qcn9274_compact_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
{
	return (le32_get_bits(desc->u.qcn9274_compact.msdu_end.info14,
			      RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
		RX_DESC_DECRYPT_STATUS_CODE_OK);
}

static u32 ath12k_hw_qcn9274_compact_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
{
	u32 info = __le32_to_cpu(desc->u.qcn9274_compact.msdu_end.info13);
	u32 errmap = 0;

	if (info & RX_MSDU_END_INFO13_FCS_ERR)
		errmap |= HAL_RX_MPDU_ERR_FCS;

	if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
		errmap |= HAL_RX_MPDU_ERR_DECRYPT;

	if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
		errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;

	if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
		errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;

	if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
		errmap |= HAL_RX_MPDU_ERR_OVERFLOW;

	if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
		errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;

	if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
		errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;

	return errmap;
}

static u32 ath12k_hw_qcn9274_compact_get_rx_desc_size(void)
{
	return sizeof(struct hal_rx_desc_qcn9274_compact);
}

static u8 ath12k_hw_qcn9274_compact_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
{
	return le64_get_bits(desc->u.qcn9274_compact.msdu_end.msdu_end_tag,
			     RX_MSDU_END_64_TLV_SRC_LINK_ID);
}

const struct hal_rx_ops hal_rx_qcn9274_compact_ops = {
	.rx_desc_get_first_msdu = ath12k_hw_qcn9274_compact_rx_desc_get_first_msdu,
	.rx_desc_get_last_msdu = ath12k_hw_qcn9274_compact_rx_desc_get_last_msdu,
	.rx_desc_get_l3_pad_bytes = ath12k_hw_qcn9274_compact_rx_desc_get_l3_pad_bytes,
	.rx_desc_encrypt_valid = ath12k_hw_qcn9274_compact_rx_desc_encrypt_valid,
	.rx_desc_get_encrypt_type = ath12k_hw_qcn9274_compact_rx_desc_get_encrypt_type,
	.rx_desc_get_decap_type = ath12k_hw_qcn9274_compact_rx_desc_get_decap_type,
	.rx_desc_get_mesh_ctl = ath12k_hw_qcn9274_compact_rx_desc_get_mesh_ctl,
	.rx_desc_get_mpdu_seq_ctl_vld =
		ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_seq_ctl_vld,
	.rx_desc_get_mpdu_fc_valid = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_fc_valid,
	.rx_desc_get_mpdu_start_seq_no =
		ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_seq_no,
	.rx_desc_get_msdu_len = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_len,
	.rx_desc_get_msdu_sgi = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_sgi,
	.rx_desc_get_msdu_rate_mcs = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rate_mcs,
	.rx_desc_get_msdu_rx_bw = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_rx_bw,
	.rx_desc_get_msdu_freq = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_freq,
	.rx_desc_get_msdu_pkt_type = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_pkt_type,
	.rx_desc_get_msdu_nss = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_nss,
	.rx_desc_get_mpdu_tid = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_tid,
	.rx_desc_get_mpdu_peer_id = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_peer_id,
	.rx_desc_copy_end_tlv = ath12k_hw_qcn9274_compact_rx_desc_copy_end_tlv,
	.rx_desc_get_mpdu_ppdu_id = ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_ppdu_id,
	.rx_desc_set_msdu_len = ath12k_hw_qcn9274_compact_rx_desc_set_msdu_len,
	.rx_desc_get_msdu_payload = ath12k_hw_qcn9274_compact_rx_desc_get_msdu_payload,
	.rx_desc_get_mpdu_start_offset =
		ath12k_hw_qcn9274_compact_rx_desc_get_mpdu_start_offset,
	.rx_desc_get_msdu_end_offset =
		ath12k_hw_qcn9274_compact_rx_desc_get_msdu_end_offset,
	.rx_desc_mac_addr2_valid = ath12k_hw_qcn9274_compact_rx_desc_mac_addr2_valid,
	.rx_desc_mpdu_start_addr2 = ath12k_hw_qcn9274_compact_rx_desc_mpdu_start_addr2,
	.rx_desc_is_da_mcbc = ath12k_hw_qcn9274_compact_rx_desc_is_da_mcbc,
	.rx_desc_get_dot11_hdr = ath12k_hw_qcn9274_compact_rx_desc_get_dot11_hdr,
	.rx_desc_get_crypto_header = ath12k_hw_qcn9274_compact_rx_desc_get_crypto_hdr,
	.dp_rx_h_msdu_done = ath12k_hw_qcn9274_compact_dp_rx_h_msdu_done,
	.dp_rx_h_l4_cksum_fail = ath12k_hw_qcn9274_compact_dp_rx_h_l4_cksum_fail,
	.dp_rx_h_ip_cksum_fail = ath12k_hw_qcn9274_compact_dp_rx_h_ip_cksum_fail,
	.dp_rx_h_is_decrypted = ath12k_hw_qcn9274_compact_dp_rx_h_is_decrypted,
	.dp_rx_h_mpdu_err = ath12k_hw_qcn9274_compact_dp_rx_h_mpdu_err,
	.rx_desc_get_desc_size = ath12k_hw_qcn9274_compact_get_rx_desc_size,
	.rx_desc_get_msdu_src_link_id =
		ath12k_hw_qcn9274_compact_rx_desc_get_msdu_src_link,
};

const struct hal_ops hal_qcn9274_ops = {
	.create_srng_config = ath12k_hal_srng_create_config_qcn9274,
	.tcl_to_wbm_rbm_map = ath12k_hal_qcn9274_tcl_to_wbm_rbm_map,
	.rxdma_ring_wmask_rx_mpdu_start = ath12k_hal_qcn9274_rx_mpdu_start_wmask_get,
	.rxdma_ring_wmask_rx_msdu_end = ath12k_hal_qcn9274_rx_msdu_end_wmask_get,
	.get_hal_rx_compact_ops = ath12k_hal_qcn9274_get_hal_rx_compact_ops,
};
1089
ath12k_hw_wcn7850_rx_desc_get_first_msdu(struct hal_rx_desc * desc)1090 static bool ath12k_hw_wcn7850_rx_desc_get_first_msdu(struct hal_rx_desc *desc)
1091 {
1092 return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1093 RX_MSDU_END_INFO5_FIRST_MSDU);
1094 }
1095
ath12k_hw_wcn7850_rx_desc_get_last_msdu(struct hal_rx_desc * desc)1096 static bool ath12k_hw_wcn7850_rx_desc_get_last_msdu(struct hal_rx_desc *desc)
1097 {
1098 return !!le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1099 RX_MSDU_END_INFO5_LAST_MSDU);
1100 }
1101
ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes(struct hal_rx_desc * desc)1102 static u8 ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes(struct hal_rx_desc *desc)
1103 {
1104 return le16_get_bits(desc->u.wcn7850.msdu_end.info5,
1105 RX_MSDU_END_INFO5_L3_HDR_PADDING);
1106 }
1107
ath12k_hw_wcn7850_rx_desc_encrypt_valid(struct hal_rx_desc * desc)1108 static bool ath12k_hw_wcn7850_rx_desc_encrypt_valid(struct hal_rx_desc *desc)
1109 {
1110 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1111 RX_MPDU_START_INFO4_ENCRYPT_INFO_VALID);
1112 }
1113
ath12k_hw_wcn7850_rx_desc_get_encrypt_type(struct hal_rx_desc * desc)1114 static u32 ath12k_hw_wcn7850_rx_desc_get_encrypt_type(struct hal_rx_desc *desc)
1115 {
1116 return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
1117 RX_MPDU_START_INFO2_ENC_TYPE);
1118 }
1119
ath12k_hw_wcn7850_rx_desc_get_decap_type(struct hal_rx_desc * desc)1120 static u8 ath12k_hw_wcn7850_rx_desc_get_decap_type(struct hal_rx_desc *desc)
1121 {
1122 return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
1123 RX_MSDU_END_INFO11_DECAP_FORMAT);
1124 }
1125
ath12k_hw_wcn7850_rx_desc_get_mesh_ctl(struct hal_rx_desc * desc)1126 static u8 ath12k_hw_wcn7850_rx_desc_get_mesh_ctl(struct hal_rx_desc *desc)
1127 {
1128 return le32_get_bits(desc->u.wcn7850.msdu_end.info11,
1129 RX_MSDU_END_INFO11_MESH_CTRL_PRESENT);
1130 }
1131
ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc * desc)1132 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld(struct hal_rx_desc *desc)
1133 {
1134 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1135 RX_MPDU_START_INFO4_MPDU_SEQ_CTRL_VALID);
1136 }
1137
ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc * desc)1138 static bool ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid(struct hal_rx_desc *desc)
1139 {
1140 return !!le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1141 RX_MPDU_START_INFO4_MPDU_FCTRL_VALID);
1142 }
1143
ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc * desc)1144 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no(struct hal_rx_desc *desc)
1145 {
1146 return le32_get_bits(desc->u.wcn7850.mpdu_start.info4,
1147 RX_MPDU_START_INFO4_MPDU_SEQ_NUM);
1148 }
1149
ath12k_hw_wcn7850_rx_desc_get_msdu_len(struct hal_rx_desc * desc)1150 static u16 ath12k_hw_wcn7850_rx_desc_get_msdu_len(struct hal_rx_desc *desc)
1151 {
1152 return le32_get_bits(desc->u.wcn7850.msdu_end.info10,
1153 RX_MSDU_END_INFO10_MSDU_LENGTH);
1154 }
1155
ath12k_hw_wcn7850_rx_desc_get_msdu_sgi(struct hal_rx_desc * desc)1156 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_sgi(struct hal_rx_desc *desc)
1157 {
1158 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1159 RX_MSDU_END_INFO12_SGI);
1160 }
1161
ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc * desc)1162 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs(struct hal_rx_desc *desc)
1163 {
1164 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1165 RX_MSDU_END_INFO12_RATE_MCS);
1166 }
1167
ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw(struct hal_rx_desc * desc)1168 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw(struct hal_rx_desc *desc)
1169 {
1170 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1171 RX_MSDU_END_INFO12_RECV_BW);
1172 }
1173
ath12k_hw_wcn7850_rx_desc_get_msdu_freq(struct hal_rx_desc * desc)1174 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_freq(struct hal_rx_desc *desc)
1175 {
1176 return __le32_to_cpu(desc->u.wcn7850.msdu_end.phy_meta_data);
1177 }
1178
ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type(struct hal_rx_desc * desc)1179 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type(struct hal_rx_desc *desc)
1180 {
1181 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1182 RX_MSDU_END_INFO12_PKT_TYPE);
1183 }
1184
ath12k_hw_wcn7850_rx_desc_get_msdu_nss(struct hal_rx_desc * desc)1185 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_nss(struct hal_rx_desc *desc)
1186 {
1187 return le32_get_bits(desc->u.wcn7850.msdu_end.info12,
1188 RX_MSDU_END_WCN7850_INFO12_MIMO_SS_BITMAP);
1189 }
1190
ath12k_hw_wcn7850_rx_desc_get_mpdu_tid(struct hal_rx_desc * desc)1191 static u8 ath12k_hw_wcn7850_rx_desc_get_mpdu_tid(struct hal_rx_desc *desc)
1192 {
1193 return le32_get_bits(desc->u.wcn7850.mpdu_start.info2,
1194 RX_MPDU_START_INFO2_TID);
1195 }
1196
ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id(struct hal_rx_desc * desc)1197 static u16 ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id(struct hal_rx_desc *desc)
1198 {
1199 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.sw_peer_id);
1200 }
1201
ath12k_hw_wcn7850_rx_desc_copy_end_tlv(struct hal_rx_desc * fdesc,struct hal_rx_desc * ldesc)1202 static void ath12k_hw_wcn7850_rx_desc_copy_end_tlv(struct hal_rx_desc *fdesc,
1203 struct hal_rx_desc *ldesc)
1204 {
1205 memcpy(&fdesc->u.wcn7850.msdu_end, &ldesc->u.wcn7850.msdu_end,
1206 sizeof(struct rx_msdu_end_wcn7850));
1207 }
1208
ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag(struct hal_rx_desc * desc)1209 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag(struct hal_rx_desc *desc)
1210 {
1211 return le64_get_bits(desc->u.wcn7850.mpdu_start_tag,
1212 HAL_TLV_HDR_TAG);
1213 }
1214
ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc * desc)1215 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id(struct hal_rx_desc *desc)
1216 {
1217 return __le16_to_cpu(desc->u.wcn7850.mpdu_start.phy_ppdu_id);
1218 }
1219
ath12k_hw_wcn7850_rx_desc_set_msdu_len(struct hal_rx_desc * desc,u16 len)1220 static void ath12k_hw_wcn7850_rx_desc_set_msdu_len(struct hal_rx_desc *desc, u16 len)
1221 {
1222 u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info10);
1223
1224 info &= ~RX_MSDU_END_INFO10_MSDU_LENGTH;
1225 info |= u32_encode_bits(len, RX_MSDU_END_INFO10_MSDU_LENGTH);
1226
1227 desc->u.wcn7850.msdu_end.info10 = __cpu_to_le32(info);
1228 }
1229
ath12k_hw_wcn7850_rx_desc_get_msdu_payload(struct hal_rx_desc * desc)1230 static u8 *ath12k_hw_wcn7850_rx_desc_get_msdu_payload(struct hal_rx_desc *desc)
1231 {
1232 return &desc->u.wcn7850.msdu_payload[0];
1233 }
1234
ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset(void)1235 static u32 ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset(void)
1236 {
1237 return offsetof(struct hal_rx_desc_wcn7850, mpdu_start_tag);
1238 }
1239
ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset(void)1240 static u32 ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset(void)
1241 {
1242 return offsetof(struct hal_rx_desc_wcn7850, msdu_end_tag);
1243 }
1244
ath12k_hw_wcn7850_rx_desc_mac_addr2_valid(struct hal_rx_desc * desc)1245 static bool ath12k_hw_wcn7850_rx_desc_mac_addr2_valid(struct hal_rx_desc *desc)
1246 {
1247 return __le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
1248 RX_MPDU_START_INFO4_MAC_ADDR2_VALID;
1249 }
1250
ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2(struct hal_rx_desc * desc)1251 static u8 *ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2(struct hal_rx_desc *desc)
1252 {
1253 return desc->u.wcn7850.mpdu_start.addr2;
1254 }
1255
ath12k_hw_wcn7850_rx_desc_is_da_mcbc(struct hal_rx_desc * desc)1256 static bool ath12k_hw_wcn7850_rx_desc_is_da_mcbc(struct hal_rx_desc *desc)
1257 {
1258 return __le32_to_cpu(desc->u.wcn7850.msdu_end.info13) &
1259 RX_MSDU_END_INFO13_MCAST_BCAST;
1260 }
1261
ath12k_hw_wcn7850_rx_desc_get_dot11_hdr(struct hal_rx_desc * desc,struct ieee80211_hdr * hdr)1262 static void ath12k_hw_wcn7850_rx_desc_get_dot11_hdr(struct hal_rx_desc *desc,
1263 struct ieee80211_hdr *hdr)
1264 {
1265 hdr->frame_control = desc->u.wcn7850.mpdu_start.frame_ctrl;
1266 hdr->duration_id = desc->u.wcn7850.mpdu_start.duration;
1267 ether_addr_copy(hdr->addr1, desc->u.wcn7850.mpdu_start.addr1);
1268 ether_addr_copy(hdr->addr2, desc->u.wcn7850.mpdu_start.addr2);
1269 ether_addr_copy(hdr->addr3, desc->u.wcn7850.mpdu_start.addr3);
1270 if (__le32_to_cpu(desc->u.wcn7850.mpdu_start.info4) &
1271 RX_MPDU_START_INFO4_MAC_ADDR4_VALID) {
1272 ether_addr_copy(hdr->addr4, desc->u.wcn7850.mpdu_start.addr4);
1273 }
1274 hdr->seq_ctrl = desc->u.wcn7850.mpdu_start.seq_ctrl;
1275 }
1276
ath12k_hw_wcn7850_rx_desc_get_crypto_hdr(struct hal_rx_desc * desc,u8 * crypto_hdr,enum hal_encrypt_type enctype)1277 static void ath12k_hw_wcn7850_rx_desc_get_crypto_hdr(struct hal_rx_desc *desc,
1278 u8 *crypto_hdr,
1279 enum hal_encrypt_type enctype)
1280 {
1281 unsigned int key_id;
1282
1283 switch (enctype) {
1284 case HAL_ENCRYPT_TYPE_OPEN:
1285 return;
1286 case HAL_ENCRYPT_TYPE_TKIP_NO_MIC:
1287 case HAL_ENCRYPT_TYPE_TKIP_MIC:
1288 crypto_hdr[0] =
1289 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
1290 crypto_hdr[1] = 0;
1291 crypto_hdr[2] =
1292 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
1293 break;
1294 case HAL_ENCRYPT_TYPE_CCMP_128:
1295 case HAL_ENCRYPT_TYPE_CCMP_256:
1296 case HAL_ENCRYPT_TYPE_GCMP_128:
1297 case HAL_ENCRYPT_TYPE_AES_GCMP_256:
1298 crypto_hdr[0] =
1299 HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[0]);
1300 crypto_hdr[1] =
1301 HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[0]);
1302 crypto_hdr[2] = 0;
1303 break;
1304 case HAL_ENCRYPT_TYPE_WEP_40:
1305 case HAL_ENCRYPT_TYPE_WEP_104:
1306 case HAL_ENCRYPT_TYPE_WEP_128:
1307 case HAL_ENCRYPT_TYPE_WAPI_GCM_SM4:
1308 case HAL_ENCRYPT_TYPE_WAPI:
1309 return;
1310 }
1311 key_id = u32_get_bits(__le32_to_cpu(desc->u.wcn7850.mpdu_start.info5),
1312 RX_MPDU_START_INFO5_KEY_ID);
1313 crypto_hdr[3] = 0x20 | (key_id << 6);
1314 crypto_hdr[4] = HAL_RX_MPDU_INFO_PN_GET_BYTE3(desc->u.wcn7850.mpdu_start.pn[0]);
1315 crypto_hdr[5] = HAL_RX_MPDU_INFO_PN_GET_BYTE4(desc->u.wcn7850.mpdu_start.pn[0]);
1316 crypto_hdr[6] = HAL_RX_MPDU_INFO_PN_GET_BYTE1(desc->u.wcn7850.mpdu_start.pn[1]);
1317 crypto_hdr[7] = HAL_RX_MPDU_INFO_PN_GET_BYTE2(desc->u.wcn7850.mpdu_start.pn[1]);
1318 }
1319
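/* Clone the common SRNG config template and patch in the WCN7850 specific
 * register offsets, register strides and ring counts. Rings that this chip
 * does not use are disabled by setting max_rings to 0.
 */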
1320 static int ath12k_hal_srng_create_config_wcn7850(struct ath12k_base *ab)
1321 {
1322 struct ath12k_hal *hal = &ab->hal;
1323 struct hal_srng_config *s;
1324
1325 hal->srng_config = kmemdup(hw_srng_config_template,
1326 sizeof(hw_srng_config_template),
1327 GFP_KERNEL);
1328 if (!hal->srng_config)
1329 return -ENOMEM;
1330
1331 s = &hal->srng_config[HAL_REO_DST];
1332 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_BASE_LSB(ab);
1333 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO1_RING_HP;
1334 s->reg_size[0] = HAL_REO2_RING_BASE_LSB(ab) - HAL_REO1_RING_BASE_LSB(ab);
1335 s->reg_size[1] = HAL_REO2_RING_HP - HAL_REO1_RING_HP;
1336
1337 s = &hal->srng_config[HAL_REO_EXCEPTION];
1338 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_BASE_LSB(ab);
1339 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_SW0_RING_HP;
1340
1341 s = &hal->srng_config[HAL_REO_REINJECT];
1342 s->max_rings = 1;
1343 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_BASE_LSB(ab);
1344 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_SW2REO_RING_HP;
1345
1346 s = &hal->srng_config[HAL_REO_CMD];
1347 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_RING_BASE_LSB(ab);
1348 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_CMD_HP;
1349
1350 s = &hal->srng_config[HAL_REO_STATUS];
1351 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_RING_BASE_LSB(ab);
1352 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_REO_REG + HAL_REO_STATUS_HP;
1353
1354 s = &hal->srng_config[HAL_TCL_DATA];
1355 s->max_rings = 5;
1356 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_BASE_LSB(ab);
1357 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL1_RING_HP;
1358 s->reg_size[0] = HAL_TCL2_RING_BASE_LSB(ab) - HAL_TCL1_RING_BASE_LSB(ab);
1359 s->reg_size[1] = HAL_TCL2_RING_HP - HAL_TCL1_RING_HP;
1360
1361 s = &hal->srng_config[HAL_TCL_CMD];
1362 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_BASE_LSB(ab);
1363 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_RING_HP;
1364
1365 s = &hal->srng_config[HAL_TCL_STATUS];
1366 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_BASE_LSB(ab);
1367 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_TCL_REG + HAL_TCL_STATUS_RING_HP;
1368
1369 s = &hal->srng_config[HAL_CE_SRC];
1370 s->max_rings = 12;
1371 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
1372 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab) + HAL_CE_DST_RING_HP;
1373 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
1374 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);
1375 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_SRC_REG(ab) -
1376 HAL_SEQ_WCSS_UMAC_CE0_SRC_REG(ab);
1377
1378 s = &hal->srng_config[HAL_CE_DST];
1379 s->max_rings = 12;
1380 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_BASE_LSB;
1381 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_RING_HP;
1382 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1383 HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1384 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1385 HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1386
1387 s = &hal->srng_config[HAL_CE_DST_STATUS];
1388 s->max_rings = 12;
1389 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) +
1390 HAL_CE_DST_STATUS_RING_BASE_LSB;
1391 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab) + HAL_CE_DST_STATUS_RING_HP;
1392 s->reg_size[0] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1393 HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1394 s->reg_size[1] = HAL_SEQ_WCSS_UMAC_CE1_DST_REG(ab) -
1395 HAL_SEQ_WCSS_UMAC_CE0_DST_REG(ab);
1396
1397 s = &hal->srng_config[HAL_WBM_IDLE_LINK];
1398 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_BASE_LSB(ab);
1399 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_IDLE_LINK_RING_HP;
1400
1401 s = &hal->srng_config[HAL_SW2WBM_RELEASE];
1402 s->max_rings = 1;
1403 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG +
1404 HAL_WBM_SW_RELEASE_RING_BASE_LSB(ab);
1405 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_SW_RELEASE_RING_HP;
1406
1407 s = &hal->srng_config[HAL_WBM2SW_RELEASE];
1408 s->reg_start[0] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1409 s->reg_start[1] = HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM0_RELEASE_RING_HP;
1410 s->reg_size[0] = HAL_WBM1_RELEASE_RING_BASE_LSB(ab) -
1411 HAL_WBM0_RELEASE_RING_BASE_LSB(ab);
1412 s->reg_size[1] = HAL_WBM1_RELEASE_RING_HP - HAL_WBM0_RELEASE_RING_HP;
1413
1414 s = &hal->srng_config[HAL_RXDMA_BUF];
1415 s->max_rings = 2;
1416 s->mac_type = ATH12K_HAL_SRNG_PMAC;
1417
1418 s = &hal->srng_config[HAL_RXDMA_DST];
1419 s->max_rings = 1;
1420 s->entry_size = sizeof(struct hal_reo_entrance_ring) >> 2;
1421
1422 /* below rings are not used */
1423 s = &hal->srng_config[HAL_RXDMA_DIR_BUF];
1424 s->max_rings = 0;
1425
1426 s = &hal->srng_config[HAL_PPE2TCL];
1427 s->max_rings = 0;
1428
1429 s = &hal->srng_config[HAL_PPE_RELEASE];
1430 s->max_rings = 0;
1431
1432 s = &hal->srng_config[HAL_TX_MONITOR_BUF];
1433 s->max_rings = 0;
1434
1435 s = &hal->srng_config[HAL_TX_MONITOR_DST];
1436 s->max_rings = 0;
1437
1438 s = &hal->srng_config[HAL_PPE2TCL];
1439 s->max_rings = 0;
1440
1441 return 0;
1442 }
1443
1444 static bool ath12k_hw_wcn7850_dp_rx_h_msdu_done(struct hal_rx_desc *desc)
1445 {
1446 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1447 RX_MSDU_END_INFO14_MSDU_DONE);
1448 }
1449
1450 static bool ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail(struct hal_rx_desc *desc)
1451 {
1452 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1453 RX_MSDU_END_INFO13_TCP_UDP_CKSUM_FAIL);
1454 }
1455
1456 static bool ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail(struct hal_rx_desc *desc)
1457 {
1458 return !!le32_get_bits(desc->u.wcn7850.msdu_end.info13,
1459 RX_MSDU_END_INFO13_IP_CKSUM_FAIL);
1460 }
1461
1462 static bool ath12k_hw_wcn7850_dp_rx_h_is_decrypted(struct hal_rx_desc *desc)
1463 {
1464 return (le32_get_bits(desc->u.wcn7850.msdu_end.info14,
1465 RX_MSDU_END_INFO14_DECRYPT_STATUS_CODE) ==
1466 RX_DESC_DECRYPT_STATUS_CODE_OK);
1467 }
1468
1469 static u32 ath12k_hw_wcn7850_dp_rx_h_mpdu_err(struct hal_rx_desc *desc)
1470 {
1471 u32 info = __le32_to_cpu(desc->u.wcn7850.msdu_end.info13);
1472 u32 errmap = 0;
1473
1474 if (info & RX_MSDU_END_INFO13_FCS_ERR)
1475 errmap |= HAL_RX_MPDU_ERR_FCS;
1476
1477 if (info & RX_MSDU_END_INFO13_DECRYPT_ERR)
1478 errmap |= HAL_RX_MPDU_ERR_DECRYPT;
1479
1480 if (info & RX_MSDU_END_INFO13_TKIP_MIC_ERR)
1481 errmap |= HAL_RX_MPDU_ERR_TKIP_MIC;
1482
1483 if (info & RX_MSDU_END_INFO13_A_MSDU_ERROR)
1484 errmap |= HAL_RX_MPDU_ERR_AMSDU_ERR;
1485
1486 if (info & RX_MSDU_END_INFO13_OVERFLOW_ERR)
1487 errmap |= HAL_RX_MPDU_ERR_OVERFLOW;
1488
1489 if (info & RX_MSDU_END_INFO13_MSDU_LEN_ERR)
1490 errmap |= HAL_RX_MPDU_ERR_MSDU_LEN;
1491
1492 if (info & RX_MSDU_END_INFO13_MPDU_LEN_ERR)
1493 errmap |= HAL_RX_MPDU_ERR_MPDU_LEN;
1494
1495 return errmap;
1496 }
1497
1498 static u32 ath12k_hw_wcn7850_get_rx_desc_size(void)
1499 {
1500 return sizeof(struct hal_rx_desc_wcn7850);
1501 }
1502
1503 static u8 ath12k_hw_wcn7850_rx_desc_get_msdu_src_link(struct hal_rx_desc *desc)
1504 {
1505 return 0;
1506 }
1507
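/* WCN7850 specific RX descriptor accessors; the datapath uses these hooks
 * instead of touching the descriptor layout directly.
 */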
1508 const struct hal_rx_ops hal_rx_wcn7850_ops = {
1509 .rx_desc_get_first_msdu = ath12k_hw_wcn7850_rx_desc_get_first_msdu,
1510 .rx_desc_get_last_msdu = ath12k_hw_wcn7850_rx_desc_get_last_msdu,
1511 .rx_desc_get_l3_pad_bytes = ath12k_hw_wcn7850_rx_desc_get_l3_pad_bytes,
1512 .rx_desc_encrypt_valid = ath12k_hw_wcn7850_rx_desc_encrypt_valid,
1513 .rx_desc_get_encrypt_type = ath12k_hw_wcn7850_rx_desc_get_encrypt_type,
1514 .rx_desc_get_decap_type = ath12k_hw_wcn7850_rx_desc_get_decap_type,
1515 .rx_desc_get_mesh_ctl = ath12k_hw_wcn7850_rx_desc_get_mesh_ctl,
1516 .rx_desc_get_mpdu_seq_ctl_vld = ath12k_hw_wcn7850_rx_desc_get_mpdu_seq_ctl_vld,
1517 .rx_desc_get_mpdu_fc_valid = ath12k_hw_wcn7850_rx_desc_get_mpdu_fc_valid,
1518 .rx_desc_get_mpdu_start_seq_no = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_seq_no,
1519 .rx_desc_get_msdu_len = ath12k_hw_wcn7850_rx_desc_get_msdu_len,
1520 .rx_desc_get_msdu_sgi = ath12k_hw_wcn7850_rx_desc_get_msdu_sgi,
1521 .rx_desc_get_msdu_rate_mcs = ath12k_hw_wcn7850_rx_desc_get_msdu_rate_mcs,
1522 .rx_desc_get_msdu_rx_bw = ath12k_hw_wcn7850_rx_desc_get_msdu_rx_bw,
1523 .rx_desc_get_msdu_freq = ath12k_hw_wcn7850_rx_desc_get_msdu_freq,
1524 .rx_desc_get_msdu_pkt_type = ath12k_hw_wcn7850_rx_desc_get_msdu_pkt_type,
1525 .rx_desc_get_msdu_nss = ath12k_hw_wcn7850_rx_desc_get_msdu_nss,
1526 .rx_desc_get_mpdu_tid = ath12k_hw_wcn7850_rx_desc_get_mpdu_tid,
1527 .rx_desc_get_mpdu_peer_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_peer_id,
1528 .rx_desc_copy_end_tlv = ath12k_hw_wcn7850_rx_desc_copy_end_tlv,
1529 .rx_desc_get_mpdu_start_tag = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_tag,
1530 .rx_desc_get_mpdu_ppdu_id = ath12k_hw_wcn7850_rx_desc_get_mpdu_ppdu_id,
1531 .rx_desc_set_msdu_len = ath12k_hw_wcn7850_rx_desc_set_msdu_len,
1532 .rx_desc_get_msdu_payload = ath12k_hw_wcn7850_rx_desc_get_msdu_payload,
1533 .rx_desc_get_mpdu_start_offset = ath12k_hw_wcn7850_rx_desc_get_mpdu_start_offset,
1534 .rx_desc_get_msdu_end_offset = ath12k_hw_wcn7850_rx_desc_get_msdu_end_offset,
1535 .rx_desc_mac_addr2_valid = ath12k_hw_wcn7850_rx_desc_mac_addr2_valid,
1536 .rx_desc_mpdu_start_addr2 = ath12k_hw_wcn7850_rx_desc_mpdu_start_addr2,
1537 .rx_desc_is_da_mcbc = ath12k_hw_wcn7850_rx_desc_is_da_mcbc,
1538 .rx_desc_get_dot11_hdr = ath12k_hw_wcn7850_rx_desc_get_dot11_hdr,
1539 .rx_desc_get_crypto_header = ath12k_hw_wcn7850_rx_desc_get_crypto_hdr,
1540 .dp_rx_h_msdu_done = ath12k_hw_wcn7850_dp_rx_h_msdu_done,
1541 .dp_rx_h_l4_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_l4_cksum_fail,
1542 .dp_rx_h_ip_cksum_fail = ath12k_hw_wcn7850_dp_rx_h_ip_cksum_fail,
1543 .dp_rx_h_is_decrypted = ath12k_hw_wcn7850_dp_rx_h_is_decrypted,
1544 .dp_rx_h_mpdu_err = ath12k_hw_wcn7850_dp_rx_h_mpdu_err,
1545 .rx_desc_get_desc_size = ath12k_hw_wcn7850_get_rx_desc_size,
1546 .rx_desc_get_msdu_src_link_id = ath12k_hw_wcn7850_rx_desc_get_msdu_src_link,
1547 };
1548
1549 const struct hal_ops hal_wcn7850_ops = {
1550 .create_srng_config = ath12k_hal_srng_create_config_wcn7850,
1551 .tcl_to_wbm_rbm_map = ath12k_hal_wcn7850_tcl_to_wbm_rbm_map,
1552 .rxdma_ring_wmask_rx_mpdu_start = NULL,
1553 .rxdma_ring_wmask_rx_msdu_end = NULL,
1554 .get_hal_rx_compact_ops = NULL,
1555 };
1556
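/* hal->rdp is a DMA-coherent block with one u32 slot per ring id. The ring
 * pointer that the hardware updates (HP for destination rings, TP for source
 * rings) lives here, so the host can read it from memory rather than via an
 * MMIO register read; see ath12k_hal_srng_setup() below.
 */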
1557 static int ath12k_hal_alloc_cont_rdp(struct ath12k_base *ab)
1558 {
1559 struct ath12k_hal *hal = &ab->hal;
1560 size_t size;
1561
1562 size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1563 hal->rdp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->rdp.paddr,
1564 GFP_KERNEL);
1565 if (!hal->rdp.vaddr)
1566 return -ENOMEM;
1567
1568 return 0;
1569 }
1570
1571 static void ath12k_hal_free_cont_rdp(struct ath12k_base *ab)
1572 {
1573 struct ath12k_hal *hal = &ab->hal;
1574 size_t size;
1575
1576 if (!hal->rdp.vaddr)
1577 return;
1578
1579 size = sizeof(u32) * HAL_SRNG_RING_ID_MAX;
1580 dma_free_coherent(ab->dev, size,
1581 hal->rdp.vaddr, hal->rdp.paddr);
1582 hal->rdp.vaddr = NULL;
1583 }
1584
1585 static int ath12k_hal_alloc_cont_wrp(struct ath12k_base *ab)
1586 {
1587 struct ath12k_hal *hal = &ab->hal;
1588 size_t size;
1589
1590 size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1591 hal->wrp.vaddr = dma_alloc_coherent(ab->dev, size, &hal->wrp.paddr,
1592 GFP_KERNEL);
1593 if (!hal->wrp.vaddr)
1594 return -ENOMEM;
1595
1596 return 0;
1597 }
1598
1599 static void ath12k_hal_free_cont_wrp(struct ath12k_base *ab)
1600 {
1601 struct ath12k_hal *hal = &ab->hal;
1602 size_t size;
1603
1604 if (!hal->wrp.vaddr)
1605 return;
1606
1607 size = sizeof(u32) * (HAL_SRNG_NUM_PMAC_RINGS + HAL_SRNG_NUM_DMAC_RINGS);
1608 dma_free_coherent(ab->dev, size,
1609 hal->wrp.vaddr, hal->wrp.paddr);
1610 hal->wrp.vaddr = NULL;
1611 }
1612
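/* Program the maximum buffer length for a CE destination ring into its
 * R0 destination control register.
 */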
1613 static void ath12k_hal_ce_dst_setup(struct ath12k_base *ab,
1614 struct hal_srng *srng, int ring_num)
1615 {
1616 struct hal_srng_config *srng_config = &ab->hal.srng_config[HAL_CE_DST];
1617 u32 addr;
1618 u32 val;
1619
1620 addr = HAL_CE_DST_RING_CTRL +
1621 srng_config->reg_start[HAL_SRNG_REG_GRP_R0] +
1622 ring_num * srng_config->reg_size[HAL_SRNG_REG_GRP_R0];
1623
1624 val = ath12k_hif_read32(ab, addr);
1625 val &= ~HAL_CE_DST_R0_DEST_CTRL_MAX_LEN;
1626 val |= u32_encode_bits(srng->u.dst_ring.max_buffer_length,
1627 HAL_CE_DST_R0_DEST_CTRL_MAX_LEN);
1628 ath12k_hif_write32(ab, addr, val);
1629 }
1630
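/* One-time register setup for a destination (HW producer) ring: MSI address
 * and data (if enabled), ring base address and size, ring id and entry size,
 * interrupt mitigation thresholds, the memory location the HW mirrors its
 * head pointer to, and finally the MISC register that enables the SRNG. The
 * HAL_REO1_* offsets are used as the generic layout for UMAC destination
 * rings.
 */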
1631 static void ath12k_hal_srng_dst_hw_init(struct ath12k_base *ab,
1632 struct hal_srng *srng)
1633 {
1634 struct ath12k_hal *hal = &ab->hal;
1635 u32 val;
1636 u64 hp_addr;
1637 u32 reg_base;
1638
1639 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1640
1641 if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1642 ath12k_hif_write32(ab, reg_base +
1643 ath12k_hal_reo1_ring_msi1_base_lsb_offset(ab),
1644 srng->msi_addr);
1645
1646 val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1647 HAL_REO1_RING_MSI1_BASE_MSB_ADDR) |
1648 HAL_REO1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1649 ath12k_hif_write32(ab, reg_base +
1650 ath12k_hal_reo1_ring_msi1_base_msb_offset(ab), val);
1651
1652 ath12k_hif_write32(ab,
1653 reg_base + ath12k_hal_reo1_ring_msi1_data_offset(ab),
1654 srng->msi_data);
1655 }
1656
1657 ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1658
1659 val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1660 HAL_REO1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1661 u32_encode_bits((srng->entry_size * srng->num_entries),
1662 HAL_REO1_RING_BASE_MSB_RING_SIZE);
1663 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_base_msb_offset(ab), val);
1664
1665 val = u32_encode_bits(srng->ring_id, HAL_REO1_RING_ID_RING_ID) |
1666 u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1667 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_id_offset(ab), val);
1668
1669 /* interrupt setup */
1670 val = u32_encode_bits((srng->intr_timer_thres_us >> 3),
1671 HAL_REO1_RING_PRDR_INT_SETUP_INTR_TMR_THOLD);
1672
1673 val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1674 HAL_REO1_RING_PRDR_INT_SETUP_BATCH_COUNTER_THOLD);
1675
1676 ath12k_hif_write32(ab,
1677 reg_base + ath12k_hal_reo1_ring_producer_int_setup_offset(ab),
1678 val);
1679
1680 hp_addr = hal->rdp.paddr +
1681 ((unsigned long)srng->u.dst_ring.hp_addr -
1682 (unsigned long)hal->rdp.vaddr);
1683 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_lsb_offset(ab),
1684 hp_addr & HAL_ADDR_LSB_REG_MASK);
1685 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_hp_addr_msb_offset(ab),
1686 hp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1687
1688 /* Initialize head and tail pointers to indicate ring is empty */
1689 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1690 ath12k_hif_write32(ab, reg_base, 0);
1691 ath12k_hif_write32(ab, reg_base + HAL_REO1_RING_TP_OFFSET, 0);
1692 *srng->u.dst_ring.hp_addr = 0;
1693
1694 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1695 val = 0;
1696 if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1697 val |= HAL_REO1_RING_MISC_DATA_TLV_SWAP;
1698 if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1699 val |= HAL_REO1_RING_MISC_HOST_FW_SWAP;
1700 if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1701 val |= HAL_REO1_RING_MISC_MSI_SWAP;
1702 val |= HAL_REO1_RING_MISC_SRNG_ENABLE;
1703
1704 ath12k_hif_write32(ab, reg_base + ath12k_hal_reo1_ring_misc_offset(ab), val);
1705 }
1706
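/* Source (SW producer) ring counterpart of the above, using the HAL_TCL1_*
 * register layout; additionally programs the low-threshold interrupt and the
 * tail pointer mirror address (skipped for the WBM idle link ring).
 */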
1707 static void ath12k_hal_srng_src_hw_init(struct ath12k_base *ab,
1708 struct hal_srng *srng)
1709 {
1710 struct ath12k_hal *hal = &ab->hal;
1711 u32 val;
1712 u64 tp_addr;
1713 u32 reg_base;
1714
1715 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1716
1717 if (srng->flags & HAL_SRNG_FLAGS_MSI_INTR) {
1718 ath12k_hif_write32(ab, reg_base +
1719 HAL_TCL1_RING_MSI1_BASE_LSB_OFFSET(ab),
1720 srng->msi_addr);
1721
1722 val = u32_encode_bits(((u64)srng->msi_addr >> HAL_ADDR_MSB_REG_SHIFT),
1723 HAL_TCL1_RING_MSI1_BASE_MSB_ADDR) |
1724 HAL_TCL1_RING_MSI1_BASE_MSB_MSI1_ENABLE;
1725 ath12k_hif_write32(ab, reg_base +
1726 HAL_TCL1_RING_MSI1_BASE_MSB_OFFSET(ab),
1727 val);
1728
1729 ath12k_hif_write32(ab, reg_base +
1730 HAL_TCL1_RING_MSI1_DATA_OFFSET(ab),
1731 srng->msi_data);
1732 }
1733
1734 ath12k_hif_write32(ab, reg_base, srng->ring_base_paddr);
1735
1736 val = u32_encode_bits(((u64)srng->ring_base_paddr >> HAL_ADDR_MSB_REG_SHIFT),
1737 HAL_TCL1_RING_BASE_MSB_RING_BASE_ADDR_MSB) |
1738 u32_encode_bits((srng->entry_size * srng->num_entries),
1739 HAL_TCL1_RING_BASE_MSB_RING_SIZE);
1740 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_BASE_MSB_OFFSET(ab), val);
1741
1742 val = u32_encode_bits(srng->entry_size, HAL_REO1_RING_ID_ENTRY_SIZE);
1743 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_ID_OFFSET(ab), val);
1744
1745 val = u32_encode_bits(srng->intr_timer_thres_us,
1746 HAL_TCL1_RING_CONSR_INT_SETUP_IX0_INTR_TMR_THOLD);
1747
1748 val |= u32_encode_bits((srng->intr_batch_cntr_thres_entries * srng->entry_size),
1749 HAL_TCL1_RING_CONSR_INT_SETUP_IX0_BATCH_COUNTER_THOLD);
1750
1751 ath12k_hif_write32(ab,
1752 reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX0_OFFSET(ab),
1753 val);
1754
1755 val = 0;
1756 if (srng->flags & HAL_SRNG_FLAGS_LOW_THRESH_INTR_EN) {
1757 val |= u32_encode_bits(srng->u.src_ring.low_threshold,
1758 HAL_TCL1_RING_CONSR_INT_SETUP_IX1_LOW_THOLD);
1759 }
1760 ath12k_hif_write32(ab,
1761 reg_base + HAL_TCL1_RING_CONSR_INT_SETUP_IX1_OFFSET(ab),
1762 val);
1763
1764 if (srng->ring_id != HAL_SRNG_RING_ID_WBM_IDLE_LINK) {
1765 tp_addr = hal->rdp.paddr +
1766 ((unsigned long)srng->u.src_ring.tp_addr -
1767 (unsigned long)hal->rdp.vaddr);
1768 ath12k_hif_write32(ab,
1769 reg_base + HAL_TCL1_RING_TP_ADDR_LSB_OFFSET(ab),
1770 tp_addr & HAL_ADDR_LSB_REG_MASK);
1771 ath12k_hif_write32(ab,
1772 reg_base + HAL_TCL1_RING_TP_ADDR_MSB_OFFSET(ab),
1773 tp_addr >> HAL_ADDR_MSB_REG_SHIFT);
1774 }
1775
1776 /* Initialize head and tail pointers to indicate ring is empty */
1777 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
1778 ath12k_hif_write32(ab, reg_base, 0);
1779 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_TP_OFFSET, 0);
1780 *srng->u.src_ring.tp_addr = 0;
1781
1782 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R0];
1783 val = 0;
1784 if (srng->flags & HAL_SRNG_FLAGS_DATA_TLV_SWAP)
1785 val |= HAL_TCL1_RING_MISC_DATA_TLV_SWAP;
1786 if (srng->flags & HAL_SRNG_FLAGS_RING_PTR_SWAP)
1787 val |= HAL_TCL1_RING_MISC_HOST_FW_SWAP;
1788 if (srng->flags & HAL_SRNG_FLAGS_MSI_SWAP)
1789 val |= HAL_TCL1_RING_MISC_MSI_SWAP;
1790
1791 /* Loop count is not used for SRC rings */
1792 val |= HAL_TCL1_RING_MISC_MSI_LOOPCNT_DISABLE;
1793
1794 val |= HAL_TCL1_RING_MISC_SRNG_ENABLE;
1795
1796 if (srng->ring_id == HAL_SRNG_RING_ID_WBM_IDLE_LINK)
1797 val |= HAL_TCL1_RING_MISC_MSI_RING_ID_DISABLE;
1798
1799 ath12k_hif_write32(ab, reg_base + HAL_TCL1_RING_MISC_OFFSET(ab), val);
1800 }
1801
1802 static void ath12k_hal_srng_hw_init(struct ath12k_base *ab,
1803 struct hal_srng *srng)
1804 {
1805 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1806 ath12k_hal_srng_src_hw_init(ab, srng);
1807 else
1808 ath12k_hal_srng_dst_hw_init(ab, srng);
1809 }
1810
1811 static int ath12k_hal_srng_get_ring_id(struct ath12k_base *ab,
1812 enum hal_ring_type type,
1813 int ring_num, int mac_id)
1814 {
1815 struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
1816 int ring_id;
1817
1818 if (ring_num >= srng_config->max_rings) {
1819 ath12k_warn(ab, "invalid ring number: %d\n", ring_num);
1820 return -EINVAL;
1821 }
1822
1823 ring_id = srng_config->start_ring_id + ring_num;
1824 if (srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
1825 ring_id += mac_id * HAL_SRNG_RINGS_PER_PMAC;
1826
1827 if (WARN_ON(ring_id >= HAL_SRNG_RING_ID_MAX))
1828 return -EINVAL;
1829
1830 return ring_id;
1831 }
1832
1833 int ath12k_hal_srng_get_entrysize(struct ath12k_base *ab, u32 ring_type)
1834 {
1835 struct hal_srng_config *srng_config;
1836
1837 if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1838 return -EINVAL;
1839
1840 srng_config = &ab->hal.srng_config[ring_type];
1841
1842 return (srng_config->entry_size << 2);
1843 }
1844
1845 int ath12k_hal_srng_get_max_entries(struct ath12k_base *ab, u32 ring_type)
1846 {
1847 struct hal_srng_config *srng_config;
1848
1849 if (WARN_ON(ring_type >= HAL_MAX_RING_TYPES))
1850 return -EINVAL;
1851
1852 srng_config = &ab->hal.srng_config[ring_type];
1853
1854 return (srng_config->max_size / srng_config->entry_size);
1855 }
1856
1857 void ath12k_hal_srng_get_params(struct ath12k_base *ab, struct hal_srng *srng,
1858 struct hal_srng_params *params)
1859 {
1860 params->ring_base_paddr = srng->ring_base_paddr;
1861 params->ring_base_vaddr = srng->ring_base_vaddr;
1862 params->num_entries = srng->num_entries;
1863 params->intr_timer_thres_us = srng->intr_timer_thres_us;
1864 params->intr_batch_cntr_thres_entries =
1865 srng->intr_batch_cntr_thres_entries;
1866 params->low_threshold = srng->u.src_ring.low_threshold;
1867 params->msi_addr = srng->msi_addr;
1868 params->msi2_addr = srng->msi2_addr;
1869 params->msi_data = srng->msi_data;
1870 params->msi2_data = srng->msi2_data;
1871 params->flags = srng->flags;
1872 }
1873
1874 dma_addr_t ath12k_hal_srng_get_hp_addr(struct ath12k_base *ab,
1875 struct hal_srng *srng)
1876 {
1877 if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1878 return 0;
1879
1880 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1881 return ab->hal.wrp.paddr +
1882 ((unsigned long)srng->u.src_ring.hp_addr -
1883 (unsigned long)ab->hal.wrp.vaddr);
1884 else
1885 return ab->hal.rdp.paddr +
1886 ((unsigned long)srng->u.dst_ring.hp_addr -
1887 (unsigned long)ab->hal.rdp.vaddr);
1888 }
1889
1890 dma_addr_t ath12k_hal_srng_get_tp_addr(struct ath12k_base *ab,
1891 struct hal_srng *srng)
1892 {
1893 if (!(srng->flags & HAL_SRNG_FLAGS_LMAC_RING))
1894 return 0;
1895
1896 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
1897 return ab->hal.rdp.paddr +
1898 ((unsigned long)srng->u.src_ring.tp_addr -
1899 (unsigned long)ab->hal.rdp.vaddr);
1900 else
1901 return ab->hal.wrp.paddr +
1902 ((unsigned long)srng->u.dst_ring.tp_addr -
1903 (unsigned long)ab->hal.wrp.vaddr);
1904 }
1905
1906 u32 ath12k_hal_ce_get_desc_size(enum hal_ce_desc type)
1907 {
1908 switch (type) {
1909 case HAL_CE_DESC_SRC:
1910 return sizeof(struct hal_ce_srng_src_desc);
1911 case HAL_CE_DESC_DST:
1912 return sizeof(struct hal_ce_srng_dest_desc);
1913 case HAL_CE_DESC_DST_STATUS:
1914 return sizeof(struct hal_ce_srng_dst_status_desc);
1915 }
1916
1917 return 0;
1918 }
1919
1920 void ath12k_hal_ce_src_set_desc(struct hal_ce_srng_src_desc *desc, dma_addr_t paddr,
1921 u32 len, u32 id, u8 byte_swap_data)
1922 {
1923 desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1924 desc->buffer_addr_info =
1925 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1926 HAL_CE_SRC_DESC_ADDR_INFO_ADDR_HI) |
1927 le32_encode_bits(byte_swap_data,
1928 HAL_CE_SRC_DESC_ADDR_INFO_BYTE_SWAP) |
1929 le32_encode_bits(0, HAL_CE_SRC_DESC_ADDR_INFO_GATHER) |
1930 le32_encode_bits(len, HAL_CE_SRC_DESC_ADDR_INFO_LEN);
1931 desc->meta_info = le32_encode_bits(id, HAL_CE_SRC_DESC_META_INFO_DATA);
1932 }
1933
1934 void ath12k_hal_ce_dst_set_desc(struct hal_ce_srng_dest_desc *desc, dma_addr_t paddr)
1935 {
1936 desc->buffer_addr_low = cpu_to_le32(paddr & HAL_ADDR_LSB_REG_MASK);
1937 desc->buffer_addr_info =
1938 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1939 HAL_CE_DEST_DESC_ADDR_INFO_ADDR_HI);
1940 }
1941
1942 u32 ath12k_hal_ce_dst_status_get_length(struct hal_ce_srng_dst_status_desc *desc)
1943 {
1944 u32 len;
1945
1946 len = le32_get_bits(READ_ONCE(desc->flags), HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1947 desc->flags &= ~cpu_to_le32(HAL_CE_DST_STATUS_DESC_FLAGS_LEN);
1948
1949 return len;
1950 }
1951
1952 void ath12k_hal_set_link_desc_addr(struct hal_wbm_link_desc *desc, u32 cookie,
1953 dma_addr_t paddr,
1954 enum hal_rx_buf_return_buf_manager rbm)
1955 {
1956 desc->buf_addr_info.info0 = le32_encode_bits((paddr & HAL_ADDR_LSB_REG_MASK),
1957 BUFFER_ADDR_INFO0_ADDR);
1958 desc->buf_addr_info.info1 =
1959 le32_encode_bits(((u64)paddr >> HAL_ADDR_MSB_REG_SHIFT),
1960 BUFFER_ADDR_INFO1_ADDR) |
1961 le32_encode_bits(rbm, BUFFER_ADDR_INFO1_RET_BUF_MGR) |
1962 le32_encode_bits(cookie, BUFFER_ADDR_INFO1_SW_COOKIE);
1963 }
1964
1965 void *ath12k_hal_srng_dst_peek(struct ath12k_base *ab, struct hal_srng *srng)
1966 {
1967 lockdep_assert_held(&srng->lock);
1968
1969 if (srng->u.dst_ring.tp != srng->u.dst_ring.cached_hp)
1970 return (srng->ring_base_vaddr + srng->u.dst_ring.tp);
1971
1972 return NULL;
1973 }
1974
1975 void *ath12k_hal_srng_dst_get_next_entry(struct ath12k_base *ab,
1976 struct hal_srng *srng)
1977 {
1978 void *desc;
1979
1980 lockdep_assert_held(&srng->lock);
1981
1982 if (srng->u.dst_ring.tp == srng->u.dst_ring.cached_hp)
1983 return NULL;
1984
1985 desc = srng->ring_base_vaddr + srng->u.dst_ring.tp;
1986
1987 srng->u.dst_ring.tp = (srng->u.dst_ring.tp + srng->entry_size) %
1988 srng->ring_size;
1989
1990 return desc;
1991 }
1992
1993 int ath12k_hal_srng_dst_num_free(struct ath12k_base *ab, struct hal_srng *srng,
1994 bool sync_hw_ptr)
1995 {
1996 u32 tp, hp;
1997
1998 lockdep_assert_held(&srng->lock);
1999
2000 tp = srng->u.dst_ring.tp;
2001
2002 if (sync_hw_ptr) {
2003 hp = *srng->u.dst_ring.hp_addr;
2004 srng->u.dst_ring.cached_hp = hp;
2005 } else {
2006 hp = srng->u.dst_ring.cached_hp;
2007 }
2008
2009 if (hp >= tp)
2010 return (hp - tp) / srng->entry_size;
2011 else
2012 return (srng->ring_size - tp + hp) / srng->entry_size;
2013 }
2014
2015 /* Returns number of available entries in src ring */
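/* One entry is deliberately kept unused so that a full ring can be
 * distinguished from an empty one, hence the "- 1" below.
 */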
2016 int ath12k_hal_srng_src_num_free(struct ath12k_base *ab, struct hal_srng *srng,
2017 bool sync_hw_ptr)
2018 {
2019 u32 tp, hp;
2020
2021 lockdep_assert_held(&srng->lock);
2022
2023 hp = srng->u.src_ring.hp;
2024
2025 if (sync_hw_ptr) {
2026 tp = *srng->u.src_ring.tp_addr;
2027 srng->u.src_ring.cached_tp = tp;
2028 } else {
2029 tp = srng->u.src_ring.cached_tp;
2030 }
2031
2032 if (tp > hp)
2033 return ((tp - hp) / srng->entry_size) - 1;
2034 else
2035 return ((srng->ring_size - hp + tp) / srng->entry_size) - 1;
2036 }
2037
2038 void *ath12k_hal_srng_src_get_next_entry(struct ath12k_base *ab,
2039 struct hal_srng *srng)
2040 {
2041 void *desc;
2042 u32 next_hp;
2043
2044 lockdep_assert_held(&srng->lock);
2045
2046 /* TODO: Using % is expensive, but we have to do this since the size of
2047 * some SRNG rings is not a power of 2 (due to descriptor sizes). Need to
2048 * see if a separate function can be defined for rings whose size is a
2049 * power of 2 (TCL2SW, REO2SW, SW2RXDMA and CE rings) so that the
2050 * overhead of % can be avoided by masking (with &).
2051 */
2052 next_hp = (srng->u.src_ring.hp + srng->entry_size) % srng->ring_size;
2053
2054 if (next_hp == srng->u.src_ring.cached_tp)
2055 return NULL;
2056
2057 desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
2058 srng->u.src_ring.hp = next_hp;
2059
2060 /* TODO: Reap functionality is not used by all rings. If a particular
2061 * ring does not use reap functionality, we need not update reap_hp
2062 * with the next_hp pointer. Need to make sure a separate function is
2063 * used before doing any optimization by removing the code below that
2064 * updates reap_hp.
2065 */
2066 srng->u.src_ring.reap_hp = next_hp;
2067
2068 return desc;
2069 }
2070
2071 void *ath12k_hal_srng_src_reap_next(struct ath12k_base *ab,
2072 struct hal_srng *srng)
2073 {
2074 void *desc;
2075 u32 next_reap_hp;
2076
2077 lockdep_assert_held(&srng->lock);
2078
2079 next_reap_hp = (srng->u.src_ring.reap_hp + srng->entry_size) %
2080 srng->ring_size;
2081
2082 if (next_reap_hp == srng->u.src_ring.cached_tp)
2083 return NULL;
2084
2085 desc = srng->ring_base_vaddr + next_reap_hp;
2086 srng->u.src_ring.reap_hp = next_reap_hp;
2087
2088 return desc;
2089 }
2090
2091 void *ath12k_hal_srng_src_get_next_reaped(struct ath12k_base *ab,
2092 struct hal_srng *srng)
2093 {
2094 void *desc;
2095
2096 lockdep_assert_held(&srng->lock);
2097
2098 if (srng->u.src_ring.hp == srng->u.src_ring.reap_hp)
2099 return NULL;
2100
2101 desc = srng->ring_base_vaddr + srng->u.src_ring.hp;
2102 srng->u.src_ring.hp = (srng->u.src_ring.hp + srng->entry_size) %
2103 srng->ring_size;
2104
2105 return desc;
2106 }
2107
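/* Begin a ring access window: snapshot the pointer owned by the remote side
 * (TP for source rings, HP for destination rings) into the cached copy used
 * by the entry accessors above. ath12k_hal_srng_access_end() publishes the
 * host-side pointer update when the window is closed.
 */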
2108 void ath12k_hal_srng_access_begin(struct ath12k_base *ab, struct hal_srng *srng)
2109 {
2110 u32 hp;
2111
2112 lockdep_assert_held(&srng->lock);
2113
2114 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2115 srng->u.src_ring.cached_tp =
2116 *(volatile u32 *)srng->u.src_ring.tp_addr;
2117 } else {
2118 hp = READ_ONCE(*srng->u.dst_ring.hp_addr);
2119
2120 if (hp != srng->u.dst_ring.cached_hp) {
2121 srng->u.dst_ring.cached_hp = hp;
2122 /* Make sure descriptor is read after the head
2123 * pointer.
2124 */
2125 dma_rmb();
2126 }
2127 }
2128 }
2129
2130 /* Update cached ring head/tail pointers to HW. ath12k_hal_srng_access_begin()
2131 * should have been called before this.
2132 */
2133 void ath12k_hal_srng_access_end(struct ath12k_base *ab, struct hal_srng *srng)
2134 {
2135 lockdep_assert_held(&srng->lock);
2136
2137 if (srng->flags & HAL_SRNG_FLAGS_LMAC_RING) {
2138 /* For LMAC rings, ring pointer updates are done through FW and
2139 * hence written to a shared memory location that is read by FW
2140 */
2141 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2142 srng->u.src_ring.last_tp =
2143 *(volatile u32 *)srng->u.src_ring.tp_addr;
2144 /* Make sure descriptor is written before updating the
2145 * head pointer.
2146 */
2147 dma_wmb();
2148 WRITE_ONCE(*srng->u.src_ring.hp_addr, srng->u.src_ring.hp);
2149 } else {
2150 srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
2151 /* Make sure descriptor is read before updating the
2152 * tail pointer.
2153 */
2154 dma_mb();
2155 WRITE_ONCE(*srng->u.dst_ring.tp_addr, srng->u.dst_ring.tp);
2156 }
2157 } else {
2158 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2159 srng->u.src_ring.last_tp =
2160 *(volatile u32 *)srng->u.src_ring.tp_addr;
2161 /* Assume the implementation uses an MMIO write accessor
2162 * which has the required wmb() so that the descriptor
2163 * is written before updating the head pointer.
2164 */
2165 ath12k_hif_write32(ab,
2166 (unsigned long)srng->u.src_ring.hp_addr -
2167 (unsigned long)ab->mem,
2168 srng->u.src_ring.hp);
2169 } else {
2170 srng->u.dst_ring.last_hp = *srng->u.dst_ring.hp_addr;
2171 /* Make sure descriptor is read before updating the
2172 * tail pointer.
2173 */
2174 mb();
2175 ath12k_hif_write32(ab,
2176 (unsigned long)srng->u.dst_ring.tp_addr -
2177 (unsigned long)ab->mem,
2178 srng->u.dst_ring.tp);
2179 }
2180 }
2181
2182 srng->timestamp = jiffies;
2183 }
2184
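/* Chain the WBM idle link scatter buffers (the tail of each buffer holds the
 * DMA address of the next one), then program the scatter list base, size and
 * head/tail pointer registers and enable the idle link ring.
 */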
2185 void ath12k_hal_setup_link_idle_list(struct ath12k_base *ab,
2186 struct hal_wbm_idle_scatter_list *sbuf,
2187 u32 nsbufs, u32 tot_link_desc,
2188 u32 end_offset)
2189 {
2190 struct ath12k_buffer_addr *link_addr;
2191 int i;
2192 u32 reg_scatter_buf_sz = HAL_WBM_IDLE_SCATTER_BUF_SIZE / 64;
2193 u32 val;
2194
2195 link_addr = (void *)sbuf[0].vaddr + HAL_WBM_IDLE_SCATTER_BUF_SIZE;
2196
2197 for (i = 1; i < nsbufs; i++) {
2198 link_addr->info0 = cpu_to_le32(sbuf[i].paddr & HAL_ADDR_LSB_REG_MASK);
2199
2200 link_addr->info1 =
2201 le32_encode_bits((u64)sbuf[i].paddr >> HAL_ADDR_MSB_REG_SHIFT,
2202 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2203 le32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
2204 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG);
2205
2206 link_addr = (void *)sbuf[i].vaddr +
2207 HAL_WBM_IDLE_SCATTER_BUF_SIZE;
2208 }
2209
2210 val = u32_encode_bits(reg_scatter_buf_sz, HAL_WBM_SCATTER_BUFFER_SIZE) |
2211 u32_encode_bits(0x1, HAL_WBM_LINK_DESC_IDLE_LIST_MODE);
2212
2213 ath12k_hif_write32(ab,
2214 HAL_SEQ_WCSS_UMAC_WBM_REG +
2215 HAL_WBM_R0_IDLE_LIST_CONTROL_ADDR(ab),
2216 val);
2217
2218 val = u32_encode_bits(reg_scatter_buf_sz * nsbufs,
2219 HAL_WBM_SCATTER_RING_SIZE_OF_IDLE_LINK_DESC_LIST);
2220 ath12k_hif_write32(ab,
2221 HAL_SEQ_WCSS_UMAC_WBM_REG + HAL_WBM_R0_IDLE_LIST_SIZE_ADDR(ab),
2222 val);
2223
2224 val = u32_encode_bits(sbuf[0].paddr & HAL_ADDR_LSB_REG_MASK,
2225 BUFFER_ADDR_INFO0_ADDR);
2226 ath12k_hif_write32(ab,
2227 HAL_SEQ_WCSS_UMAC_WBM_REG +
2228 HAL_WBM_SCATTERED_RING_BASE_LSB(ab),
2229 val);
2230
2231 val = u32_encode_bits(BASE_ADDR_MATCH_TAG_VAL,
2232 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_MATCH_TAG) |
2233 u32_encode_bits((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT,
2234 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32);
2235 ath12k_hif_write32(ab,
2236 HAL_SEQ_WCSS_UMAC_WBM_REG +
2237 HAL_WBM_SCATTERED_RING_BASE_MSB(ab),
2238 val);
2239
2240 /* Setup head and tail pointers for the idle list */
2241 val = u32_encode_bits(sbuf[nsbufs - 1].paddr, BUFFER_ADDR_INFO0_ADDR);
2242 ath12k_hif_write32(ab,
2243 HAL_SEQ_WCSS_UMAC_WBM_REG +
2244 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
2245 val);
2246
2247 val = u32_encode_bits(((u64)sbuf[nsbufs - 1].paddr >> HAL_ADDR_MSB_REG_SHIFT),
2248 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2249 u32_encode_bits((end_offset >> 2),
2250 HAL_WBM_SCATTERED_DESC_HEAD_P_OFFSET_IX1);
2251 ath12k_hif_write32(ab,
2252 HAL_SEQ_WCSS_UMAC_WBM_REG +
2253 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX1(ab),
2254 val);
2255
2256 val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
2257 ath12k_hif_write32(ab,
2258 HAL_SEQ_WCSS_UMAC_WBM_REG +
2259 HAL_WBM_SCATTERED_DESC_PTR_HEAD_INFO_IX0(ab),
2260 val);
2261
2262 val = u32_encode_bits(sbuf[0].paddr, BUFFER_ADDR_INFO0_ADDR);
2263 ath12k_hif_write32(ab,
2264 HAL_SEQ_WCSS_UMAC_WBM_REG +
2265 HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX0(ab),
2266 val);
2267
2268 val = u32_encode_bits(((u64)sbuf[0].paddr >> HAL_ADDR_MSB_REG_SHIFT),
2269 HAL_WBM_SCATTERED_DESC_MSB_BASE_ADDR_39_32) |
2270 u32_encode_bits(0, HAL_WBM_SCATTERED_DESC_TAIL_P_OFFSET_IX1);
2271 ath12k_hif_write32(ab,
2272 HAL_SEQ_WCSS_UMAC_WBM_REG +
2273 HAL_WBM_SCATTERED_DESC_PTR_TAIL_INFO_IX1(ab),
2274 val);
2275
2276 val = 2 * tot_link_desc;
2277 ath12k_hif_write32(ab,
2278 HAL_SEQ_WCSS_UMAC_WBM_REG +
2279 HAL_WBM_SCATTERED_DESC_PTR_HP_ADDR(ab),
2280 val);
2281
2282 /* Enable the SRNG */
2283 val = u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_SRNG_ENABLE) |
2284 u32_encode_bits(1, HAL_WBM_IDLE_LINK_RING_MISC_RIND_ID_DISABLE);
2285 ath12k_hif_write32(ab,
2286 HAL_SEQ_WCSS_UMAC_WBM_REG +
2287 HAL_WBM_IDLE_LINK_RING_MISC_ADDR(ab),
2288 val);
2289 }
2290
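/* Common SW-side ring setup: resolve the ring id, record the ring parameters
 * in the hal_srng state, zero the ring memory and pick where the head/tail
 * pointers live (device registers, shadow registers or the shared rdp/wrp
 * slots). UMAC rings are then initialised in hardware here; PMAC/DMAC rings
 * are flagged as LMAC rings and their register setup is not done by the host.
 */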
2291 int ath12k_hal_srng_setup(struct ath12k_base *ab, enum hal_ring_type type,
2292 int ring_num, int mac_id,
2293 struct hal_srng_params *params)
2294 {
2295 struct ath12k_hal *hal = &ab->hal;
2296 struct hal_srng_config *srng_config = &ab->hal.srng_config[type];
2297 struct hal_srng *srng;
2298 int ring_id;
2299 u32 idx;
2300 int i;
2301 u32 reg_base;
2302
2303 ring_id = ath12k_hal_srng_get_ring_id(ab, type, ring_num, mac_id);
2304 if (ring_id < 0)
2305 return ring_id;
2306
2307 srng = &hal->srng_list[ring_id];
2308
2309 srng->ring_id = ring_id;
2310 srng->ring_dir = srng_config->ring_dir;
2311 srng->ring_base_paddr = params->ring_base_paddr;
2312 srng->ring_base_vaddr = params->ring_base_vaddr;
2313 srng->entry_size = srng_config->entry_size;
2314 srng->num_entries = params->num_entries;
2315 srng->ring_size = srng->entry_size * srng->num_entries;
2316 srng->intr_batch_cntr_thres_entries =
2317 params->intr_batch_cntr_thres_entries;
2318 srng->intr_timer_thres_us = params->intr_timer_thres_us;
2319 srng->flags = params->flags;
2320 srng->msi_addr = params->msi_addr;
2321 srng->msi2_addr = params->msi2_addr;
2322 srng->msi_data = params->msi_data;
2323 srng->msi2_data = params->msi2_data;
2324 srng->initialized = 1;
2325 spin_lock_init(&srng->lock);
2326 lockdep_set_class(&srng->lock, &srng->lock_key);
2327
2328 for (i = 0; i < HAL_SRNG_NUM_REG_GRP; i++) {
2329 srng->hwreg_base[i] = srng_config->reg_start[i] +
2330 (ring_num * srng_config->reg_size[i]);
2331 }
2332
2333 memset(srng->ring_base_vaddr, 0,
2334 (srng->entry_size * srng->num_entries) << 2);
2335
2336 reg_base = srng->hwreg_base[HAL_SRNG_REG_GRP_R2];
2337
2338 if (srng->ring_dir == HAL_SRNG_DIR_SRC) {
2339 srng->u.src_ring.hp = 0;
2340 srng->u.src_ring.cached_tp = 0;
2341 srng->u.src_ring.reap_hp = srng->ring_size - srng->entry_size;
2342 srng->u.src_ring.tp_addr = (void *)(hal->rdp.vaddr + ring_id);
2343 srng->u.src_ring.low_threshold = params->low_threshold *
2344 srng->entry_size;
2345 if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
2346 if (!ab->hw_params->supports_shadow_regs)
2347 srng->u.src_ring.hp_addr =
2348 (u32 *)((unsigned long)ab->mem + reg_base);
2349 else
2350 ath12k_dbg(ab, ATH12K_DBG_HAL,
2351 "hal type %d ring_num %d reg_base 0x%x shadow 0x%lx\n",
2352 type, ring_num,
2353 reg_base,
2354 (unsigned long)srng->u.src_ring.hp_addr -
2355 (unsigned long)ab->mem);
2356 } else {
2357 idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
2358 srng->u.src_ring.hp_addr = (void *)(hal->wrp.vaddr +
2359 idx);
2360 srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
2361 }
2362 } else {
2363 /* During initialization the loop count in all the descriptors
2364 * will be set to zero, and HW will set it to 1 on completing the
2365 * descriptor update in the first loop, and increment it by 1 on
2366 * subsequent loops (the loop count wraps around after reaching
2367 * 0xffff). The 'loop_cnt' in the SW ring state is the expected
2368 * loop count in descriptors updated by HW (to be processed
2369 * by SW).
2370 */
2371 srng->u.dst_ring.loop_cnt = 1;
2372 srng->u.dst_ring.tp = 0;
2373 srng->u.dst_ring.cached_hp = 0;
2374 srng->u.dst_ring.hp_addr = (void *)(hal->rdp.vaddr + ring_id);
2375 if (srng_config->mac_type == ATH12K_HAL_SRNG_UMAC) {
2376 if (!ab->hw_params->supports_shadow_regs)
2377 srng->u.dst_ring.tp_addr =
2378 (u32 *)((unsigned long)ab->mem + reg_base +
2379 (HAL_REO1_RING_TP - HAL_REO1_RING_HP));
2380 else
2381 ath12k_dbg(ab, ATH12K_DBG_HAL,
2382 "type %d ring_num %d target_reg 0x%x shadow 0x%lx\n",
2383 type, ring_num,
2384 reg_base + HAL_REO1_RING_TP - HAL_REO1_RING_HP,
2385 (unsigned long)srng->u.dst_ring.tp_addr -
2386 (unsigned long)ab->mem);
2387 } else {
2388 /* For PMAC & DMAC rings, tail pointer updates will be done
2389 * through FW by writing to a shared memory location
2390 */
2391 idx = ring_id - HAL_SRNG_RING_ID_DMAC_CMN_ID_START;
2392 srng->u.dst_ring.tp_addr = (void *)(hal->wrp.vaddr +
2393 idx);
2394 srng->flags |= HAL_SRNG_FLAGS_LMAC_RING;
2395 }
2396 }
2397
2398 if (srng_config->mac_type != ATH12K_HAL_SRNG_UMAC)
2399 return ring_id;
2400
2401 ath12k_hal_srng_hw_init(ab, srng);
2402
2403 if (type == HAL_CE_DST) {
2404 srng->u.dst_ring.max_buffer_length = params->max_buffer_len;
2405 ath12k_hal_ce_dst_setup(ab, srng, ring_num);
2406 }
2407
2408 return ring_id;
2409 }
2410
2411 static void ath12k_hal_srng_update_hp_tp_addr(struct ath12k_base *ab,
2412 int shadow_cfg_idx,
2413 enum hal_ring_type ring_type,
2414 int ring_num)
2415 {
2416 struct hal_srng *srng;
2417 struct ath12k_hal *hal = &ab->hal;
2418 int ring_id;
2419 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2420
2421 ring_id = ath12k_hal_srng_get_ring_id(ab, ring_type, ring_num, 0);
2422 if (ring_id < 0)
2423 return;
2424
2425 srng = &hal->srng_list[ring_id];
2426
2427 if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2428 srng->u.dst_ring.tp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2429 (unsigned long)ab->mem);
2430 else
2431 srng->u.src_ring.hp_addr = (u32 *)(HAL_SHADOW_REG(shadow_cfg_idx) +
2432 (unsigned long)ab->mem);
2433 }
2434
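/* Assign the next free shadow register slot to this ring: record the target
 * HP register (TP for destination rings) in shadow_reg_addr[] and repoint the
 * ring's hp/tp address at the corresponding shadow register offset.
 */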
2435 int ath12k_hal_srng_update_shadow_config(struct ath12k_base *ab,
2436 enum hal_ring_type ring_type,
2437 int ring_num)
2438 {
2439 struct ath12k_hal *hal = &ab->hal;
2440 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2441 int shadow_cfg_idx = hal->num_shadow_reg_configured;
2442 u32 target_reg;
2443
2444 if (shadow_cfg_idx >= HAL_SHADOW_NUM_REGS)
2445 return -EINVAL;
2446
2447 hal->num_shadow_reg_configured++;
2448
2449 target_reg = srng_config->reg_start[HAL_HP_OFFSET_IN_REG_START];
2450 target_reg += srng_config->reg_size[HAL_HP_OFFSET_IN_REG_START] *
2451 ring_num;
2452
2453 /* For destination ring, shadow the TP */
2454 if (srng_config->ring_dir == HAL_SRNG_DIR_DST)
2455 target_reg += HAL_OFFSET_FROM_HP_TO_TP;
2456
2457 hal->shadow_reg_addr[shadow_cfg_idx] = target_reg;
2458
2459 /* update the hp/tp address in the hal structure */
2460 ath12k_hal_srng_update_hp_tp_addr(ab, shadow_cfg_idx, ring_type,
2461 ring_num);
2462
2463 ath12k_dbg(ab, ATH12K_DBG_HAL,
2464 "target_reg %x, shadow reg 0x%x shadow_idx 0x%x, ring_type %d, ring num %d",
2465 target_reg,
2466 HAL_SHADOW_REG(shadow_cfg_idx),
2467 shadow_cfg_idx,
2468 ring_type, ring_num);
2469
2470 return 0;
2471 }
2472
2473 void ath12k_hal_srng_shadow_config(struct ath12k_base *ab)
2474 {
2475 struct ath12k_hal *hal = &ab->hal;
2476 int ring_type, ring_num;
2477
2478 /* update all the non-CE srngs. */
2479 for (ring_type = 0; ring_type < HAL_MAX_RING_TYPES; ring_type++) {
2480 struct hal_srng_config *srng_config = &hal->srng_config[ring_type];
2481
2482 if (ring_type == HAL_CE_SRC ||
2483 ring_type == HAL_CE_DST ||
2484 ring_type == HAL_CE_DST_STATUS)
2485 continue;
2486
2487 if (srng_config->mac_type == ATH12K_HAL_SRNG_DMAC ||
2488 srng_config->mac_type == ATH12K_HAL_SRNG_PMAC)
2489 continue;
2490
2491 for (ring_num = 0; ring_num < srng_config->max_rings; ring_num++)
2492 ath12k_hal_srng_update_shadow_config(ab, ring_type, ring_num);
2493 }
2494 }
2495
2496 void ath12k_hal_srng_get_shadow_config(struct ath12k_base *ab,
2497 u32 **cfg, u32 *len)
2498 {
2499 struct ath12k_hal *hal = &ab->hal;
2500
2501 *len = hal->num_shadow_reg_configured;
2502 *cfg = hal->shadow_reg_addr;
2503 }
2504
2505 void ath12k_hal_srng_shadow_update_hp_tp(struct ath12k_base *ab,
2506 struct hal_srng *srng)
2507 {
2508 lockdep_assert_held(&srng->lock);
2509
2510 /* Check whether the ring is empty. Update the shadow
2511 * HP only when the ring isn't empty.
2512 */
2513 if (srng->ring_dir == HAL_SRNG_DIR_SRC &&
2514 *srng->u.src_ring.tp_addr != srng->u.src_ring.hp)
2515 ath12k_hal_srng_access_end(ab, srng);
2516 }
2517
2518 static void ath12k_hal_register_srng_lock_keys(struct ath12k_base *ab)
2519 {
2520 struct ath12k_hal *hal = &ab->hal;
2521 u32 ring_id;
2522
2523 for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2524 lockdep_register_key(&hal->srng_list[ring_id].lock_key);
2525 }
2526
2527 static void ath12k_hal_unregister_srng_lock_keys(struct ath12k_base *ab)
2528 {
2529 struct ath12k_hal *hal = &ab->hal;
2530 u32 ring_id;
2531
2532 for (ring_id = 0; ring_id < HAL_SRNG_RING_ID_MAX; ring_id++)
2533 lockdep_unregister_key(&hal->srng_list[ring_id].lock_key);
2534 }
2535
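/* Build the per-chip SRNG configuration, allocate the shared rdp/wrp pointer
 * areas and register a lockdep class for each ring lock.
 */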
2536 int ath12k_hal_srng_init(struct ath12k_base *ab)
2537 {
2538 struct ath12k_hal *hal = &ab->hal;
2539 int ret;
2540
2541 memset(hal, 0, sizeof(*hal));
2542
2543 ret = ab->hw_params->hal_ops->create_srng_config(ab);
2544 if (ret)
2545 goto err_hal;
2546
2547 ret = ath12k_hal_alloc_cont_rdp(ab);
2548 if (ret)
2549 goto err_hal;
2550
2551 ret = ath12k_hal_alloc_cont_wrp(ab);
2552 if (ret)
2553 goto err_free_cont_rdp;
2554
2555 ath12k_hal_register_srng_lock_keys(ab);
2556
2557 return 0;
2558
2559 err_free_cont_rdp:
2560 ath12k_hal_free_cont_rdp(ab);
2561
2562 err_hal:
2563 return ret;
2564 }
2565
2566 void ath12k_hal_srng_deinit(struct ath12k_base *ab)
2567 {
2568 struct ath12k_hal *hal = &ab->hal;
2569
2570 ath12k_hal_unregister_srng_lock_keys(ab);
2571 ath12k_hal_free_cont_rdp(ab);
2572 ath12k_hal_free_cont_wrp(ab);
2573 kfree(hal->srng_config);
2574 hal->srng_config = NULL;
2575 }
2576
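/* Debug helper: print how long ago each CE and each ext IRQ group last fired
 * and the current SW/HW pointer state of every initialised ring.
 */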
2577 void ath12k_hal_dump_srng_stats(struct ath12k_base *ab)
2578 {
2579 struct hal_srng *srng;
2580 struct ath12k_ext_irq_grp *irq_grp;
2581 struct ath12k_ce_pipe *ce_pipe;
2582 int i;
2583
2584 ath12k_err(ab, "Last interrupt received for each CE:\n");
2585 for (i = 0; i < ab->hw_params->ce_count; i++) {
2586 ce_pipe = &ab->ce.ce_pipe[i];
2587
2588 if (ath12k_ce_get_attr_flags(ab, i) & CE_ATTR_DIS_INTR)
2589 continue;
2590
2591 ath12k_err(ab, "CE_id %d pipe_num %d %ums before\n",
2592 i, ce_pipe->pipe_num,
2593 jiffies_to_msecs(jiffies - ce_pipe->timestamp));
2594 }
2595
2596 ath12k_err(ab, "\nLast interrupt received for each group:\n");
2597 for (i = 0; i < ATH12K_EXT_IRQ_GRP_NUM_MAX; i++) {
2598 irq_grp = &ab->ext_irq_grp[i];
2599 ath12k_err(ab, "group_id %d %ums before\n",
2600 irq_grp->grp_id,
2601 jiffies_to_msecs(jiffies - irq_grp->timestamp));
2602 }
2603
2604 for (i = 0; i < HAL_SRNG_RING_ID_MAX; i++) {
2605 srng = &ab->hal.srng_list[i];
2606
2607 if (!srng->initialized)
2608 continue;
2609
2610 if (srng->ring_dir == HAL_SRNG_DIR_SRC)
2611 ath12k_err(ab,
2612 "src srng id %u hp %u, reap_hp %u, cur tp %u, cached tp %u last tp %u napi processed before %ums\n",
2613 srng->ring_id, srng->u.src_ring.hp,
2614 srng->u.src_ring.reap_hp,
2615 *srng->u.src_ring.tp_addr, srng->u.src_ring.cached_tp,
2616 srng->u.src_ring.last_tp,
2617 jiffies_to_msecs(jiffies - srng->timestamp));
2618 else if (srng->ring_dir == HAL_SRNG_DIR_DST)
2619 ath12k_err(ab,
2620 "dst srng id %u tp %u, cur hp %u, cached hp %u last hp %u napi processed before %ums\n",
2621 srng->ring_id, srng->u.dst_ring.tp,
2622 *srng->u.dst_ring.hp_addr,
2623 srng->u.dst_ring.cached_hp,
2624 srng->u.dst_ring.last_hp,
2625 jiffies_to_msecs(jiffies - srng->timestamp));
2626 }
2627 }
2628